Diffstat (limited to 'activerecord/lib')
-rw-r--r-- activerecord/lib/active_record.rb | 183
-rw-r--r-- activerecord/lib/active_record/aggregations.rb | 284
-rw-r--r-- activerecord/lib/active_record/association_relation.rb | 40
-rw-r--r-- activerecord/lib/active_record/associations.rb | 1869
-rw-r--r-- activerecord/lib/active_record/associations/alias_tracker.rb | 94
-rw-r--r-- activerecord/lib/active_record/associations/association.rb | 284
-rw-r--r-- activerecord/lib/active_record/associations/association_scope.rb | 173
-rw-r--r-- activerecord/lib/active_record/associations/belongs_to_association.rb | 113
-rw-r--r-- activerecord/lib/active_record/associations/belongs_to_polymorphic_association.rb | 42
-rw-r--r-- activerecord/lib/active_record/associations/builder/association.rb | 145
-rw-r--r-- activerecord/lib/active_record/associations/builder/belongs_to.rb | 150
-rw-r--r-- activerecord/lib/active_record/associations/builder/collection_association.rb | 82
-rw-r--r-- activerecord/lib/active_record/associations/builder/has_and_belongs_to_many.rb | 135
-rw-r--r-- activerecord/lib/active_record/associations/builder/has_many.rb | 17
-rw-r--r-- activerecord/lib/active_record/associations/builder/has_one.rb | 30
-rw-r--r-- activerecord/lib/active_record/associations/builder/singular_association.rb | 42
-rw-r--r-- activerecord/lib/active_record/associations/collection_association.rb | 506
-rw-r--r-- activerecord/lib/active_record/associations/collection_proxy.rb | 1158
-rw-r--r-- activerecord/lib/active_record/associations/foreign_association.rb | 13
-rw-r--r-- activerecord/lib/active_record/associations/has_many_association.rb | 135
-rw-r--r-- activerecord/lib/active_record/associations/has_many_through_association.rb | 216
-rw-r--r-- activerecord/lib/active_record/associations/has_one_association.rb | 111
-rw-r--r-- activerecord/lib/active_record/associations/has_one_through_association.rb | 42
-rw-r--r-- activerecord/lib/active_record/associations/join_dependency.rb | 287
-rw-r--r-- activerecord/lib/active_record/associations/join_dependency/join_association.rb | 61
-rw-r--r-- activerecord/lib/active_record/associations/join_dependency/join_base.rb | 23
-rw-r--r-- activerecord/lib/active_record/associations/join_dependency/join_part.rb | 64
-rw-r--r-- activerecord/lib/active_record/associations/preloader.rb | 204
-rw-r--r-- activerecord/lib/active_record/associations/preloader/association.rb | 143
-rw-r--r-- activerecord/lib/active_record/associations/preloader/belongs_to.rb | 17
-rw-r--r-- activerecord/lib/active_record/associations/preloader/collection_association.rb | 19
-rw-r--r-- activerecord/lib/active_record/associations/preloader/has_many.rb | 17
-rw-r--r-- activerecord/lib/active_record/associations/preloader/has_many_through.rb | 21
-rw-r--r-- activerecord/lib/active_record/associations/preloader/has_one.rb | 17
-rw-r--r-- activerecord/lib/active_record/associations/preloader/has_one_through.rb | 11
-rw-r--r-- activerecord/lib/active_record/associations/preloader/singular_association.rb | 20
-rw-r--r-- activerecord/lib/active_record/associations/preloader/through_association.rb | 111
-rw-r--r-- activerecord/lib/active_record/associations/singular_association.rb | 81
-rw-r--r-- activerecord/lib/active_record/associations/through_association.rb | 108
-rw-r--r-- activerecord/lib/active_record/attribute.rb | 242
-rw-r--r-- activerecord/lib/active_record/attribute/user_provided_default.rb | 32
-rw-r--r-- activerecord/lib/active_record/attribute_assignment.rb | 93
-rw-r--r-- activerecord/lib/active_record/attribute_decorators.rb | 90
-rw-r--r-- activerecord/lib/active_record/attribute_methods.rb | 449
-rw-r--r-- activerecord/lib/active_record/attribute_methods/before_type_cast.rb | 78
-rw-r--r-- activerecord/lib/active_record/attribute_methods/dirty.rb | 252
-rw-r--r-- activerecord/lib/active_record/attribute_methods/primary_key.rb | 141
-rw-r--r-- activerecord/lib/active_record/attribute_methods/query.rb | 42
-rw-r--r-- activerecord/lib/active_record/attribute_methods/read.rb | 81
-rw-r--r-- activerecord/lib/active_record/attribute_methods/serialization.rb | 69
-rw-r--r-- activerecord/lib/active_record/attribute_methods/time_zone_conversion.rb | 91
-rw-r--r-- activerecord/lib/active_record/attribute_methods/write.rb | 64
-rw-r--r-- activerecord/lib/active_record/attribute_mutation_tracker.rb | 111
-rw-r--r-- activerecord/lib/active_record/attribute_set.rb | 113
-rw-r--r-- activerecord/lib/active_record/attribute_set/builder.rb | 126
-rw-r--r-- activerecord/lib/active_record/attribute_set/yaml_encoder.rb | 43
-rw-r--r-- activerecord/lib/active_record/attributes.rb | 266
-rw-r--r-- activerecord/lib/active_record/autosave_association.rb | 490
-rw-r--r-- activerecord/lib/active_record/base.rb | 329
-rw-r--r-- activerecord/lib/active_record/callbacks.rb | 349
-rw-r--r-- activerecord/lib/active_record/coders/json.rb | 15
-rw-r--r-- activerecord/lib/active_record/coders/yaml_column.rb | 50
-rw-r--r-- activerecord/lib/active_record/collection_cache_key.rb | 51
-rw-r--r-- activerecord/lib/active_record/connection_adapters/abstract/connection_pool.rb | 989
-rw-r--r-- activerecord/lib/active_record/connection_adapters/abstract/database_limits.rb | 67
-rw-r--r-- activerecord/lib/active_record/connection_adapters/abstract/database_statements.rb | 485
-rw-r--r-- activerecord/lib/active_record/connection_adapters/abstract/query_cache.rb | 138
-rw-r--r-- activerecord/lib/active_record/connection_adapters/abstract/quoting.rb | 216
-rw-r--r-- activerecord/lib/active_record/connection_adapters/abstract/savepoints.rb | 23
-rw-r--r-- activerecord/lib/active_record/connection_adapters/abstract/schema_creation.rb | 145
-rw-r--r-- activerecord/lib/active_record/connection_adapters/abstract/schema_definitions.rb | 654
-rw-r--r-- activerecord/lib/active_record/connection_adapters/abstract/schema_dumper.rb | 119
-rw-r--r-- activerecord/lib/active_record/connection_adapters/abstract/schema_statements.rb | 1372
-rw-r--r-- activerecord/lib/active_record/connection_adapters/abstract/transaction.rb | 274
-rw-r--r-- activerecord/lib/active_record/connection_adapters/abstract_adapter.rb | 600
-rw-r--r-- activerecord/lib/active_record/connection_adapters/abstract_mysql_adapter.rb | 888
-rw-r--r-- activerecord/lib/active_record/connection_adapters/column.rb | 91
-rw-r--r-- activerecord/lib/active_record/connection_adapters/connection_specification.rb | 275
-rw-r--r-- activerecord/lib/active_record/connection_adapters/determine_if_preparable_visitor.rb | 24
-rw-r--r-- activerecord/lib/active_record/connection_adapters/mysql/column.rb | 27
-rw-r--r-- activerecord/lib/active_record/connection_adapters/mysql/database_statements.rb | 97
-rw-r--r-- activerecord/lib/active_record/connection_adapters/mysql/explain_pretty_printer.rb | 72
-rw-r--r-- activerecord/lib/active_record/connection_adapters/mysql/quoting.rb | 44
-rw-r--r-- activerecord/lib/active_record/connection_adapters/mysql/schema_creation.rb | 73
-rw-r--r-- activerecord/lib/active_record/connection_adapters/mysql/schema_definitions.rb | 92
-rw-r--r-- activerecord/lib/active_record/connection_adapters/mysql/schema_dumper.rb | 75
-rw-r--r-- activerecord/lib/active_record/connection_adapters/mysql/schema_statements.rb | 131
-rw-r--r-- activerecord/lib/active_record/connection_adapters/mysql/type_metadata.rb | 35
-rw-r--r-- activerecord/lib/active_record/connection_adapters/mysql2_adapter.rb | 125
-rw-r--r-- activerecord/lib/active_record/connection_adapters/postgresql/column.rb | 17
-rw-r--r-- activerecord/lib/active_record/connection_adapters/postgresql/database_statements.rb | 164
-rw-r--r-- activerecord/lib/active_record/connection_adapters/postgresql/explain_pretty_printer.rb | 44
-rw-r--r-- activerecord/lib/active_record/connection_adapters/postgresql/oid.rb | 33
-rw-r--r-- activerecord/lib/active_record/connection_adapters/postgresql/oid/array.rb | 82
-rw-r--r-- activerecord/lib/active_record/connection_adapters/postgresql/oid/bit.rb | 56
-rw-r--r-- activerecord/lib/active_record/connection_adapters/postgresql/oid/bit_varying.rb | 15
-rw-r--r-- activerecord/lib/active_record/connection_adapters/postgresql/oid/bytea.rb | 17
-rw-r--r-- activerecord/lib/active_record/connection_adapters/postgresql/oid/cidr.rb | 50
-rw-r--r-- activerecord/lib/active_record/connection_adapters/postgresql/oid/date_time.rb | 23
-rw-r--r-- activerecord/lib/active_record/connection_adapters/postgresql/oid/decimal.rb | 15
-rw-r--r-- activerecord/lib/active_record/connection_adapters/postgresql/oid/enum.rb | 21
-rw-r--r-- activerecord/lib/active_record/connection_adapters/postgresql/oid/hstore.rb | 71
-rw-r--r-- activerecord/lib/active_record/connection_adapters/postgresql/oid/inet.rb | 15
-rw-r--r-- activerecord/lib/active_record/connection_adapters/postgresql/oid/jsonb.rb | 15
-rw-r--r-- activerecord/lib/active_record/connection_adapters/postgresql/oid/legacy_point.rb | 45
-rw-r--r-- activerecord/lib/active_record/connection_adapters/postgresql/oid/money.rb | 41
-rw-r--r-- activerecord/lib/active_record/connection_adapters/postgresql/oid/oid.rb | 15
-rw-r--r-- activerecord/lib/active_record/connection_adapters/postgresql/oid/point.rb | 65
-rw-r--r-- activerecord/lib/active_record/connection_adapters/postgresql/oid/range.rb | 93
-rw-r--r-- activerecord/lib/active_record/connection_adapters/postgresql/oid/specialized_string.rb | 18
-rw-r--r-- activerecord/lib/active_record/connection_adapters/postgresql/oid/type_map_initializer.rb | 111
-rw-r--r-- activerecord/lib/active_record/connection_adapters/postgresql/oid/uuid.rb | 23
-rw-r--r-- activerecord/lib/active_record/connection_adapters/postgresql/oid/vector.rb | 28
-rw-r--r-- activerecord/lib/active_record/connection_adapters/postgresql/oid/xml.rb | 30
-rw-r--r-- activerecord/lib/active_record/connection_adapters/postgresql/quoting.rb | 152
-rw-r--r-- activerecord/lib/active_record/connection_adapters/postgresql/referential_integrity.rb | 51
-rw-r--r-- activerecord/lib/active_record/connection_adapters/postgresql/schema_creation.rb | 17
-rw-r--r-- activerecord/lib/active_record/connection_adapters/postgresql/schema_definitions.rb | 195
-rw-r--r-- activerecord/lib/active_record/connection_adapters/postgresql/schema_dumper.rb | 45
-rw-r--r-- activerecord/lib/active_record/connection_adapters/postgresql/schema_statements.rb | 670
-rw-r--r-- activerecord/lib/active_record/connection_adapters/postgresql/type_metadata.rb | 39
-rw-r--r-- activerecord/lib/active_record/connection_adapters/postgresql/utils.rb | 81
-rw-r--r-- activerecord/lib/active_record/connection_adapters/postgresql_adapter.rb | 852
-rw-r--r-- activerecord/lib/active_record/connection_adapters/schema_cache.rb | 118
-rw-r--r-- activerecord/lib/active_record/connection_adapters/sql_type_metadata.rb | 34
-rw-r--r-- activerecord/lib/active_record/connection_adapters/sqlite3/explain_pretty_printer.rb | 21
-rw-r--r-- activerecord/lib/active_record/connection_adapters/sqlite3/quoting.rb | 62
-rw-r--r-- activerecord/lib/active_record/connection_adapters/sqlite3/schema_creation.rb | 17
-rw-r--r-- activerecord/lib/active_record/connection_adapters/sqlite3/schema_definitions.rb | 30
-rw-r--r-- activerecord/lib/active_record/connection_adapters/sqlite3/schema_dumper.rb | 19
-rw-r--r-- activerecord/lib/active_record/connection_adapters/sqlite3/schema_statements.rb | 94
-rw-r--r-- activerecord/lib/active_record/connection_adapters/sqlite3_adapter.rb | 537
-rw-r--r-- activerecord/lib/active_record/connection_adapters/statement_pool.rb | 61
-rw-r--r-- activerecord/lib/active_record/connection_handling.rb | 145
-rw-r--r-- activerecord/lib/active_record/core.rb | 562
-rw-r--r-- activerecord/lib/active_record/counter_cache.rb | 214
-rw-r--r-- activerecord/lib/active_record/define_callbacks.rb | 22
-rw-r--r-- activerecord/lib/active_record/dynamic_matchers.rb | 122
-rw-r--r-- activerecord/lib/active_record/enum.rb | 241
-rw-r--r-- activerecord/lib/active_record/errors.rb | 337
-rw-r--r-- activerecord/lib/active_record/explain.rb | 50
-rw-r--r-- activerecord/lib/active_record/explain_registry.rb | 32
-rw-r--r-- activerecord/lib/active_record/explain_subscriber.rb | 34
-rw-r--r-- activerecord/lib/active_record/fixture_set/file.rb | 82
-rw-r--r-- activerecord/lib/active_record/fixtures.rb | 1070
-rw-r--r-- activerecord/lib/active_record/gem_version.rb | 17
-rw-r--r-- activerecord/lib/active_record/inheritance.rb | 254
-rw-r--r-- activerecord/lib/active_record/integration.rb | 155
-rw-r--r-- activerecord/lib/active_record/internal_metadata.rb | 45
-rw-r--r-- activerecord/lib/active_record/legacy_yaml_adapter.rb | 48
-rw-r--r-- activerecord/lib/active_record/locale/en.yml | 48
-rw-r--r-- activerecord/lib/active_record/locking/optimistic.rb | 212
-rw-r--r-- activerecord/lib/active_record/locking/pessimistic.rb | 88
-rw-r--r-- activerecord/lib/active_record/log_subscriber.rb | 96
-rw-r--r-- activerecord/lib/active_record/migration.rb | 1340
-rw-r--r-- activerecord/lib/active_record/migration/command_recorder.rb | 240
-rw-r--r-- activerecord/lib/active_record/migration/compatibility.rb | 170
-rw-r--r-- activerecord/lib/active_record/migration/join_table.rb | 17
-rw-r--r-- activerecord/lib/active_record/model_schema.rb | 515
-rw-r--r-- activerecord/lib/active_record/nested_attributes.rb | 589
-rw-r--r-- activerecord/lib/active_record/no_touching.rb | 58
-rw-r--r-- activerecord/lib/active_record/null_relation.rb | 68
-rw-r--r-- activerecord/lib/active_record/persistence.rb | 623
-rw-r--r-- activerecord/lib/active_record/query_cache.rb | 49
-rw-r--r-- activerecord/lib/active_record/querying.rb | 70
-rw-r--r-- activerecord/lib/active_record/railtie.rb | 214
-rw-r--r-- activerecord/lib/active_record/railties/console_sandbox.rb | 7
-rw-r--r-- activerecord/lib/active_record/railties/controller_runtime.rb | 56
-rw-r--r-- activerecord/lib/active_record/railties/databases.rake | 369
-rw-r--r-- activerecord/lib/active_record/railties/jdbcmysql_error.rb | 18
-rw-r--r-- activerecord/lib/active_record/readonly_attributes.rb | 24
-rw-r--r-- activerecord/lib/active_record/reflection.rb | 1141
-rw-r--r-- activerecord/lib/active_record/relation.rb | 719
-rw-r--r-- activerecord/lib/active_record/relation/batches.rb | 275
-rw-r--r-- activerecord/lib/active_record/relation/batches/batch_enumerator.rb | 69
-rw-r--r-- activerecord/lib/active_record/relation/calculations.rb | 404
-rw-r--r-- activerecord/lib/active_record/relation/delegation.rb | 121
-rw-r--r-- activerecord/lib/active_record/relation/finder_methods.rb | 575
-rw-r--r-- activerecord/lib/active_record/relation/from_clause.rb | 26
-rw-r--r-- activerecord/lib/active_record/relation/merger.rb | 166
-rw-r--r-- activerecord/lib/active_record/relation/predicate_builder.rb | 133
-rw-r--r-- activerecord/lib/active_record/relation/predicate_builder/array_handler.rb | 48
-rw-r--r-- activerecord/lib/active_record/relation/predicate_builder/association_query_value.rb | 46
-rw-r--r-- activerecord/lib/active_record/relation/predicate_builder/base_handler.rb | 19
-rw-r--r-- activerecord/lib/active_record/relation/predicate_builder/basic_object_handler.rb | 20
-rw-r--r-- activerecord/lib/active_record/relation/predicate_builder/polymorphic_array_value.rb | 54
-rw-r--r-- activerecord/lib/active_record/relation/predicate_builder/range_handler.rb | 41
-rw-r--r-- activerecord/lib/active_record/relation/predicate_builder/relation_handler.rb | 15
-rw-r--r-- activerecord/lib/active_record/relation/query_attribute.rb | 26
-rw-r--r-- activerecord/lib/active_record/relation/query_methods.rb | 1193
-rw-r--r-- activerecord/lib/active_record/relation/record_fetch_warning.rb | 51
-rw-r--r-- activerecord/lib/active_record/relation/spawn_methods.rb | 77
-rw-r--r-- activerecord/lib/active_record/relation/where_clause.rb | 172
-rw-r--r-- activerecord/lib/active_record/relation/where_clause_factory.rb | 35
-rw-r--r-- activerecord/lib/active_record/result.rb | 149
-rw-r--r-- activerecord/lib/active_record/runtime_registry.rb | 24
-rw-r--r-- activerecord/lib/active_record/sanitization.rb | 216
-rw-r--r-- activerecord/lib/active_record/schema.rb | 70
-rw-r--r-- activerecord/lib/active_record/schema_dumper.rb | 255
-rw-r--r-- activerecord/lib/active_record/schema_migration.rb | 56
-rw-r--r-- activerecord/lib/active_record/scoping.rb | 106
-rw-r--r-- activerecord/lib/active_record/scoping/default.rb | 152
-rw-r--r-- activerecord/lib/active_record/scoping/named.rb | 189
-rw-r--r-- activerecord/lib/active_record/secure_token.rb | 40
-rw-r--r-- activerecord/lib/active_record/serialization.rb | 22
-rw-r--r-- activerecord/lib/active_record/statement_cache.rb | 113
-rw-r--r-- activerecord/lib/active_record/store.rb | 211
-rw-r--r-- activerecord/lib/active_record/suppressor.rb | 61
-rw-r--r-- activerecord/lib/active_record/table_metadata.rb | 75
-rw-r--r-- activerecord/lib/active_record/tasks/database_tasks.rb | 327
-rw-r--r-- activerecord/lib/active_record/tasks/mysql_database_tasks.rb | 162
-rw-r--r-- activerecord/lib/active_record/tasks/postgresql_database_tasks.rb | 143
-rw-r--r-- activerecord/lib/active_record/tasks/sqlite_database_tasks.rb | 83
-rw-r--r-- activerecord/lib/active_record/timestamp.rb | 146
-rw-r--r-- activerecord/lib/active_record/touch_later.rb | 64
-rw-r--r-- activerecord/lib/active_record/transactions.rb | 499
-rw-r--r-- activerecord/lib/active_record/translation.rb | 24
-rw-r--r-- activerecord/lib/active_record/type.rb | 79
-rw-r--r-- activerecord/lib/active_record/type/adapter_specific_registry.rb | 136
-rw-r--r-- activerecord/lib/active_record/type/date.rb | 9
-rw-r--r-- activerecord/lib/active_record/type/date_time.rb | 9
-rw-r--r-- activerecord/lib/active_record/type/decimal_without_scale.rb | 15
-rw-r--r-- activerecord/lib/active_record/type/hash_lookup_type_map.rb | 25
-rw-r--r-- activerecord/lib/active_record/type/internal/timezone.rb | 17
-rw-r--r-- activerecord/lib/active_record/type/json.rb | 30
-rw-r--r-- activerecord/lib/active_record/type/serialized.rb | 67
-rw-r--r-- activerecord/lib/active_record/type/text.rb | 11
-rw-r--r-- activerecord/lib/active_record/type/time.rb | 21
-rw-r--r-- activerecord/lib/active_record/type/type_map.rb | 62
-rw-r--r-- activerecord/lib/active_record/type/unsigned_integer.rb | 17
-rw-r--r-- activerecord/lib/active_record/type_caster.rb | 9
-rw-r--r-- activerecord/lib/active_record/type_caster/connection.rb | 33
-rw-r--r-- activerecord/lib/active_record/type_caster/map.rb | 23
-rw-r--r-- activerecord/lib/active_record/validations.rb | 93
-rw-r--r-- activerecord/lib/active_record/validations/absence.rb | 25
-rw-r--r-- activerecord/lib/active_record/validations/associated.rb | 60
-rw-r--r-- activerecord/lib/active_record/validations/length.rb | 26
-rw-r--r-- activerecord/lib/active_record/validations/presence.rb | 68
-rw-r--r-- activerecord/lib/active_record/validations/uniqueness.rb | 236
-rw-r--r-- activerecord/lib/active_record/version.rb | 10
-rw-r--r-- activerecord/lib/rails/generators/active_record.rb | 19
-rw-r--r-- activerecord/lib/rails/generators/active_record/application_record/application_record_generator.rb | 26
-rw-r--r-- activerecord/lib/rails/generators/active_record/application_record/templates/application_record.rb | 5
-rw-r--r-- activerecord/lib/rails/generators/active_record/migration.rb | 35
-rw-r--r-- activerecord/lib/rails/generators/active_record/migration/migration_generator.rb | 78
-rw-r--r-- activerecord/lib/rails/generators/active_record/migration/templates/create_table_migration.rb | 24
-rw-r--r-- activerecord/lib/rails/generators/active_record/migration/templates/migration.rb | 46
-rw-r--r-- activerecord/lib/rails/generators/active_record/model/model_generator.rb | 48
-rw-r--r-- activerecord/lib/rails/generators/active_record/model/templates/model.rb | 13
-rw-r--r-- activerecord/lib/rails/generators/active_record/model/templates/module.rb | 7
250 files changed, 40364 insertions, 0 deletions
diff --git a/activerecord/lib/active_record.rb b/activerecord/lib/active_record.rb
new file mode 100644
index 0000000000..0c19fed9e1
--- /dev/null
+++ b/activerecord/lib/active_record.rb
@@ -0,0 +1,183 @@
+# frozen_string_literal: true
+
+#--
+# Copyright (c) 2004-2017 David Heinemeier Hansson
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be
+# included in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#++
+
+require "active_support"
+require "active_support/rails"
+require "active_model"
+require "arel"
+
+require_relative "active_record/version"
+require_relative "active_record/attribute_set"
+
+module ActiveRecord
+ extend ActiveSupport::Autoload
+
+ autoload :Attribute
+ autoload :Base
+ autoload :Callbacks
+ autoload :Core
+ autoload :ConnectionHandling
+ autoload :CounterCache
+ autoload :DynamicMatchers
+ autoload :Enum
+ autoload :InternalMetadata
+ autoload :Explain
+ autoload :Inheritance
+ autoload :Integration
+ autoload :Migration
+ autoload :Migrator, "active_record/migration"
+ autoload :ModelSchema
+ autoload :NestedAttributes
+ autoload :NoTouching
+ autoload :TouchLater
+ autoload :Persistence
+ autoload :QueryCache
+ autoload :Querying
+ autoload :CollectionCacheKey
+ autoload :ReadonlyAttributes
+ autoload :RecordInvalid, "active_record/validations"
+ autoload :Reflection
+ autoload :RuntimeRegistry
+ autoload :Sanitization
+ autoload :Schema
+ autoload :SchemaDumper
+ autoload :SchemaMigration
+ autoload :Scoping
+ autoload :Serialization
+ autoload :StatementCache
+ autoload :Store
+ autoload :Suppressor
+ autoload :Timestamp
+ autoload :Transactions
+ autoload :Translation
+ autoload :Validations
+ autoload :SecureToken
+
+ eager_autoload do
+ autoload :ActiveRecordError, "active_record/errors"
+ autoload :ConnectionNotEstablished, "active_record/errors"
+ autoload :ConnectionAdapters, "active_record/connection_adapters/abstract_adapter"
+
+ autoload :Aggregations
+ autoload :Associations
+ autoload :AttributeAssignment
+ autoload :AttributeMethods
+ autoload :AutosaveAssociation
+
+ autoload :LegacyYamlAdapter
+
+ autoload :Relation
+ autoload :AssociationRelation
+ autoload :NullRelation
+
+ autoload_under "relation" do
+ autoload :QueryMethods
+ autoload :FinderMethods
+ autoload :Calculations
+ autoload :PredicateBuilder
+ autoload :SpawnMethods
+ autoload :Batches
+ autoload :Delegation
+ end
+
+ autoload :Result
+ autoload :TableMetadata
+ end
+
+ module Coders
+ autoload :YAMLColumn, "active_record/coders/yaml_column"
+ autoload :JSON, "active_record/coders/json"
+ end
+
+ module AttributeMethods
+ extend ActiveSupport::Autoload
+
+ eager_autoload do
+ autoload :BeforeTypeCast
+ autoload :Dirty
+ autoload :PrimaryKey
+ autoload :Query
+ autoload :Read
+ autoload :TimeZoneConversion
+ autoload :Write
+ autoload :Serialization
+ end
+ end
+
+ module Locking
+ extend ActiveSupport::Autoload
+
+ eager_autoload do
+ autoload :Optimistic
+ autoload :Pessimistic
+ end
+ end
+
+ module ConnectionAdapters
+ extend ActiveSupport::Autoload
+
+ eager_autoload do
+ autoload :AbstractAdapter
+ end
+ end
+
+ module Scoping
+ extend ActiveSupport::Autoload
+
+ eager_autoload do
+ autoload :Named
+ autoload :Default
+ end
+ end
+
+ module Tasks
+ extend ActiveSupport::Autoload
+
+ autoload :DatabaseTasks
+ autoload :SQLiteDatabaseTasks, "active_record/tasks/sqlite_database_tasks"
+ autoload :MySQLDatabaseTasks, "active_record/tasks/mysql_database_tasks"
+ autoload :PostgreSQLDatabaseTasks,
+ "active_record/tasks/postgresql_database_tasks"
+ end
+
+ autoload :TestFixtures, "active_record/fixtures"
+
+ def self.eager_load!
+ super
+ ActiveRecord::Locking.eager_load!
+ ActiveRecord::Scoping.eager_load!
+ ActiveRecord::Associations.eager_load!
+ ActiveRecord::AttributeMethods.eager_load!
+ ActiveRecord::ConnectionAdapters.eager_load!
+ end
+end
+
+ActiveSupport.on_load(:active_record) do
+ Arel::Table.engine = self
+end
+
+ActiveSupport.on_load(:i18n) do
+ I18n.load_path << File.expand_path("active_record/locale/en.yml", __dir__)
+end
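Editor's note: the on_load hook at the end of active_record.rb above is the same lazy-loading mechanism downstream code is expected to use. Because nearly every constant in this file is registered via autoload, extensions should defer anything that touches ActiveRecord::Base until the :active_record load hook fires rather than referencing it at boot. A minimal sketch, assuming a hypothetical AuditStamps module and an updated_by column that are not part of this commit:

    require "active_support/lazy_load_hooks"

    module AuditStamps
      def touched_by(user)
        # assumes the model's table has an updated_by column
        update(updated_by: user.id)
      end
    end

    ActiveSupport.on_load(:active_record) do
      # self is ActiveRecord::Base here, loaded only when first needed
      include AuditStamps
    end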
diff --git a/activerecord/lib/active_record/aggregations.rb b/activerecord/lib/active_record/aggregations.rb
new file mode 100644
index 0000000000..e5e89734d2
--- /dev/null
+++ b/activerecord/lib/active_record/aggregations.rb
@@ -0,0 +1,284 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ # See ActiveRecord::Aggregations::ClassMethods for documentation
+ module Aggregations
+ extend ActiveSupport::Concern
+
+ def initialize_dup(*) # :nodoc:
+ @aggregation_cache = {}
+ super
+ end
+
+ def reload(*) # :nodoc:
+ clear_aggregation_cache
+ super
+ end
+
+ private
+
+ def clear_aggregation_cache
+ @aggregation_cache.clear if persisted?
+ end
+
+ def init_internals
+ @aggregation_cache = {}
+ super
+ end
+
+ # Active Record implements aggregation through a macro-like class method called #composed_of
+ # for representing attributes as value objects. It expresses relationships like "Account [is]
+ # composed of Money [among other things]" or "Person [is] composed of [an] address". Each call
+ # to the macro adds a description of how the value objects are created from the attributes of
+ # the entity object (when the entity is initialized either as a new object or from finding an
+ # existing object) and how it can be turned back into attributes (when the entity is saved to
+ # the database).
+ #
+ # class Customer < ActiveRecord::Base
+ # composed_of :balance, class_name: "Money", mapping: %w(amount currency)
+ # composed_of :address, mapping: [ %w(address_street street), %w(address_city city) ]
+ # end
+ #
+ # The customer class now has the following methods to manipulate the value objects:
+ # * <tt>Customer#balance, Customer#balance=(money)</tt>
+ # * <tt>Customer#address, Customer#address=(address)</tt>
+ #
+ # These methods will operate with value objects like the ones described below:
+ #
+ # class Money
+ # include Comparable
+ # attr_reader :amount, :currency
+ # EXCHANGE_RATES = { "USD_TO_DKK" => 6 }
+ #
+ # def initialize(amount, currency = "USD")
+ # @amount, @currency = amount, currency
+ # end
+ #
+ # def exchange_to(other_currency)
+ # exchanged_amount = (amount * EXCHANGE_RATES["#{currency}_TO_#{other_currency}"]).floor
+ # Money.new(exchanged_amount, other_currency)
+ # end
+ #
+ # def ==(other_money)
+ # amount == other_money.amount && currency == other_money.currency
+ # end
+ #
+ # def <=>(other_money)
+ # if currency == other_money.currency
+ # amount <=> other_money.amount
+ # else
+ # amount <=> other_money.exchange_to(currency).amount
+ # end
+ # end
+ # end
+ #
+ # class Address
+ # attr_reader :street, :city
+ # def initialize(street, city)
+ # @street, @city = street, city
+ # end
+ #
+ # def close_to?(other_address)
+ # city == other_address.city
+ # end
+ #
+ # def ==(other_address)
+ # city == other_address.city && street == other_address.street
+ # end
+ # end
+ #
+ # Now it's possible to access attributes from the database through the value objects instead. If
+ # you choose to name the composition the same as the attribute's name, it will be the only way to
+ # access that attribute. That's the case with our +balance+ attribute. You interact with the value
+ # objects just like you would with any other attribute:
+ #
+ # customer.balance = Money.new(20) # sets the Money value object and the attribute
+ # customer.balance # => Money value object
+ # customer.balance.exchange_to("DKK") # => Money.new(120, "DKK")
+ # customer.balance > Money.new(10) # => true
+ # customer.balance == Money.new(20) # => true
+ # customer.balance < Money.new(5) # => false
+ #
+ # Value objects can also be composed of multiple attributes, such as the case of Address. The order
+ # of the mappings will determine the order of the parameters.
+ #
+ # customer.address_street = "Hyancintvej"
+ # customer.address_city = "Copenhagen"
+ # customer.address # => Address.new("Hyancintvej", "Copenhagen")
+ #
+ # customer.address = Address.new("May Street", "Chicago")
+ # customer.address_street # => "May Street"
+ # customer.address_city # => "Chicago"
+ #
+ # == Writing value objects
+ #
+ # Value objects are immutable and interchangeable objects that represent a given value, such as
+ # a Money object representing $5. Two Money objects both representing $5 should be equal (through
+ # methods such as <tt>==</tt> and <tt><=></tt> from Comparable if ranking makes sense). This is
+ # unlike entity objects where equality is determined by identity. An entity class such as Customer can
+ # easily have two different objects that both have an address on Hyancintvej. Entity identity is
+ # determined by object or relational unique identifiers (such as primary keys). Normal
+ # ActiveRecord::Base classes are entity objects.
+ #
+ # It's also important to treat the value objects as immutable. Don't allow the Money object to have
+ # its amount changed after creation. Create a new Money object with the new value instead. The
+ # <tt>Money#exchange_to</tt> method is an example of this. It returns a new value object instead of changing
+ # its own values. Active Record won't persist value objects that have been changed through means
+ # other than the writer method.
+ #
+ # The immutable requirement is enforced by Active Record by freezing any object assigned as a value
+ # object. Attempting to change it afterwards will result in a +RuntimeError+.
+ #
+ # Read more about value objects on http://c2.com/cgi/wiki?ValueObject and on the dangers of not
+ # keeping value objects immutable on http://c2.com/cgi/wiki?ValueObjectsShouldBeImmutable
+ #
+ # == Custom constructors and converters
+ #
+ # By default value objects are initialized by calling the <tt>new</tt> constructor of the value
+ # class passing each of the mapped attributes, in the order specified by the <tt>:mapping</tt>
+ # option, as arguments. If the value class doesn't support this convention then #composed_of allows
+ # a custom constructor to be specified.
+ #
+ # When a new value is assigned to the value object, the default assumption is that the new value
+ # is an instance of the value class. Specifying a custom converter allows the new value to be automatically
+ # converted to an instance of value class if necessary.
+ #
+ # For example, the +NetworkResource+ model has +network_address+ and +cidr_range+ attributes that should be
+ # aggregated using the +NetAddr::CIDR+ value class (http://www.rubydoc.info/gems/netaddr/1.5.0/NetAddr/CIDR).
+ # The constructor for the value class is called +create+ and it expects a CIDR address string as a parameter.
+ # New values can be assigned to the value object using either another +NetAddr::CIDR+ object, a string
+ # or an array. The <tt>:constructor</tt> and <tt>:converter</tt> options can be used to meet
+ # these requirements:
+ #
+ # class NetworkResource < ActiveRecord::Base
+ # composed_of :cidr,
+ # class_name: 'NetAddr::CIDR',
+ # mapping: [ %w(network_address network), %w(cidr_range bits) ],
+ # allow_nil: true,
+ # constructor: Proc.new { |network_address, cidr_range| NetAddr::CIDR.create("#{network_address}/#{cidr_range}") },
+ # converter: Proc.new { |value| NetAddr::CIDR.create(value.is_a?(Array) ? value.join('/') : value) }
+ # end
+ #
+ # # This calls the :constructor
+ # network_resource = NetworkResource.new(network_address: '192.168.0.1', cidr_range: 24)
+ #
+ # # These assignments will both use the :converter
+ # network_resource.cidr = [ '192.168.2.1', 8 ]
+ # network_resource.cidr = '192.168.0.1/24'
+ #
+ # # This assignment won't use the :converter as the value is already an instance of the value class
+ # network_resource.cidr = NetAddr::CIDR.create('192.168.2.1/8')
+ #
+ # # Saving and then reloading will use the :constructor on reload
+ # network_resource.save
+ # network_resource.reload
+ #
+ # == Finding records by a value object
+ #
+ # Once a #composed_of relationship is specified for a model, records can be loaded from the database
+ # by specifying an instance of the value object in the conditions hash. The following example
+ # finds all customers with +balance_amount+ equal to 20 and +balance_currency+ equal to "USD":
+ #
+ # Customer.where(balance: Money.new(20, "USD"))
+ #
+ module ClassMethods
+ # Adds reader and writer methods for manipulating a value object:
+ # <tt>composed_of :address</tt> adds <tt>address</tt> and <tt>address=(new_address)</tt> methods.
+ #
+ # Options are:
+ # * <tt>:class_name</tt> - Specifies the class name of the association. Use it only if that name
+ # can't be inferred from the part id. So <tt>composed_of :address</tt> will by default be linked
+ # to the Address class, but if the real class name is +CompanyAddress+, you'll have to specify it
+ # with this option.
+ # * <tt>:mapping</tt> - Specifies the mapping of entity attributes to attributes of the value
+ # object. Each mapping is represented as an array where the first item is the name of the
+ # entity attribute and the second item is the name of the attribute in the value object. The
+ # order in which mappings are defined determines the order in which attributes are sent to the
+ # value class constructor.
+ # * <tt>:allow_nil</tt> - Specifies that the value object will not be instantiated when all mapped
+ # attributes are +nil+. Setting the value object to +nil+ has the effect of writing +nil+ to all
+ # mapped attributes.
+ # This defaults to +false+.
+ # * <tt>:constructor</tt> - A symbol specifying the name of the constructor method or a Proc that
+ # is called to initialize the value object. The constructor is passed all of the mapped attributes,
+ # in the order that they are defined in the <tt>:mapping option</tt>, as arguments and uses them
+ # to instantiate a <tt>:class_name</tt> object.
+ # The default is <tt>:new</tt>.
+ # * <tt>:converter</tt> - A symbol specifying the name of a class method of <tt>:class_name</tt>
+ # or a Proc that is called when a new value is assigned to the value object. The converter is
+ # passed the single value that is used in the assignment and is only called if the new value is
+ # not an instance of <tt>:class_name</tt>. If <tt>:allow_nil</tt> is set to true, the converter
+ # can return +nil+ to skip the assignment.
+ #
+ # Option examples:
+ # composed_of :temperature, mapping: %w(reading celsius)
+ # composed_of :balance, class_name: "Money", mapping: %w(balance amount),
+ # converter: Proc.new { |balance| balance.to_money }
+ # composed_of :address, mapping: [ %w(address_street street), %w(address_city city) ]
+ # composed_of :gps_location
+ # composed_of :gps_location, allow_nil: true
+ # composed_of :ip_address,
+ # class_name: 'IPAddr',
+ # mapping: %w(ip to_i),
+ # constructor: Proc.new { |ip| IPAddr.new(ip, Socket::AF_INET) },
+ # converter: Proc.new { |ip| ip.is_a?(Integer) ? IPAddr.new(ip, Socket::AF_INET) : IPAddr.new(ip.to_s) }
+ #
+ def composed_of(part_id, options = {})
+ options.assert_valid_keys(:class_name, :mapping, :allow_nil, :constructor, :converter)
+
+ name = part_id.id2name
+ class_name = options[:class_name] || name.camelize
+ mapping = options[:mapping] || [ name, name ]
+ mapping = [ mapping ] unless mapping.first.is_a?(Array)
+ allow_nil = options[:allow_nil] || false
+ constructor = options[:constructor] || :new
+ converter = options[:converter]
+
+ reader_method(name, class_name, mapping, allow_nil, constructor)
+ writer_method(name, class_name, mapping, allow_nil, converter)
+
+ reflection = ActiveRecord::Reflection.create(:composed_of, part_id, nil, options, self)
+ Reflection.add_aggregate_reflection self, part_id, reflection
+ end
+
+ private
+ def reader_method(name, class_name, mapping, allow_nil, constructor)
+ define_method(name) do
+ if @aggregation_cache[name].nil? && (!allow_nil || mapping.any? { |key, _| !_read_attribute(key).nil? })
+ attrs = mapping.collect { |key, _| _read_attribute(key) }
+ object = constructor.respond_to?(:call) ?
+ constructor.call(*attrs) :
+ class_name.constantize.send(constructor, *attrs)
+ @aggregation_cache[name] = object
+ end
+ @aggregation_cache[name]
+ end
+ end
+
+ def writer_method(name, class_name, mapping, allow_nil, converter)
+ define_method("#{name}=") do |part|
+ klass = class_name.constantize
+
+ unless part.is_a?(klass) || converter.nil? || part.nil?
+ part = converter.respond_to?(:call) ? converter.call(part) : klass.send(converter, part)
+ end
+
+ hash_from_multiparameter_assignment = part.is_a?(Hash) &&
+ part.each_key.all? { |k| k.is_a?(Integer) }
+ if hash_from_multiparameter_assignment
+ raise ArgumentError unless part.size == part.each_key.max
+ part = klass.new(*part.sort.map(&:last))
+ end
+
+ if part.nil? && allow_nil
+ mapping.each { |key, _| self[key] = nil }
+ @aggregation_cache[name] = nil
+ else
+ mapping.each { |key, value| self[key] = part.send(value) }
+ @aggregation_cache[name] = part.freeze
+ end
+ end
+ end
+ end
+ end
+end
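Editor's note: to make the accessors generated by aggregations.rb above concrete, here is a rough usage sketch built from the Customer/Money example in the documentation comments; the balance_amount and balance_currency columns are assumed, matching the "Finding records by a value object" section:

    class Customer < ActiveRecord::Base
      composed_of :balance,
                  class_name: "Money",
                  mapping: [ %w(balance_amount amount), %w(balance_currency currency) ]
    end

    customer = Customer.new(balance_amount: 20, balance_currency: "USD")
    customer.balance            # => Money value object built by reader_method
    customer.balance = Money.new(25, "DKK")
    customer.balance_amount     # => 25, written back through the mapping
    customer.balance.frozen?    # => true, writer_method freezes the value object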
diff --git a/activerecord/lib/active_record/association_relation.rb b/activerecord/lib/active_record/association_relation.rb
new file mode 100644
index 0000000000..2b0b2864bc
--- /dev/null
+++ b/activerecord/lib/active_record/association_relation.rb
@@ -0,0 +1,40 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ class AssociationRelation < Relation
+ def initialize(klass, table, predicate_builder, association)
+ super(klass, table, predicate_builder)
+ @association = association
+ end
+
+ def proxy_association
+ @association
+ end
+
+ def ==(other)
+ other == records
+ end
+
+ def build(*args, &block)
+ scoping { @association.build(*args, &block) }
+ end
+ alias new build
+
+ def create(*args, &block)
+ scoping { @association.create(*args, &block) }
+ end
+
+ def create!(*args, &block)
+ scoping { @association.create!(*args, &block) }
+ end
+
+ private
+
+ def exec_queries
+ super do |r|
+ @association.set_inverse_instance r
+ yield r if block_given?
+ end
+ end
+ end
+end
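Editor's note: association_relation.rb above is the relation you get when chaining query methods onto a collection association; #build and #create go through the owning association, so foreign keys and inverse instances are filled in. A rough sketch, assuming a hypothetical Post/Comment pair and made-up spam/body columns:

    class Post < ActiveRecord::Base
      has_many :comments
    end

    class Comment < ActiveRecord::Base
      belongs_to :post
    end

    post     = Post.first
    relation = post.comments.where(spam: false)  # an ActiveRecord::AssociationRelation
    relation.proxy_association.owner              # => post
    comment  = relation.create!(body: "hi")       # delegated to the association via #create! above
    comment.post_id == post.id                    # => true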
diff --git a/activerecord/lib/active_record/associations.rb b/activerecord/lib/active_record/associations.rb
new file mode 100644
index 0000000000..840f71bef2
--- /dev/null
+++ b/activerecord/lib/active_record/associations.rb
@@ -0,0 +1,1869 @@
+# frozen_string_literal: true
+
+require "active_support/core_ext/enumerable"
+require "active_support/core_ext/string/conversions"
+require "active_support/core_ext/module/remove_method"
+require_relative "errors"
+
+module ActiveRecord
+ class AssociationNotFoundError < ConfigurationError #:nodoc:
+ def initialize(record = nil, association_name = nil)
+ if record && association_name
+ super("Association named '#{association_name}' was not found on #{record.class.name}; perhaps you misspelled it?")
+ else
+ super("Association was not found.")
+ end
+ end
+ end
+
+ class InverseOfAssociationNotFoundError < ActiveRecordError #:nodoc:
+ def initialize(reflection = nil, associated_class = nil)
+ if reflection
+ super("Could not find the inverse association for #{reflection.name} (#{reflection.options[:inverse_of].inspect} in #{associated_class.nil? ? reflection.class_name : associated_class.name})")
+ else
+ super("Could not find the inverse association.")
+ end
+ end
+ end
+
+ class HasManyThroughAssociationNotFoundError < ActiveRecordError #:nodoc:
+ def initialize(owner_class_name = nil, reflection = nil)
+ if owner_class_name && reflection
+ super("Could not find the association #{reflection.options[:through].inspect} in model #{owner_class_name}")
+ else
+ super("Could not find the association.")
+ end
+ end
+ end
+
+ class HasManyThroughAssociationPolymorphicSourceError < ActiveRecordError #:nodoc:
+ def initialize(owner_class_name = nil, reflection = nil, source_reflection = nil)
+ if owner_class_name && reflection && source_reflection
+ super("Cannot have a has_many :through association '#{owner_class_name}##{reflection.name}' on the polymorphic object '#{source_reflection.class_name}##{source_reflection.name}' without 'source_type'. Try adding 'source_type: \"#{reflection.name.to_s.classify}\"' to 'has_many :through' definition.")
+ else
+ super("Cannot have a has_many :through association.")
+ end
+ end
+ end
+
+ class HasManyThroughAssociationPolymorphicThroughError < ActiveRecordError #:nodoc:
+ def initialize(owner_class_name = nil, reflection = nil)
+ if owner_class_name && reflection
+ super("Cannot have a has_many :through association '#{owner_class_name}##{reflection.name}' which goes through the polymorphic association '#{owner_class_name}##{reflection.through_reflection.name}'.")
+ else
+ super("Cannot have a has_many :through association.")
+ end
+ end
+ end
+
+ class HasManyThroughAssociationPointlessSourceTypeError < ActiveRecordError #:nodoc:
+ def initialize(owner_class_name = nil, reflection = nil, source_reflection = nil)
+ if owner_class_name && reflection && source_reflection
+ super("Cannot have a has_many :through association '#{owner_class_name}##{reflection.name}' with a :source_type option if the '#{reflection.through_reflection.class_name}##{source_reflection.name}' is not polymorphic. Try removing :source_type on your association.")
+ else
+ super("Cannot have a has_many :through association.")
+ end
+ end
+ end
+
+ class HasOneThroughCantAssociateThroughCollection < ActiveRecordError #:nodoc:
+ def initialize(owner_class_name = nil, reflection = nil, through_reflection = nil)
+ if owner_class_name && reflection && through_reflection
+ super("Cannot have a has_one :through association '#{owner_class_name}##{reflection.name}' where the :through association '#{owner_class_name}##{through_reflection.name}' is a collection. Specify a has_one or belongs_to association in the :through option instead.")
+ else
+ super("Cannot have a has_one :through association.")
+ end
+ end
+ end
+
+ class HasOneAssociationPolymorphicThroughError < ActiveRecordError #:nodoc:
+ def initialize(owner_class_name = nil, reflection = nil)
+ if owner_class_name && reflection
+ super("Cannot have a has_one :through association '#{owner_class_name}##{reflection.name}' which goes through the polymorphic association '#{owner_class_name}##{reflection.through_reflection.name}'.")
+ else
+ super("Cannot have a has_one :through association.")
+ end
+ end
+ end
+
+ class HasManyThroughSourceAssociationNotFoundError < ActiveRecordError #:nodoc:
+ def initialize(reflection = nil)
+ if reflection
+ through_reflection = reflection.through_reflection
+ source_reflection_names = reflection.source_reflection_names
+ source_associations = reflection.through_reflection.klass._reflections.keys
+ super("Could not find the source association(s) #{source_reflection_names.collect(&:inspect).to_sentence(two_words_connector: ' or ', last_word_connector: ', or ', locale: :en)} in model #{through_reflection.klass}. Try 'has_many #{reflection.name.inspect}, :through => #{through_reflection.name.inspect}, :source => <name>'. Is it one of #{source_associations.to_sentence(two_words_connector: ' or ', last_word_connector: ', or ', locale: :en)}?")
+ else
+ super("Could not find the source association(s).")
+ end
+ end
+ end
+
+ class HasManyThroughOrderError < ActiveRecordError #:nodoc:
+ def initialize(owner_class_name = nil, reflection = nil, through_reflection = nil)
+ if owner_class_name && reflection && through_reflection
+ super("Cannot have a has_many :through association '#{owner_class_name}##{reflection.name}' which goes through '#{owner_class_name}##{through_reflection.name}' before the through association is defined.")
+ else
+ super("Cannot have a has_many :through association before the through association is defined.")
+ end
+ end
+ end
+
+ class ThroughCantAssociateThroughHasOneOrManyReflection < ActiveRecordError #:nodoc:
+ def initialize(owner = nil, reflection = nil)
+ if owner && reflection
+ super("Cannot modify association '#{owner.class.name}##{reflection.name}' because the source reflection class '#{reflection.source_reflection.class_name}' is associated to '#{reflection.through_reflection.class_name}' via :#{reflection.source_reflection.macro}.")
+ else
+ super("Cannot modify association.")
+ end
+ end
+ end
+
+ class AmbiguousSourceReflectionForThroughAssociation < ActiveRecordError # :nodoc:
+ def initialize(klass, macro, association_name, options, possible_sources)
+ example_options = options.dup
+ example_options[:source] = possible_sources.first
+
+ super("Ambiguous source reflection for through association. Please " \
+ "specify a :source directive on your declaration like:\n" \
+ "\n" \
+ " class #{klass} < ActiveRecord::Base\n" \
+ " #{macro} :#{association_name}, #{example_options}\n" \
+ " end"
+ )
+ end
+ end
+
+ class HasManyThroughCantAssociateThroughHasOneOrManyReflection < ThroughCantAssociateThroughHasOneOrManyReflection #:nodoc:
+ end
+
+ class HasOneThroughCantAssociateThroughHasOneOrManyReflection < ThroughCantAssociateThroughHasOneOrManyReflection #:nodoc:
+ end
+
+ class HasManyThroughCantAssociateNewRecords < ActiveRecordError #:nodoc:
+ def initialize(owner = nil, reflection = nil)
+ if owner && reflection
+ super("Cannot associate new records through '#{owner.class.name}##{reflection.name}' on '#{reflection.source_reflection.class_name rescue nil}##{reflection.source_reflection.name rescue nil}'. Both records must have an id in order to create the has_many :through record associating them.")
+ else
+ super("Cannot associate new records.")
+ end
+ end
+ end
+
+ class HasManyThroughCantDissociateNewRecords < ActiveRecordError #:nodoc:
+ def initialize(owner = nil, reflection = nil)
+ if owner && reflection
+ super("Cannot dissociate new records through '#{owner.class.name}##{reflection.name}' on '#{reflection.source_reflection.class_name rescue nil}##{reflection.source_reflection.name rescue nil}'. Both records must have an id in order to delete the has_many :through record associating them.")
+ else
+ super("Cannot dissociate new records.")
+ end
+ end
+ end
+
+ class ThroughNestedAssociationsAreReadonly < ActiveRecordError #:nodoc:
+ def initialize(owner = nil, reflection = nil)
+ if owner && reflection
+ super("Cannot modify association '#{owner.class.name}##{reflection.name}' because it goes through more than one other association.")
+ else
+ super("Through nested associations are read-only.")
+ end
+ end
+ end
+
+ class HasManyThroughNestedAssociationsAreReadonly < ThroughNestedAssociationsAreReadonly #:nodoc:
+ end
+
+ class HasOneThroughNestedAssociationsAreReadonly < ThroughNestedAssociationsAreReadonly #:nodoc:
+ end
+
+ # This error is raised when trying to eager load a polymorphic association using a JOIN.
+ # Eager loading polymorphic associations is only possible with
+ # {ActiveRecord::Relation#preload}[rdoc-ref:QueryMethods#preload].
+ class EagerLoadPolymorphicError < ActiveRecordError
+ def initialize(reflection = nil)
+ if reflection
+ super("Cannot eagerly load the polymorphic association #{reflection.name.inspect}")
+ else
+ super("Eager load polymorphic error.")
+ end
+ end
+ end
+
+ class ReadOnlyAssociation < ActiveRecordError #:nodoc:
+ def initialize(reflection = nil)
+ if reflection
+ super("Cannot add to a has_many :through association. Try adding to #{reflection.through_reflection.name.inspect}.")
+ else
+ super("Read-only reflection error.")
+ end
+ end
+ end
+
+ # This error is raised when trying to destroy a parent instance in N:1 or 1:1 associations
+ # (has_many, has_one) when there is at least 1 child associated instance.
+ # ex: if @project.tasks.size > 0, DeleteRestrictionError will be raised when trying to destroy @project
+ class DeleteRestrictionError < ActiveRecordError #:nodoc:
+ def initialize(name = nil)
+ if name
+ super("Cannot delete record because of dependent #{name}")
+ else
+ super("Delete restriction error.")
+ end
+ end
+ end
+
+ # See ActiveRecord::Associations::ClassMethods for documentation.
+ module Associations # :nodoc:
+ extend ActiveSupport::Autoload
+ extend ActiveSupport::Concern
+
+ # These classes will be loaded when associations are created.
+ # So there is no need to eager load them.
+ autoload :Association
+ autoload :SingularAssociation
+ autoload :CollectionAssociation
+ autoload :ForeignAssociation
+ autoload :CollectionProxy
+ autoload :ThroughAssociation
+
+ module Builder #:nodoc:
+ autoload :Association, "active_record/associations/builder/association"
+ autoload :SingularAssociation, "active_record/associations/builder/singular_association"
+ autoload :CollectionAssociation, "active_record/associations/builder/collection_association"
+
+ autoload :BelongsTo, "active_record/associations/builder/belongs_to"
+ autoload :HasOne, "active_record/associations/builder/has_one"
+ autoload :HasMany, "active_record/associations/builder/has_many"
+ autoload :HasAndBelongsToMany, "active_record/associations/builder/has_and_belongs_to_many"
+ end
+
+ eager_autoload do
+ autoload :BelongsToAssociation
+ autoload :BelongsToPolymorphicAssociation
+ autoload :HasManyAssociation
+ autoload :HasManyThroughAssociation
+ autoload :HasOneAssociation
+ autoload :HasOneThroughAssociation
+
+ autoload :Preloader
+ autoload :JoinDependency
+ autoload :AssociationScope
+ autoload :AliasTracker
+ end
+
+ def self.eager_load!
+ super
+ Preloader.eager_load!
+ end
+
+ # Returns the association instance for the given name, instantiating it if it doesn't already exist
+ def association(name) #:nodoc:
+ association = association_instance_get(name)
+
+ if association.nil?
+ unless reflection = self.class._reflect_on_association(name)
+ raise AssociationNotFoundError.new(self, name)
+ end
+ association = reflection.association_class.new(self, reflection)
+ association_instance_set(name, association)
+ end
+
+ association
+ end
+
+ def association_cached?(name) # :nodoc:
+ @association_cache.key?(name)
+ end
+
+ def initialize_dup(*) # :nodoc:
+ @association_cache = {}
+ super
+ end
+
+ def reload(*) # :nodoc:
+ clear_association_cache
+ super
+ end
+
+ private
+ # Clears out the association cache.
+ def clear_association_cache
+ @association_cache.clear if persisted?
+ end
+
+ def init_internals
+ @association_cache = {}
+ super
+ end
+
+ # Returns the specified association instance if it exists, +nil+ otherwise.
+ def association_instance_get(name)
+ @association_cache[name]
+ end
+
+ # Set the specified association instance.
+ def association_instance_set(name, association)
+ @association_cache[name] = association
+ end
+
+ # \Associations are a set of macro-like class methods for tying objects together through
+ # foreign keys. They express relationships like "Project has one Project Manager"
+ # or "Project belongs to a Portfolio". Each macro adds a number of methods to the
+ # class which are specialized according to the collection or association symbol and the
+ # options hash. It works much the same way as Ruby's own <tt>attr*</tt>
+ # methods.
+ #
+ # class Project < ActiveRecord::Base
+ # belongs_to :portfolio
+ # has_one :project_manager
+ # has_many :milestones
+ # has_and_belongs_to_many :categories
+ # end
+ #
+ # The project class now has the following methods (and more) to ease the traversal and
+ # manipulation of its relationships:
+ # * <tt>Project#portfolio, Project#portfolio=(portfolio), Project#portfolio.nil?</tt>
+ # * <tt>Project#project_manager, Project#project_manager=(project_manager), Project#project_manager.nil?,</tt>
+ # * <tt>Project#milestones.empty?, Project#milestones.size, Project#milestones, Project#milestones<<(milestone),</tt>
+ # <tt>Project#milestones.delete(milestone), Project#milestones.destroy(milestone), Project#milestones.find(milestone_id),</tt>
+ # <tt>Project#milestones.build, Project#milestones.create</tt>
+ # * <tt>Project#categories.empty?, Project#categories.size, Project#categories, Project#categories<<(category1),</tt>
+ # <tt>Project#categories.delete(category1), Project#categories.destroy(category1)</tt>
+ #
+ # === A word of warning
+ #
+ # Don't create associations that have the same name as {instance methods}[rdoc-ref:ActiveRecord::Core] of
+ # <tt>ActiveRecord::Base</tt>. Since the association adds a method with that name to
+ # its model, using an association with the same name as one provided by <tt>ActiveRecord::Base</tt> will override the method inherited through <tt>ActiveRecord::Base</tt> and will break things.
+ # For instance, +attributes+ and +connection+ would be bad choices for association names, because those names already exist in the list of <tt>ActiveRecord::Base</tt> instance methods.
+ #
+ # == Auto-generated methods
+ # See also Instance Public methods below for more details.
+ #
+ # === Singular associations (one-to-one)
+ # | | belongs_to |
+ # generated methods | belongs_to | :polymorphic | has_one
+ # ----------------------------------+------------+--------------+---------
+ # other | X | X | X
+ # other=(other) | X | X | X
+ # build_other(attributes={}) | X | | X
+ # create_other(attributes={}) | X | | X
+ # create_other!(attributes={}) | X | | X
+ #
+ # === Collection associations (one-to-many / many-to-many)
+ # | | | has_many
+ # generated methods | habtm | has_many | :through
+ # ----------------------------------+-------+----------+----------
+ # others | X | X | X
+ # others=(other,other,...) | X | X | X
+ # other_ids | X | X | X
+ # other_ids=(id,id,...) | X | X | X
+ # others<< | X | X | X
+ # others.push | X | X | X
+ # others.concat | X | X | X
+ # others.build(attributes={}) | X | X | X
+ # others.create(attributes={}) | X | X | X
+ # others.create!(attributes={}) | X | X | X
+ # others.size | X | X | X
+ # others.length | X | X | X
+ # others.count | X | X | X
+ # others.sum(*args) | X | X | X
+ # others.empty? | X | X | X
+ # others.clear | X | X | X
+ # others.delete(other,other,...) | X | X | X
+ # others.delete_all | X | X | X
+ # others.destroy(other,other,...) | X | X | X
+ # others.destroy_all | X | X | X
+ # others.find(*args) | X | X | X
+ # others.exists? | X | X | X
+ # others.distinct | X | X | X
+ # others.reset | X | X | X
+ #
+ # === Overriding generated methods
+ #
+ # Association methods are generated in a module included into the model
+ # class, making overrides easy. The original generated method can thus be
+ # called with +super+:
+ #
+ # class Car < ActiveRecord::Base
+ # belongs_to :owner
+ # belongs_to :old_owner
+ #
+ # def owner=(new_owner)
+ # self.old_owner = self.owner
+ # super
+ # end
+ # end
+ #
+ # The association methods module is included immediately after the
+ # generated attributes methods module, meaning an association will
+ # override the methods for an attribute with the same name.
+ #
+ # == Cardinality and associations
+ #
+ # Active Record associations can be used to describe one-to-one, one-to-many and many-to-many
+ # relationships between models. Each model uses an association to describe its role in
+ # the relation. The #belongs_to association is always used in the model that has
+ # the foreign key.
+ #
+ # === One-to-one
+ #
+ # Use #has_one in the base, and #belongs_to in the associated model.
+ #
+ # class Employee < ActiveRecord::Base
+ # has_one :office
+ # end
+ # class Office < ActiveRecord::Base
+ # belongs_to :employee # foreign key - employee_id
+ # end
+ #
+ # === One-to-many
+ #
+ # Use #has_many in the base, and #belongs_to in the associated model.
+ #
+ # class Manager < ActiveRecord::Base
+ # has_many :employees
+ # end
+ # class Employee < ActiveRecord::Base
+ # belongs_to :manager # foreign key - manager_id
+ # end
+ #
+ # === Many-to-many
+ #
+ # There are two ways to build a many-to-many relationship.
+ #
+ # The first way uses a #has_many association with the <tt>:through</tt> option and a join model, so
+ # there are two stages of associations.
+ #
+ # class Assignment < ActiveRecord::Base
+ # belongs_to :programmer # foreign key - programmer_id
+ # belongs_to :project # foreign key - project_id
+ # end
+ # class Programmer < ActiveRecord::Base
+ # has_many :assignments
+ # has_many :projects, through: :assignments
+ # end
+ # class Project < ActiveRecord::Base
+ # has_many :assignments
+ # has_many :programmers, through: :assignments
+ # end
+ #
+ # For the second way, use #has_and_belongs_to_many in both models. This requires a join table
+ # that has no corresponding model or primary key.
+ #
+ # class Programmer < ActiveRecord::Base
+ # has_and_belongs_to_many :projects # foreign keys in the join table
+ # end
+ # class Project < ActiveRecord::Base
+ # has_and_belongs_to_many :programmers # foreign keys in the join table
+ # end
+ #
+ # Choosing which way to build a many-to-many relationship is not always simple.
+ # If you need to work with the relationship model as its own entity,
+ # use #has_many <tt>:through</tt>. Use #has_and_belongs_to_many when working with legacy schemas or when
+ # you never work directly with the relationship itself.
+ #
+ # == Is it a #belongs_to or #has_one association?
+ #
+ # Both express a 1-1 relationship. The difference is mostly where to place the foreign
+ # key, which goes on the table for the class declaring the #belongs_to relationship.
+ #
+ # class User < ActiveRecord::Base
+ # # I reference an account.
+ # belongs_to :account
+ # end
+ #
+ # class Account < ActiveRecord::Base
+ # # One user references me.
+ # has_one :user
+ # end
+ #
+ # The tables for these classes could look something like:
+ #
+ # CREATE TABLE users (
+ # id int NOT NULL auto_increment,
+ # account_id int default NULL,
+ # name varchar default NULL,
+ # PRIMARY KEY (id)
+ # )
+ #
+ # CREATE TABLE accounts (
+ # id int NOT NULL auto_increment,
+ # name varchar default NULL,
+ # PRIMARY KEY (id)
+ # )
+ #
+ # == Unsaved objects and associations
+ #
+ # You can manipulate objects and associations before they are saved to the database, but
+ # there is some special behavior you should be aware of, mostly involving the saving of
+ # associated objects.
+ #
+ # You can set the <tt>:autosave</tt> option on a #has_one, #belongs_to,
+ # #has_many, or #has_and_belongs_to_many association. Setting it
+ # to +true+ will _always_ save the members, whereas setting it to +false+ will
+    # _never_ save the members. More details about the <tt>:autosave</tt> option are available at
+    # AutosaveAssociation.
+ #
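+    # For example, a minimal sketch (the model and association names are illustrative):
+    #
+    #   class Post < ActiveRecord::Base
+    #     has_many :comments, autosave: true    # members are always saved when the post is saved
+    #     has_one  :summary,  autosave: false   # the associated summary is never saved automatically
+    #   end
+    #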
+ # === One-to-one associations
+ #
+ # * Assigning an object to a #has_one association automatically saves that object and
+ # the object being replaced (if there is one), in order to update their foreign
+ # keys - except if the parent object is unsaved (<tt>new_record? == true</tt>).
+    # * If either of these saves fails (due to one of the objects being invalid), an
+ # ActiveRecord::RecordNotSaved exception is raised and the assignment is
+ # cancelled.
+    # * If you wish to assign an object to a #has_one association without saving it,
+    #   use the <tt>#build_association</tt> method (documented below, and sketched after
+    #   this list). The object being replaced will still be saved to update its foreign key.
+ # * Assigning an object to a #belongs_to association does not save the object, since
+ # the foreign key field belongs on the parent. It does not save the parent either.
+ #
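+    # To illustrate the points above, a sketch (the Account and Beneficiary models and the
+    # +name+ attribute are illustrative):
+    #
+    #   account.beneficiary = Beneficiary.new(name: "Pat")  # saved immediately, since account is persisted
+    #   account.build_beneficiary(name: "Pat")              # built, but not yet saved
+    #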
+ # === Collections
+ #
+ # * Adding an object to a collection (#has_many or #has_and_belongs_to_many) automatically
+ # saves that object, except if the parent object (the owner of the collection) is not yet
+ # stored in the database.
+ # * If saving any of the objects being added to a collection (via <tt>push</tt> or similar)
+ # fails, then <tt>push</tt> returns +false+.
+ # * If saving fails while replacing the collection (via <tt>association=</tt>), an
+ # ActiveRecord::RecordNotSaved exception is raised and the assignment is
+ # cancelled.
+    # * You can add an object to a collection without automatically saving it by using the
+    #   <tt>collection.build</tt> method (documented below, and sketched after this list).
+ # * All unsaved (<tt>new_record? == true</tt>) members of the collection are automatically
+ # saved when the parent is saved.
+ #
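+    # A short sketch of the last two points (the Firm and Client models and the +name+
+    # attribute are illustrative):
+    #
+    #   firm   = Firm.new(name: "ACME")               # unsaved parent
+    #   client = firm.clients.build(name: "Initech")  # built, but not yet saved
+    #   firm.save                                     # saves the firm and the built client together
+    #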
+ # == Customizing the query
+ #
+ # \Associations are built from <tt>Relation</tt> objects, and you can use the Relation syntax
+ # to customize them. For example, to add a condition:
+ #
+ # class Blog < ActiveRecord::Base
+ # has_many :published_posts, -> { where(published: true) }, class_name: 'Post'
+ # end
+ #
+ # Inside the <tt>-> { ... }</tt> block you can use all of the usual Relation methods.
+ #
+ # === Accessing the owner object
+ #
+ # Sometimes it is useful to have access to the owner object when building the query. The owner
+ # is passed as a parameter to the block. For example, the following association would find all
+ # events that occur on the user's birthday:
+ #
+ # class User < ActiveRecord::Base
+ # has_many :birthday_events, ->(user) { where(starts_on: user.birthday) }, class_name: 'Event'
+ # end
+ #
+ # Note: Joining, eager loading and preloading of these associations is not fully possible.
+ # These operations happen before instance creation and the scope will be called with a +nil+ argument.
+ # This can lead to unexpected behavior and is deprecated.
+ #
+ # == Association callbacks
+ #
+ # Similar to the normal callbacks that hook into the life cycle of an Active Record object,
+ # you can also define callbacks that get triggered when you add an object to or remove an
+ # object from an association collection.
+ #
+    #   class Project < ActiveRecord::Base
+ # has_and_belongs_to_many :developers, after_add: :evaluate_velocity
+ #
+ # def evaluate_velocity(developer)
+ # ...
+ # end
+ # end
+ #
+ # It's possible to stack callbacks by passing them as an array. Example:
+ #
+    #   class Project < ActiveRecord::Base
+ # has_and_belongs_to_many :developers,
+ # after_add: [:evaluate_velocity, Proc.new { |p, d| p.shipping_date = Time.now}]
+ # end
+ #
+ # Possible callbacks are: +before_add+, +after_add+, +before_remove+ and +after_remove+.
+ #
+    # If any of the +before_add+ callbacks raises an exception, the object will not be
+ # added to the collection.
+ #
+    # Similarly, if any of the +before_remove+ callbacks raises an exception, the object
+ # will not be removed from the collection.
+ #
+ # == Association extensions
+ #
+ # The proxy objects that control the access to associations can be extended through anonymous
+ # modules. This is especially beneficial for adding new finders, creators, and other
+ # factory-type methods that are only used as part of this association.
+ #
+ # class Account < ActiveRecord::Base
+ # has_many :people do
+ # def find_or_create_by_name(name)
+ # first_name, last_name = name.split(" ", 2)
+ # find_or_create_by(first_name: first_name, last_name: last_name)
+ # end
+ # end
+ # end
+ #
+ # person = Account.first.people.find_or_create_by_name("David Heinemeier Hansson")
+ # person.first_name # => "David"
+ # person.last_name # => "Heinemeier Hansson"
+ #
+ # If you need to share the same extensions between many associations, you can use a named
+ # extension module.
+ #
+ # module FindOrCreateByNameExtension
+ # def find_or_create_by_name(name)
+ # first_name, last_name = name.split(" ", 2)
+ # find_or_create_by(first_name: first_name, last_name: last_name)
+ # end
+ # end
+ #
+ # class Account < ActiveRecord::Base
+ # has_many :people, -> { extending FindOrCreateByNameExtension }
+ # end
+ #
+ # class Company < ActiveRecord::Base
+ # has_many :people, -> { extending FindOrCreateByNameExtension }
+ # end
+ #
+ # Some extensions can only be made to work with knowledge of the association's internals.
+ # Extensions can access relevant state using the following methods (where +items+ is the
+ # name of the association):
+ #
+ # * <tt>record.association(:items).owner</tt> - Returns the object the association is part of.
+ # * <tt>record.association(:items).reflection</tt> - Returns the reflection object that describes the association.
+ # * <tt>record.association(:items).target</tt> - Returns the associated object for #belongs_to and #has_one, or
+ # the collection of associated objects for #has_many and #has_and_belongs_to_many.
+ #
+ # However, inside the actual extension code, you will not have access to the <tt>record</tt> as
+ # above. In this case, you can access <tt>proxy_association</tt>. For example,
+ # <tt>record.association(:items)</tt> and <tt>record.items.proxy_association</tt> will return
+ # the same object, allowing you to make calls like <tt>proxy_association.owner</tt> inside
+ # association extensions.
+ #
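+    # For example, a hypothetical extension that queries based on the owner (assuming both
+    # tables have a +country+ column):
+    #
+    #   class Account < ActiveRecord::Base
+    #     has_many :people do
+    #       def in_owner_country
+    #         # proxy_association.owner is the Account instance this collection belongs to
+    #         where(country: proxy_association.owner.country)
+    #       end
+    #     end
+    #   end
+    #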
+ # == Association Join Models
+ #
+ # Has Many associations can be configured with the <tt>:through</tt> option to use an
+ # explicit join model to retrieve the data. This operates similarly to a
+ # #has_and_belongs_to_many association. The advantage is that you're able to add validations,
+ # callbacks, and extra attributes on the join model. Consider the following schema:
+ #
+ # class Author < ActiveRecord::Base
+ # has_many :authorships
+ # has_many :books, through: :authorships
+ # end
+ #
+ # class Authorship < ActiveRecord::Base
+ # belongs_to :author
+ # belongs_to :book
+ # end
+ #
+ # @author = Author.first
+ # @author.authorships.collect { |a| a.book } # selects all books that the author's authorships belong to
+ # @author.books # selects all books by using the Authorship join model
+ #
+ # You can also go through a #has_many association on the join model:
+ #
+ # class Firm < ActiveRecord::Base
+ # has_many :clients
+ # has_many :invoices, through: :clients
+ # end
+ #
+ # class Client < ActiveRecord::Base
+ # belongs_to :firm
+ # has_many :invoices
+ # end
+ #
+ # class Invoice < ActiveRecord::Base
+ # belongs_to :client
+ # end
+ #
+ # @firm = Firm.first
+ # @firm.clients.flat_map { |c| c.invoices } # select all invoices for all clients of the firm
+ # @firm.invoices # selects all invoices by going through the Client join model
+ #
+ # Similarly you can go through a #has_one association on the join model:
+ #
+ # class Group < ActiveRecord::Base
+ # has_many :users
+ # has_many :avatars, through: :users
+ # end
+ #
+ # class User < ActiveRecord::Base
+ # belongs_to :group
+ # has_one :avatar
+ # end
+ #
+ # class Avatar < ActiveRecord::Base
+ # belongs_to :user
+ # end
+ #
+ # @group = Group.first
+ # @group.users.collect { |u| u.avatar }.compact # select all avatars for all users in the group
+ # @group.avatars # selects all avatars by going through the User join model.
+ #
+ # An important caveat with going through #has_one or #has_many associations on the
+ # join model is that these associations are *read-only*. For example, the following
+ # would not work following the previous example:
+ #
+ # @group.avatars << Avatar.new # this would work if User belonged_to Avatar rather than the other way around
+ # @group.avatars.delete(@group.avatars.last) # so would this
+ #
+ # == Setting Inverses
+ #
+ # If you are using a #belongs_to on the join model, it is a good idea to set the
+ # <tt>:inverse_of</tt> option on the #belongs_to, which will mean that the following example
+ # works correctly (where <tt>tags</tt> is a #has_many <tt>:through</tt> association):
+ #
+ # @post = Post.first
+ # @tag = @post.tags.build name: "ruby"
+ # @tag.save
+ #
+ # The last line ought to save the through record (a <tt>Tagging</tt>). This will only work if the
+ # <tt>:inverse_of</tt> is set:
+ #
+ # class Tagging < ActiveRecord::Base
+ # belongs_to :post
+ # belongs_to :tag, inverse_of: :taggings
+ # end
+ #
+    # If you do not set the <tt>:inverse_of</tt> option, the association will
+ # do its best to match itself up with the correct inverse. Automatic
+ # inverse detection only works on #has_many, #has_one, and
+ # #belongs_to associations.
+ #
+ # Extra options on the associations, as defined in the
+ # <tt>AssociationReflection::INVALID_AUTOMATIC_INVERSE_OPTIONS</tt> constant, will
+ # also prevent the association's inverse from being found automatically.
+ #
+ # The automatic guessing of the inverse association uses a heuristic based
+ # on the name of the class, so it may not work for all associations,
+ # especially the ones with non-standard names.
+ #
+ # You can turn off the automatic detection of inverse associations by setting
+ # the <tt>:inverse_of</tt> option to <tt>false</tt> like so:
+ #
+ # class Tagging < ActiveRecord::Base
+ # belongs_to :tag, inverse_of: false
+ # end
+ #
+ # == Nested \Associations
+ #
+ # You can actually specify *any* association with the <tt>:through</tt> option, including an
+ # association which has a <tt>:through</tt> option itself. For example:
+ #
+ # class Author < ActiveRecord::Base
+ # has_many :posts
+ # has_many :comments, through: :posts
+ # has_many :commenters, through: :comments
+ # end
+ #
+ # class Post < ActiveRecord::Base
+ # has_many :comments
+ # end
+ #
+ # class Comment < ActiveRecord::Base
+ # belongs_to :commenter
+ # end
+ #
+ # @author = Author.first
+ # @author.commenters # => People who commented on posts written by the author
+ #
+    # An equivalent way of setting up this association would be:
+ #
+ # class Author < ActiveRecord::Base
+ # has_many :posts
+ # has_many :commenters, through: :posts
+ # end
+ #
+ # class Post < ActiveRecord::Base
+ # has_many :comments
+ # has_many :commenters, through: :comments
+ # end
+ #
+ # class Comment < ActiveRecord::Base
+ # belongs_to :commenter
+ # end
+ #
+ # When using a nested association, you will not be able to modify the association because there
+ # is not enough information to know what modification to make. For example, if you tried to
+ # add a <tt>Commenter</tt> in the example above, there would be no way to tell how to set up the
+ # intermediate <tt>Post</tt> and <tt>Comment</tt> objects.
+ #
+ # == Polymorphic \Associations
+ #
+ # Polymorphic associations on models are not restricted on what types of models they
+ # can be associated with. Rather, they specify an interface that a #has_many association
+ # must adhere to.
+ #
+ # class Asset < ActiveRecord::Base
+ # belongs_to :attachable, polymorphic: true
+ # end
+ #
+ # class Post < ActiveRecord::Base
+ # has_many :assets, as: :attachable # The :as option specifies the polymorphic interface to use.
+ # end
+ #
+ # @asset.attachable = @post
+ #
+ # This works by using a type column in addition to a foreign key to specify the associated
+ # record. In the Asset example, you'd need an +attachable_id+ integer column and an
+ # +attachable_type+ string column.
+ #
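+    # A migration sketch for the columns described above (<tt>t.references :attachable,
+    # polymorphic: true</tt> is the equivalent shorthand):
+    #
+    #   create_table :assets do |t|
+    #     t.integer :attachable_id
+    #     t.string  :attachable_type
+    #   end
+    #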
+ # Using polymorphic associations in combination with single table inheritance (STI) is
+ # a little tricky. In order for the associations to work as expected, ensure that you
+ # store the base model for the STI models in the type column of the polymorphic
+ # association. To continue with the asset example above, suppose there are guest posts
+ # and member posts that use the posts table for STI. In this case, there must be a +type+
+ # column in the posts table.
+ #
+    # Note: The <tt>attachable_type=</tt> method is called when assigning an +attachable+.
+ # The +class_name+ of the +attachable+ is passed as a String.
+ #
+ # class Asset < ActiveRecord::Base
+ # belongs_to :attachable, polymorphic: true
+ #
+ # def attachable_type=(class_name)
+ # super(class_name.constantize.base_class.to_s)
+ # end
+ # end
+ #
+ # class Post < ActiveRecord::Base
+    #     # because we store "Post" in attachable_type, dependent: :destroy will now work
+ # has_many :assets, as: :attachable, dependent: :destroy
+ # end
+ #
+ # class GuestPost < Post
+ # end
+ #
+ # class MemberPost < Post
+ # end
+ #
+ # == Caching
+ #
+ # All of the methods are built on a simple caching principle that will keep the result
+ # of the last query around unless specifically instructed not to. The cache is even
+ # shared across methods to make it even cheaper to use the macro-added methods without
+ # worrying too much about performance at the first go.
+ #
+ # project.milestones # fetches milestones from the database
+ # project.milestones.size # uses the milestone cache
+ # project.milestones.empty? # uses the milestone cache
+ # project.milestones(true).size # fetches milestones from the database
+ # project.milestones # uses the milestone cache
+ #
+ # == Eager loading of associations
+ #
+ # Eager loading is a way to find objects of a certain class and a number of named associations.
+ # It is one of the easiest ways to prevent the dreaded N+1 problem in which fetching 100
+ # posts that each need to display their author triggers 101 database queries. Through the
+ # use of eager loading, the number of queries will be reduced from 101 to 2.
+ #
+ # class Post < ActiveRecord::Base
+ # belongs_to :author
+ # has_many :comments
+ # end
+ #
+ # Consider the following loop using the class above:
+ #
+ # Post.all.each do |post|
+ # puts "Post: " + post.title
+ # puts "Written by: " + post.author.name
+ # puts "Last comment on: " + post.comments.first.created_on
+ # end
+ #
+ # To iterate over these one hundred posts, we'll generate 201 database queries. Let's
+ # first just optimize it for retrieving the author:
+ #
+ # Post.includes(:author).each do |post|
+ #
+    # This references the name of the #belongs_to association that also used the <tt>:author</tt>
+    # symbol. After loading the posts, Active Record will collect the +author_id+ from each one and load
+    # all of the referenced authors with one query. Doing so will cut down the number of queries
+    # from 201 to 102.
+ #
+ # We can improve upon the situation further by referencing both associations in the finder with:
+ #
+ # Post.includes(:author, :comments).each do |post|
+ #
+ # This will load all comments with a single query. This reduces the total number of queries
+ # to 3. In general, the number of queries will be 1 plus the number of associations
+ # named (except if some of the associations are polymorphic #belongs_to - see below).
+ #
+ # To include a deep hierarchy of associations, use a hash:
+ #
+ # Post.includes(:author, { comments: { author: :gravatar } }).each do |post|
+ #
+ # The above code will load all the comments and all of their associated
+ # authors and gravatars. You can mix and match any combination of symbols,
+ # arrays, and hashes to retrieve the associations you want to load.
+ #
+ # All of this power shouldn't fool you into thinking that you can pull out huge amounts
+ # of data with no performance penalty just because you've reduced the number of queries.
+ # The database still needs to send all the data to Active Record and it still needs to
+ # be processed. So it's no catch-all for performance problems, but it's a great way to
+    # cut down on the number of queries in a situation like the one described above.
+ #
+ # Since only one table is loaded at a time, conditions or orders cannot reference tables
+ # other than the main one. If this is the case, Active Record falls back to the previously
+ # used <tt>LEFT OUTER JOIN</tt> based strategy. For example:
+ #
+ # Post.includes([:author, :comments]).where(['comments.approved = ?', true])
+ #
+ # This will result in a single SQL query with joins along the lines of:
+ # <tt>LEFT OUTER JOIN comments ON comments.post_id = posts.id</tt> and
+ # <tt>LEFT OUTER JOIN authors ON authors.id = posts.author_id</tt>. Note that using conditions
+ # like this can have unintended consequences.
+ # In the above example, posts with no approved comments are not returned at all because
+ # the conditions apply to the SQL statement as a whole and not just to the association.
+ #
+ # You must disambiguate column references for this fallback to happen, for example
+ # <tt>order: "author.name DESC"</tt> will work but <tt>order: "name DESC"</tt> will not.
+ #
+ # If you want to load all posts (including posts with no approved comments), then write
+ # your own <tt>LEFT OUTER JOIN</tt> query using <tt>ON</tt>:
+ #
+ # Post.joins("LEFT OUTER JOIN comments ON comments.post_id = posts.id AND comments.approved = '1'")
+ #
+ # In this case, it is usually more natural to include an association which has conditions defined on it:
+ #
+ # class Post < ActiveRecord::Base
+ # has_many :approved_comments, -> { where(approved: true) }, class_name: 'Comment'
+ # end
+ #
+ # Post.includes(:approved_comments)
+ #
+ # This will load posts and eager load the +approved_comments+ association, which contains
+ # only those comments that have been approved.
+ #
+ # If you eager load an association with a specified <tt>:limit</tt> option, it will be ignored,
+ # returning all the associated objects:
+ #
+ # class Picture < ActiveRecord::Base
+ # has_many :most_recent_comments, -> { order('id DESC').limit(10) }, class_name: 'Comment'
+ # end
+ #
+ # Picture.includes(:most_recent_comments).first.most_recent_comments # => returns all associated comments.
+ #
+ # Eager loading is supported with polymorphic associations.
+ #
+ # class Address < ActiveRecord::Base
+ # belongs_to :addressable, polymorphic: true
+ # end
+ #
+    # A call that tries to eager load the addressable model:
+ #
+ # Address.includes(:addressable)
+ #
+ # This will execute one query to load the addresses and load the addressables with one
+ # query per addressable type.
+ # For example, if all the addressables are either of class Person or Company, then a total
+ # of 3 queries will be executed. The list of addressable types to load is determined on
+    # the back of the addresses loaded. This is not supported if Active Record has to fall back
+    # to the previous implementation of eager loading and will raise ActiveRecord::EagerLoadPolymorphicError.
+ # The reason is that the parent model's type is a column value so its corresponding table
+ # name cannot be put in the +FROM+/+JOIN+ clauses of that query.
+ #
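+    # For example, forcing the join-based strategy on the polymorphic association raises
+    # the error:
+    #
+    #   Address.eager_load(:addressable).to_a
+    #   # => raises ActiveRecord::EagerLoadPolymorphicError when the query runs
+    #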
+ # == Table Aliasing
+ #
+ # Active Record uses table aliasing in the case that a table is referenced multiple times
+ # in a join. If a table is referenced only once, the standard table name is used. The
+ # second time, the table is aliased as <tt>#{reflection_name}_#{parent_table_name}</tt>.
+ # Indexes are appended for any more successive uses of the table name.
+ #
+ # Post.joins(:comments)
+ # # => SELECT ... FROM posts INNER JOIN comments ON ...
+ # Post.joins(:special_comments) # STI
+ # # => SELECT ... FROM posts INNER JOIN comments ON ... AND comments.type = 'SpecialComment'
+ # Post.joins(:comments, :special_comments) # special_comments is the reflection name, posts is the parent table name
+ # # => SELECT ... FROM posts INNER JOIN comments ON ... INNER JOIN comments special_comments_posts
+ #
+ # Acts as tree example:
+ #
+ # TreeMixin.joins(:children)
+ # # => SELECT ... FROM mixins INNER JOIN mixins childrens_mixins ...
+ # TreeMixin.joins(children: :parent)
+ # # => SELECT ... FROM mixins INNER JOIN mixins childrens_mixins ...
+ # INNER JOIN parents_mixins ...
+ # TreeMixin.joins(children: {parent: :children})
+ # # => SELECT ... FROM mixins INNER JOIN mixins childrens_mixins ...
+ # INNER JOIN parents_mixins ...
+ # INNER JOIN mixins childrens_mixins_2
+ #
+ # Has and Belongs to Many join tables use the same idea, but add a <tt>_join</tt> suffix:
+ #
+ # Post.joins(:categories)
+ # # => SELECT ... FROM posts INNER JOIN categories_posts ... INNER JOIN categories ...
+ # Post.joins(categories: :posts)
+ # # => SELECT ... FROM posts INNER JOIN categories_posts ... INNER JOIN categories ...
+ # INNER JOIN categories_posts posts_categories_join INNER JOIN posts posts_categories
+ # Post.joins(categories: {posts: :categories})
+ # # => SELECT ... FROM posts INNER JOIN categories_posts ... INNER JOIN categories ...
+ # INNER JOIN categories_posts posts_categories_join INNER JOIN posts posts_categories
+ # INNER JOIN categories_posts categories_posts_join INNER JOIN categories categories_posts_2
+ #
+ # If you wish to specify your own custom joins using ActiveRecord::QueryMethods#joins method, those table
+ # names will take precedence over the eager associations:
+ #
+ # Post.joins(:comments).joins("inner join comments ...")
+ # # => SELECT ... FROM posts INNER JOIN comments_posts ON ... INNER JOIN comments ...
+ # Post.joins(:comments, :special_comments).joins("inner join comments ...")
+ # # => SELECT ... FROM posts INNER JOIN comments comments_posts ON ...
+ # INNER JOIN comments special_comments_posts ...
+ # INNER JOIN comments ...
+ #
+    # Table aliases are automatically truncated to the maximum length of table identifiers
+    # allowed by the specific database.
+ #
+ # == Modules
+ #
+ # By default, associations will look for objects within the current module scope. Consider:
+ #
+ # module MyApplication
+ # module Business
+ # class Firm < ActiveRecord::Base
+ # has_many :clients
+ # end
+ #
+ # class Client < ActiveRecord::Base; end
+ # end
+ # end
+ #
+    # When <tt>Firm#clients</tt> is called, it will in turn call
+    # <tt>MyApplication::Business::Client.where(firm_id: firm.id)</tt>.
+ # If you want to associate with a class in another module scope, this can be done by
+ # specifying the complete class name.
+ #
+ # module MyApplication
+ # module Business
+ # class Firm < ActiveRecord::Base; end
+ # end
+ #
+ # module Billing
+ # class Account < ActiveRecord::Base
+ # belongs_to :firm, class_name: "MyApplication::Business::Firm"
+ # end
+ # end
+ # end
+ #
+ # == Bi-directional associations
+ #
+ # When you specify an association, there is usually an association on the associated model
+ # that specifies the same relationship in reverse. For example, with the following models:
+ #
+ # class Dungeon < ActiveRecord::Base
+ # has_many :traps
+ # has_one :evil_wizard
+ # end
+ #
+ # class Trap < ActiveRecord::Base
+ # belongs_to :dungeon
+ # end
+ #
+ # class EvilWizard < ActiveRecord::Base
+ # belongs_to :dungeon
+ # end
+ #
+ # The +traps+ association on +Dungeon+ and the +dungeon+ association on +Trap+ are
+ # the inverse of each other, and the inverse of the +dungeon+ association on +EvilWizard+
+ # is the +evil_wizard+ association on +Dungeon+ (and vice-versa). By default,
+ # Active Record can guess the inverse of the association based on the name
+ # of the class. The result is the following:
+ #
+ # d = Dungeon.first
+ # t = d.traps.first
+ # d.object_id == t.dungeon.object_id # => true
+ #
+ # The +Dungeon+ instances +d+ and <tt>t.dungeon</tt> in the above example refer to
+ # the same in-memory instance since the association matches the name of the class.
+ # The result would be the same if we added +:inverse_of+ to our model definitions:
+ #
+ # class Dungeon < ActiveRecord::Base
+ # has_many :traps, inverse_of: :dungeon
+ # has_one :evil_wizard, inverse_of: :dungeon
+ # end
+ #
+ # class Trap < ActiveRecord::Base
+ # belongs_to :dungeon, inverse_of: :traps
+ # end
+ #
+ # class EvilWizard < ActiveRecord::Base
+ # belongs_to :dungeon, inverse_of: :evil_wizard
+ # end
+ #
+ # There are limitations to <tt>:inverse_of</tt> support:
+ #
+ # * does not work with <tt>:through</tt> associations.
+ # * does not work with <tt>:polymorphic</tt> associations.
+    # * for #belongs_to associations, #has_many inverse associations are ignored.
+ #
+ # For more information, see the documentation for the +:inverse_of+ option.
+ #
+ # == Deleting from associations
+ #
+ # === Dependent associations
+ #
+ # #has_many, #has_one, and #belongs_to associations support the <tt>:dependent</tt> option.
+ # This allows you to specify that associated records should be deleted when the owner is
+ # deleted.
+ #
+ # For example:
+ #
+    #   class Author < ActiveRecord::Base
+ # has_many :posts, dependent: :destroy
+ # end
+ # Author.find(1).destroy # => Will destroy all of the author's posts, too
+ #
+ # The <tt>:dependent</tt> option can have different values which specify how the deletion
+ # is done. For more information, see the documentation for this option on the different
+ # specific association types. When no option is given, the behavior is to do nothing
+ # with the associated records when destroying a record.
+ #
+ # Note that <tt>:dependent</tt> is implemented using Rails' callback
+ # system, which works by processing callbacks in order. Therefore, other
+ # callbacks declared either before or after the <tt>:dependent</tt> option
+ # can affect what it does.
+ #
+    # Note that the <tt>:dependent</tt> option is ignored for #has_one <tt>:through</tt> associations.
+ #
+ # === Delete or destroy?
+ #
+ # #has_many and #has_and_belongs_to_many associations have the methods <tt>destroy</tt>,
+ # <tt>delete</tt>, <tt>destroy_all</tt> and <tt>delete_all</tt>.
+ #
+ # For #has_and_belongs_to_many, <tt>delete</tt> and <tt>destroy</tt> are the same: they
+ # cause the records in the join table to be removed.
+ #
+ # For #has_many, <tt>destroy</tt> and <tt>destroy_all</tt> will always call the <tt>destroy</tt> method of the
+ # record(s) being removed so that callbacks are run. However <tt>delete</tt> and <tt>delete_all</tt> will either
+ # do the deletion according to the strategy specified by the <tt>:dependent</tt> option, or
+ # if no <tt>:dependent</tt> option is given, then it will follow the default strategy.
+ # The default strategy is to do nothing (leave the foreign keys with the parent ids set), except for
+ # #has_many <tt>:through</tt>, where the default strategy is <tt>delete_all</tt> (delete
+ # the join records, without running their callbacks).
+ #
+ # There is also a <tt>clear</tt> method which is the same as <tt>delete_all</tt>, except that
+ # it returns the association rather than the records which have been deleted.
+ #
+ # === What gets deleted?
+ #
+ # There is a potential pitfall here: #has_and_belongs_to_many and #has_many <tt>:through</tt>
+ # associations have records in join tables, as well as the associated records. So when we
+ # call one of these deletion methods, what exactly should be deleted?
+ #
+ # The answer is that it is assumed that deletion on an association is about removing the
+ # <i>link</i> between the owner and the associated object(s), rather than necessarily the
+ # associated objects themselves. So with #has_and_belongs_to_many and #has_many
+ # <tt>:through</tt>, the join records will be deleted, but the associated records won't.
+ #
+ # This makes sense if you think about it: if you were to call <tt>post.tags.delete(Tag.find_by(name: 'food'))</tt>
+ # you would want the 'food' tag to be unlinked from the post, rather than for the tag itself
+ # to be removed from the database.
+ #
+ # However, there are examples where this strategy doesn't make sense. For example, suppose
+ # a person has many projects, and each project has many tasks. If we deleted one of a person's
+ # tasks, we would probably not want the project to be deleted. In this scenario, the delete method
+ # won't actually work: it can only be used if the association on the join model is a
+ # #belongs_to. In other situations you are expected to perform operations directly on
+ # either the associated records or the <tt>:through</tt> association.
+ #
+ # With a regular #has_many there is no distinction between the "associated records"
+ # and the "link", so there is only one choice for what gets deleted.
+ #
+ # With #has_and_belongs_to_many and #has_many <tt>:through</tt>, if you want to delete the
+ # associated records themselves, you can always do something along the lines of
+ # <tt>person.tasks.each(&:destroy)</tt>.
+ #
+ # == Type safety with ActiveRecord::AssociationTypeMismatch
+ #
+ # If you attempt to assign an object to an association that doesn't match the inferred
+ # or specified <tt>:class_name</tt>, you'll get an ActiveRecord::AssociationTypeMismatch.
+ #
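+    # A minimal sketch (the Author and Comment models are illustrative):
+    #
+    #   class Post < ActiveRecord::Base
+    #     belongs_to :author
+    #   end
+    #
+    #   Post.new.author = Comment.new
+    #   # => raises ActiveRecord::AssociationTypeMismatch
+    #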
+ # == Options
+ #
+    # All of the association macros can be specialized through options. This makes it possible
+    # to handle cases more complex than the simple and guessable ones.
+ module ClassMethods
+ # Specifies a one-to-many association. The following methods for retrieval and query of
+ # collections of associated objects will be added:
+ #
+ # +collection+ is a placeholder for the symbol passed as the +name+ argument, so
+ # <tt>has_many :clients</tt> would add among others <tt>clients.empty?</tt>.
+ #
+ # [collection]
+ # Returns an array of all the associated objects.
+ # An empty array is returned if none are found.
+ # [collection<<(object, ...)]
+ # Adds one or more objects to the collection by setting their foreign keys to the collection's primary key.
+ # Note that this operation instantly fires update SQL without waiting for the save or update call on the
+ # parent object, unless the parent object is a new record.
+ # This will also run validations and callbacks of associated object(s).
+ # [collection.delete(object, ...)]
+ # Removes one or more objects from the collection by setting their foreign keys to +NULL+.
+ # Objects will be in addition destroyed if they're associated with <tt>dependent: :destroy</tt>,
+ # and deleted if they're associated with <tt>dependent: :delete_all</tt>.
+ #
+ # If the <tt>:through</tt> option is used, then the join records are deleted (rather than
+ # nullified) by default, but you can specify <tt>dependent: :destroy</tt> or
+ # <tt>dependent: :nullify</tt> to override this.
+ # [collection.destroy(object, ...)]
+ # Removes one or more objects from the collection by running <tt>destroy</tt> on
+ # each record, regardless of any dependent option, ensuring callbacks are run.
+ #
+ # If the <tt>:through</tt> option is used, then the join records are destroyed
+ # instead, not the objects themselves.
+ # [collection=objects]
+      #   Replaces the collection's content by deleting and adding objects as appropriate. If the <tt>:through</tt>
+ # option is true callbacks in the join models are triggered except destroy callbacks, since deletion is
+ # direct by default. You can specify <tt>dependent: :destroy</tt> or
+ # <tt>dependent: :nullify</tt> to override this.
+ # [collection_singular_ids]
+ # Returns an array of the associated objects' ids
+ # [collection_singular_ids=ids]
+ # Replace the collection with the objects identified by the primary keys in +ids+. This
+ # method loads the models and calls <tt>collection=</tt>. See above.
+ # [collection.clear]
+ # Removes every object from the collection. This destroys the associated objects if they
+ # are associated with <tt>dependent: :destroy</tt>, deletes them directly from the
+ # database if <tt>dependent: :delete_all</tt>, otherwise sets their foreign keys to +NULL+.
+ # If the <tt>:through</tt> option is true no destroy callbacks are invoked on the join models.
+ # Join models are directly deleted.
+ # [collection.empty?]
+ # Returns +true+ if there are no associated objects.
+ # [collection.size]
+ # Returns the number of associated objects.
+ # [collection.find(...)]
+ # Finds an associated object according to the same rules as ActiveRecord::FinderMethods#find.
+ # [collection.exists?(...)]
+ # Checks whether an associated object with the given conditions exists.
+ # Uses the same rules as ActiveRecord::FinderMethods#exists?.
+ # [collection.build(attributes = {}, ...)]
+ # Returns one or more new objects of the collection type that have been instantiated
+ # with +attributes+ and linked to this object through a foreign key, but have not yet
+ # been saved.
+ # [collection.create(attributes = {})]
+ # Returns a new object of the collection type that has been instantiated
+ # with +attributes+, linked to this object through a foreign key, and that has already
+ # been saved (if it passed the validation). *Note*: This only works if the base model
+ # already exists in the DB, not if it is a new (unsaved) record!
+ # [collection.create!(attributes = {})]
+ # Does the same as <tt>collection.create</tt>, but raises ActiveRecord::RecordInvalid
+ # if the record is invalid.
+ #
+ # === Example
+ #
+ # A <tt>Firm</tt> class declares <tt>has_many :clients</tt>, which will add:
+ # * <tt>Firm#clients</tt> (similar to <tt>Client.where(firm_id: id)</tt>)
+ # * <tt>Firm#clients<<</tt>
+ # * <tt>Firm#clients.delete</tt>
+ # * <tt>Firm#clients.destroy</tt>
+ # * <tt>Firm#clients=</tt>
+ # * <tt>Firm#client_ids</tt>
+ # * <tt>Firm#client_ids=</tt>
+ # * <tt>Firm#clients.clear</tt>
+ # * <tt>Firm#clients.empty?</tt> (similar to <tt>firm.clients.size == 0</tt>)
+ # * <tt>Firm#clients.size</tt> (similar to <tt>Client.count "firm_id = #{id}"</tt>)
+ # * <tt>Firm#clients.find</tt> (similar to <tt>Client.where(firm_id: id).find(id)</tt>)
+ # * <tt>Firm#clients.exists?(name: 'ACME')</tt> (similar to <tt>Client.exists?(name: 'ACME', firm_id: firm.id)</tt>)
+ # * <tt>Firm#clients.build</tt> (similar to <tt>Client.new("firm_id" => id)</tt>)
+ # * <tt>Firm#clients.create</tt> (similar to <tt>c = Client.new("firm_id" => id); c.save; c</tt>)
+ # * <tt>Firm#clients.create!</tt> (similar to <tt>c = Client.new("firm_id" => id); c.save!</tt>)
+ # The declaration can also include an +options+ hash to specialize the behavior of the association.
+ #
+ # === Scopes
+ #
+ # You can pass a second argument +scope+ as a callable (i.e. proc or
+ # lambda) to retrieve a specific set of records or customize the generated
+ # query when you access the associated collection.
+ #
+ # Scope examples:
+ # has_many :comments, -> { where(author_id: 1) }
+ # has_many :employees, -> { joins(:address) }
+ # has_many :posts, ->(blog) { where("max_post_length > ?", blog.max_post_length) }
+ #
+ # === Extensions
+ #
+ # The +extension+ argument allows you to pass a block into a has_many
+ # association. This is useful for adding new finders, creators and other
+ # factory-type methods to be used as part of the association.
+ #
+ # Extension examples:
+ # has_many :employees do
+ # def find_or_create_by_name(name)
+ # first_name, last_name = name.split(" ", 2)
+ # find_or_create_by(first_name: first_name, last_name: last_name)
+ # end
+ # end
+ #
+ # === Options
+ # [:class_name]
+ # Specify the class name of the association. Use it only if that name can't be inferred
+ # from the association name. So <tt>has_many :products</tt> will by default be linked
+ # to the +Product+ class, but if the real class name is +SpecialProduct+, you'll have to
+ # specify it with this option.
+ # [:foreign_key]
+ # Specify the foreign key used for the association. By default this is guessed to be the name
+ # of this class in lower-case and "_id" suffixed. So a Person class that makes a #has_many
+ # association will use "person_id" as the default <tt>:foreign_key</tt>.
+ # [:foreign_type]
+ # Specify the column used to store the associated object's type, if this is a polymorphic
+ # association. By default this is guessed to be the name of the polymorphic association
+ # specified on "as" option with a "_type" suffix. So a class that defines a
+ # <tt>has_many :tags, as: :taggable</tt> association will use "taggable_type" as the
+ # default <tt>:foreign_type</tt>.
+ # [:primary_key]
+ # Specify the name of the column to use as the primary key for the association. By default this is +id+.
+ # [:dependent]
+ # Controls what happens to the associated objects when
+ # their owner is destroyed. Note that these are implemented as
+ # callbacks, and Rails executes callbacks in order. Therefore, other
+ # similar callbacks may affect the <tt>:dependent</tt> behavior, and the
+ # <tt>:dependent</tt> behavior may affect other callbacks.
+ #
+ # * <tt>:destroy</tt> causes all the associated objects to also be destroyed.
+ # * <tt>:delete_all</tt> causes all the associated objects to be deleted directly from the database (so callbacks will not be executed).
+ # * <tt>:nullify</tt> causes the foreign keys to be set to +NULL+. Callbacks are not executed.
+ # * <tt>:restrict_with_exception</tt> causes an exception to be raised if there are any associated records.
+ # * <tt>:restrict_with_error</tt> causes an error to be added to the owner if there are any associated objects.
+ #
+ # If using with the <tt>:through</tt> option, the association on the join model must be
+ # a #belongs_to, and the records which get deleted are the join records, rather than
+ # the associated records.
+ #
+ # If using <tt>dependent: :destroy</tt> on a scoped association, only the scoped objects are destroyed.
+ # For example, if a Post model defines
+ # <tt>has_many :comments, -> { where published: true }, dependent: :destroy</tt> and <tt>destroy</tt> is
+ # called on a post, only published comments are destroyed. This means that any unpublished comments in the
+ # database would still contain a foreign key pointing to the now deleted post.
+ # [:counter_cache]
+      #   This option can be used to configure a custom named <tt>:counter_cache</tt>. You only need this option
+      #   when you customized the name of your <tt>:counter_cache</tt> on the #belongs_to association.
+ # [:as]
+ # Specifies a polymorphic interface (See #belongs_to).
+ # [:through]
+ # Specifies an association through which to perform the query. This can be any other type
+ # of association, including other <tt>:through</tt> associations. Options for <tt>:class_name</tt>,
+ # <tt>:primary_key</tt> and <tt>:foreign_key</tt> are ignored, as the association uses the
+ # source reflection.
+ #
+ # If the association on the join model is a #belongs_to, the collection can be modified
+ # and the records on the <tt>:through</tt> model will be automatically created and removed
+ # as appropriate. Otherwise, the collection is read-only, so you should manipulate the
+ # <tt>:through</tt> association directly.
+ #
+ # If you are going to modify the association (rather than just read from it), then it is
+ # a good idea to set the <tt>:inverse_of</tt> option on the source association on the
+ # join model. This allows associated records to be built which will automatically create
+ # the appropriate join model records when they are saved. (See the 'Association Join Models'
+ # section above.)
+ # [:source]
+ # Specifies the source association name used by #has_many <tt>:through</tt> queries.
+ # Only use it if the name cannot be inferred from the association.
+ # <tt>has_many :subscribers, through: :subscriptions</tt> will look for either <tt>:subscribers</tt> or
+ # <tt>:subscriber</tt> on Subscription, unless a <tt>:source</tt> is given.
+ # [:source_type]
+ # Specifies type of the source association used by #has_many <tt>:through</tt> queries where the source
+ # association is a polymorphic #belongs_to.
+ # [:validate]
+ # When set to +true+, validates new objects added to association when saving the parent object. +true+ by default.
+ # If you want to ensure associated objects are revalidated on every update, use +validates_associated+.
+ # [:autosave]
+ # If true, always save the associated objects or destroy them if marked for destruction,
+ # when saving the parent object. If false, never save or destroy the associated objects.
+ # By default, only save associated objects that are new records. This option is implemented as a
+ # +before_save+ callback. Because callbacks are run in the order they are defined, associated objects
+ # may need to be explicitly saved in any user-defined +before_save+ callbacks.
+ #
+ # Note that NestedAttributes::ClassMethods#accepts_nested_attributes_for sets
+ # <tt>:autosave</tt> to <tt>true</tt>.
+ # [:inverse_of]
+ # Specifies the name of the #belongs_to association on the associated object
+ # that is the inverse of this #has_many association. Does not work in combination
+ # with <tt>:through</tt> or <tt>:as</tt> options.
+ # See ActiveRecord::Associations::ClassMethods's overview on Bi-directional associations for more detail.
+ # [:extend]
+ # Specifies a module or array of modules that will be extended into the association object returned.
+ # Useful for defining methods on associations, especially when they should be shared between multiple
+ # association objects.
+ #
+ # Option examples:
+ # has_many :comments, -> { order("posted_on") }
+ # has_many :comments, -> { includes(:author) }
+ # has_many :people, -> { where(deleted: false).order("name") }, class_name: "Person"
+ # has_many :tracks, -> { order("position") }, dependent: :destroy
+ # has_many :comments, dependent: :nullify
+ # has_many :tags, as: :taggable
+ # has_many :reports, -> { readonly }
+ # has_many :subscribers, through: :subscriptions, source: :user
+ def has_many(name, scope = nil, options = {}, &extension)
+ reflection = Builder::HasMany.build(self, name, scope, options, &extension)
+ Reflection.add_reflection self, name, reflection
+ end
+
+ # Specifies a one-to-one association with another class. This method should only be used
+ # if the other class contains the foreign key. If the current class contains the foreign key,
+ # then you should use #belongs_to instead. See also ActiveRecord::Associations::ClassMethods's overview
+ # on when to use #has_one and when to use #belongs_to.
+ #
+ # The following methods for retrieval and query of a single associated object will be added:
+ #
+ # +association+ is a placeholder for the symbol passed as the +name+ argument, so
+ # <tt>has_one :manager</tt> would add among others <tt>manager.nil?</tt>.
+ #
+ # [association]
+ # Returns the associated object. +nil+ is returned if none is found.
+ # [association=(associate)]
+ # Assigns the associate object, extracts the primary key, sets it as the foreign key,
+ # and saves the associate object. To avoid database inconsistencies, permanently deletes an existing
+      #   associated object when assigning a new one, even if the new one isn't saved to the database.
+ # [build_association(attributes = {})]
+ # Returns a new object of the associated type that has been instantiated
+ # with +attributes+ and linked to this object through a foreign key, but has not
+ # yet been saved.
+ # [create_association(attributes = {})]
+ # Returns a new object of the associated type that has been instantiated
+ # with +attributes+, linked to this object through a foreign key, and that
+ # has already been saved (if it passed the validation).
+ # [create_association!(attributes = {})]
+ # Does the same as <tt>create_association</tt>, but raises ActiveRecord::RecordInvalid
+ # if the record is invalid.
+ #
+ # === Example
+ #
+ # An Account class declares <tt>has_one :beneficiary</tt>, which will add:
+ # * <tt>Account#beneficiary</tt> (similar to <tt>Beneficiary.where(account_id: id).first</tt>)
+ # * <tt>Account#beneficiary=(beneficiary)</tt> (similar to <tt>beneficiary.account_id = account.id; beneficiary.save</tt>)
+ # * <tt>Account#build_beneficiary</tt> (similar to <tt>Beneficiary.new("account_id" => id)</tt>)
+ # * <tt>Account#create_beneficiary</tt> (similar to <tt>b = Beneficiary.new("account_id" => id); b.save; b</tt>)
+ # * <tt>Account#create_beneficiary!</tt> (similar to <tt>b = Beneficiary.new("account_id" => id); b.save!; b</tt>)
+ #
+ # === Scopes
+ #
+ # You can pass a second argument +scope+ as a callable (i.e. proc or
+ # lambda) to retrieve a specific record or customize the generated query
+ # when you access the associated object.
+ #
+ # Scope examples:
+ # has_one :author, -> { where(comment_id: 1) }
+ # has_one :employer, -> { joins(:company) }
+ # has_one :latest_post, ->(blog) { where("created_at > ?", blog.enabled_at) }
+ #
+ # === Options
+ #
+ # The declaration can also include an +options+ hash to specialize the behavior of the association.
+ #
+ # Options are:
+ # [:class_name]
+ # Specify the class name of the association. Use it only if that name can't be inferred
+ # from the association name. So <tt>has_one :manager</tt> will by default be linked to the Manager class, but
+ # if the real class name is Person, you'll have to specify it with this option.
+ # [:dependent]
+ # Controls what happens to the associated object when
+ # its owner is destroyed:
+ #
+ # * <tt>:destroy</tt> causes the associated object to also be destroyed
+ # * <tt>:delete</tt> causes the associated object to be deleted directly from the database (so callbacks will not execute)
+ # * <tt>:nullify</tt> causes the foreign key to be set to +NULL+. Callbacks are not executed.
+ # * <tt>:restrict_with_exception</tt> causes an exception to be raised if there is an associated record
+ # * <tt>:restrict_with_error</tt> causes an error to be added to the owner if there is an associated object
+ #
+      #   Note that the <tt>:dependent</tt> option is ignored when using the <tt>:through</tt> option.
+ # [:foreign_key]
+ # Specify the foreign key used for the association. By default this is guessed to be the name
+ # of this class in lower-case and "_id" suffixed. So a Person class that makes a #has_one association
+ # will use "person_id" as the default <tt>:foreign_key</tt>.
+ # [:foreign_type]
+ # Specify the column used to store the associated object's type, if this is a polymorphic
+ # association. By default this is guessed to be the name of the polymorphic association
+ # specified on "as" option with a "_type" suffix. So a class that defines a
+ # <tt>has_one :tag, as: :taggable</tt> association will use "taggable_type" as the
+ # default <tt>:foreign_type</tt>.
+ # [:primary_key]
+ # Specify the method that returns the primary key used for the association. By default this is +id+.
+ # [:as]
+ # Specifies a polymorphic interface (See #belongs_to).
+ # [:through]
+ # Specifies a Join Model through which to perform the query. Options for <tt>:class_name</tt>,
+ # <tt>:primary_key</tt>, and <tt>:foreign_key</tt> are ignored, as the association uses the
+ # source reflection. You can only use a <tt>:through</tt> query through a #has_one
+ # or #belongs_to association on the join model.
+ # [:source]
+ # Specifies the source association name used by #has_one <tt>:through</tt> queries.
+ # Only use it if the name cannot be inferred from the association.
+ # <tt>has_one :favorite, through: :favorites</tt> will look for a
+ # <tt>:favorite</tt> on Favorite, unless a <tt>:source</tt> is given.
+ # [:source_type]
+ # Specifies type of the source association used by #has_one <tt>:through</tt> queries where the source
+ # association is a polymorphic #belongs_to.
+ # [:validate]
+ # When set to +true+, validates new objects added to association when saving the parent object. +false+ by default.
+ # If you want to ensure associated objects are revalidated on every update, use +validates_associated+.
+ # [:autosave]
+ # If true, always save the associated object or destroy it if marked for destruction,
+ # when saving the parent object. If false, never save or destroy the associated object.
+ # By default, only save the associated object if it's a new record.
+ #
+ # Note that NestedAttributes::ClassMethods#accepts_nested_attributes_for sets
+ # <tt>:autosave</tt> to <tt>true</tt>.
+ # [:inverse_of]
+ # Specifies the name of the #belongs_to association on the associated object
+ # that is the inverse of this #has_one association. Does not work in combination
+ # with <tt>:through</tt> or <tt>:as</tt> options.
+ # See ActiveRecord::Associations::ClassMethods's overview on Bi-directional associations for more detail.
+ # [:required]
+ # When set to +true+, the association will also have its presence validated.
+ # This will validate the association itself, not the id. You can use
+ # +:inverse_of+ to avoid an extra query during validation.
+ #
+ # Option examples:
+ # has_one :credit_card, dependent: :destroy # destroys the associated credit card
+      #   has_one :credit_card, dependent: :nullify  # updates the associated record's foreign
+ # # key value to NULL rather than destroying it
+ # has_one :last_comment, -> { order('posted_on') }, class_name: "Comment"
+ # has_one :project_manager, -> { where(role: 'project_manager') }, class_name: "Person"
+ # has_one :attachment, as: :attachable
+ # has_one :boss, -> { readonly }
+ # has_one :club, through: :membership
+ # has_one :primary_address, -> { where(primary: true) }, through: :addressables, source: :addressable
+ # has_one :credit_card, required: true
+ def has_one(name, scope = nil, options = {})
+ reflection = Builder::HasOne.build(self, name, scope, options)
+ Reflection.add_reflection self, name, reflection
+ end
+
+ # Specifies a one-to-one association with another class. This method should only be used
+ # if this class contains the foreign key. If the other class contains the foreign key,
+ # then you should use #has_one instead. See also ActiveRecord::Associations::ClassMethods's overview
+ # on when to use #has_one and when to use #belongs_to.
+ #
+ # Methods will be added for retrieval and query for a single associated object, for which
+ # this object holds an id:
+ #
+ # +association+ is a placeholder for the symbol passed as the +name+ argument, so
+ # <tt>belongs_to :author</tt> would add among others <tt>author.nil?</tt>.
+ #
+ # [association]
+ # Returns the associated object. +nil+ is returned if none is found.
+ # [association=(associate)]
+ # Assigns the associate object, extracts the primary key, and sets it as the foreign key.
+ # [build_association(attributes = {})]
+ # Returns a new object of the associated type that has been instantiated
+ # with +attributes+ and linked to this object through a foreign key, but has not yet been saved.
+ # [create_association(attributes = {})]
+ # Returns a new object of the associated type that has been instantiated
+ # with +attributes+, linked to this object through a foreign key, and that
+ # has already been saved (if it passed the validation).
+ # [create_association!(attributes = {})]
+ # Does the same as <tt>create_association</tt>, but raises ActiveRecord::RecordInvalid
+ # if the record is invalid.
+ #
+ # === Example
+ #
+ # A Post class declares <tt>belongs_to :author</tt>, which will add:
+ # * <tt>Post#author</tt> (similar to <tt>Author.find(author_id)</tt>)
+ # * <tt>Post#author=(author)</tt> (similar to <tt>post.author_id = author.id</tt>)
+ # * <tt>Post#build_author</tt> (similar to <tt>post.author = Author.new</tt>)
+ # * <tt>Post#create_author</tt> (similar to <tt>post.author = Author.new; post.author.save; post.author</tt>)
+ # * <tt>Post#create_author!</tt> (similar to <tt>post.author = Author.new; post.author.save!; post.author</tt>)
+ # The declaration can also include an +options+ hash to specialize the behavior of the association.
+ #
+ # === Scopes
+ #
+ # You can pass a second argument +scope+ as a callable (i.e. proc or
+ # lambda) to retrieve a specific record or customize the generated query
+ # when you access the associated object.
+ #
+ # Scope examples:
+ # belongs_to :firm, -> { where(id: 2) }
+ # belongs_to :user, -> { joins(:friends) }
+ # belongs_to :level, ->(game) { where("game_level > ?", game.current_level) }
+ #
+ # === Options
+ #
+ # [:class_name]
+ # Specify the class name of the association. Use it only if that name can't be inferred
+ # from the association name. So <tt>belongs_to :author</tt> will by default be linked to the Author class, but
+ # if the real class name is Person, you'll have to specify it with this option.
+ # [:foreign_key]
+ # Specify the foreign key used for the association. By default this is guessed to be the name
+ # of the association with an "_id" suffix. So a class that defines a <tt>belongs_to :person</tt>
+ # association will use "person_id" as the default <tt>:foreign_key</tt>. Similarly,
+ # <tt>belongs_to :favorite_person, class_name: "Person"</tt> will use a foreign key
+ # of "favorite_person_id".
+ # [:foreign_type]
+ # Specify the column used to store the associated object's type, if this is a polymorphic
+ # association. By default this is guessed to be the name of the association with a "_type"
+ # suffix. So a class that defines a <tt>belongs_to :taggable, polymorphic: true</tt>
+ # association will use "taggable_type" as the default <tt>:foreign_type</tt>.
+ # [:primary_key]
+ # Specify the method that returns the primary key of associated object used for the association.
+ # By default this is id.
+ # [:dependent]
+ # If set to <tt>:destroy</tt>, the associated object is destroyed when this object is. If set to
+ # <tt>:delete</tt>, the associated object is deleted *without* calling its destroy method.
+ # This option should not be specified when #belongs_to is used in conjunction with
+ # a #has_many relationship on another class because of the potential to leave
+ # orphaned records behind.
+ # [:counter_cache]
+ # Caches the number of belonging objects on the associate class through the use of CounterCache::ClassMethods#increment_counter
+ # and CounterCache::ClassMethods#decrement_counter. The counter cache is incremented when an object of this
+ # class is created and decremented when it's destroyed. This requires that a column
+ # named <tt>#{table_name}_count</tt> (such as +comments_count+ for a belonging Comment class)
+ # is used on the associate class (such as a Post class) - that is the migration for
+ # <tt>#{table_name}_count</tt> is created on the associate class (such that <tt>Post.comments_count</tt> will
+ # return the count cached, see note below). You can also specify a custom counter
+ # cache column by providing a column name instead of a +true+/+false+ value to this
+ # option (e.g., <tt>counter_cache: :my_custom_counter</tt>.)
+ # Note: Specifying a counter cache will add it to that model's list of readonly attributes
+ # using +attr_readonly+.
+ # [:polymorphic]
+ # Specify this association is a polymorphic association by passing +true+.
+ # Note: If you've enabled the counter cache, then you may want to add the counter cache attribute
+ # to the +attr_readonly+ list in the associated classes (e.g. <tt>class Post; attr_readonly :comments_count; end</tt>).
+ # [:validate]
+ # When set to +true+, validates new objects added to association when saving the parent object. +false+ by default.
+ # If you want to ensure associated objects are revalidated on every update, use +validates_associated+.
+ # [:autosave]
+ # If true, always save the associated object or destroy it if marked for destruction, when
+ # saving the parent object.
+ # If false, never save or destroy the associated object.
+ # By default, only save the associated object if it's a new record.
+ #
+ # Note that NestedAttributes::ClassMethods#accepts_nested_attributes_for
+ # sets <tt>:autosave</tt> to <tt>true</tt>.
+ # [:touch]
+ # If true, the associated object will be touched (the updated_at/on attributes set to current time)
+ # when this record is either saved or destroyed. If you specify a symbol, that attribute
+ # will be updated with the current time in addition to the updated_at/on attribute.
+ # Please note that with touching no validation is performed and only the +after_touch+,
+ # +after_commit+ and +after_rollback+ callbacks are executed.
+ # [:inverse_of]
+ # Specifies the name of the #has_one or #has_many association on the associated
+ # object that is the inverse of this #belongs_to association. Does not work in
+ # combination with the <tt>:polymorphic</tt> options.
+ # See ActiveRecord::Associations::ClassMethods's overview on Bi-directional associations for more detail.
+ # [:optional]
+ # When set to +true+, the association will not have its presence validated.
+ # [:required]
+ # When set to +true+, the association will also have its presence validated.
+ # This will validate the association itself, not the id. You can use
+ # +:inverse_of+ to avoid an extra query during validation.
+ # NOTE: <tt>required</tt> is set to <tt>true</tt> by default and is deprecated. If
+ # you don't want to have association presence validated, use <tt>optional: true</tt>.
+ # [:default]
+ # Provide a callable (i.e. proc or lambda) to specify that the association should
+ # be initialized with a particular record before validation.
+ #
+ # Option examples:
+ # belongs_to :firm, foreign_key: "client_of"
+ # belongs_to :person, primary_key: "name", foreign_key: "person_name"
+ # belongs_to :author, class_name: "Person", foreign_key: "author_id"
+ # belongs_to :valid_coupon, ->(o) { where "discounts > ?", o.payments_count },
+ # class_name: "Coupon", foreign_key: "coupon_id"
+ # belongs_to :attachable, polymorphic: true
+ # belongs_to :project, -> { readonly }
+ # belongs_to :post, counter_cache: true
+ # belongs_to :comment, touch: true
+ # belongs_to :company, touch: :employees_last_updated_at
+ # belongs_to :user, optional: true
+ # belongs_to :account, default: -> { company.account }
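+      #
+      # A minimal counter cache setup (an illustrative sketch; the Post / Comment
+      # names and the migration below are assumptions, not prescribed by this API):
+      #
+      #   class AddCommentsCountToPosts < ActiveRecord::Migration[5.0]
+      #     def change
+      #       add_column :posts, :comments_count, :integer, default: 0, null: false
+      #     end
+      #   end
+      #
+      #   class Comment < ActiveRecord::Base
+      #     belongs_to :post, counter_cache: true
+      #   end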
+ def belongs_to(name, scope = nil, options = {})
+ reflection = Builder::BelongsTo.build(self, name, scope, options)
+ Reflection.add_reflection self, name, reflection
+ end
+
+ # Specifies a many-to-many relationship with another class. This associates two classes via an
+ # intermediate join table. Unless the join table is explicitly specified as an option, it is
+ # guessed using the lexical order of the class names. So a join between Developer and Project
+ # will give the default join table name of "developers_projects" because "D" precedes "P" alphabetically.
+ # Note that this precedence is calculated using the <tt><</tt> operator for String. This
+ # means that if the strings are of different lengths, and the strings are equal when compared
+ # up to the shortest length, then the longer string is considered of higher
+ # lexical precedence than the shorter one. For example, one would expect the tables "paper_boxes" and "papers"
+ # to generate a join table name of "papers_paper_boxes" because of the length of the name "paper_boxes",
+ # but it in fact generates a join table name of "paper_boxes_papers". Be aware of this caveat, and use the
+ # custom <tt>:join_table</tt> option if you need to.
+ # If your tables share a common prefix, it will only appear once at the beginning. For example,
+ # the tables "catalog_categories" and "catalog_products" generate a join table name of "catalog_categories_products".
+ #
+ # The join table should not have a primary key or a model associated with it. You must manually generate the
+ # join table with a migration such as this:
+ #
+ # class CreateDevelopersProjectsJoinTable < ActiveRecord::Migration[5.0]
+ # def change
+ # create_join_table :developers, :projects
+ # end
+ # end
+ #
+ # It's also a good idea to add indexes to each of those columns to speed up the joins process.
+ # However, in MySQL it is advised to add a compound index for both of the columns as MySQL only
+ # uses one index per table during the lookup.
+ #
+ # Adds the following methods for retrieval and query:
+ #
+ # +collection+ is a placeholder for the symbol passed as the +name+ argument, so
+ # <tt>has_and_belongs_to_many :categories</tt> would add among others <tt>categories.empty?</tt>.
+ #
+ # [collection]
+ # Returns an array of all the associated objects.
+ # An empty array is returned if none are found.
+ # [collection<<(object, ...)]
+ # Adds one or more objects to the collection by creating associations in the join table
+ # (<tt>collection.push</tt> and <tt>collection.concat</tt> are aliases to this method).
+ # Note that this operation instantly fires update SQL without waiting for the save or update call on the
+ # parent object, unless the parent object is a new record.
+ # [collection.delete(object, ...)]
+ # Removes one or more objects from the collection by removing their associations from the join table.
+ # This does not destroy the objects.
+ # [collection.destroy(object, ...)]
+ # Removes one or more objects from the collection by running destroy on each association in the join table, overriding any dependent option.
+ # This does not destroy the objects.
+ # [collection=objects]
+ # Replaces the collection's content by deleting and adding objects as appropriate.
+ # [collection_singular_ids]
+ # Returns an array of the associated objects' ids.
+ # [collection_singular_ids=ids]
+      #   Replaces the collection with the objects identified by the primary keys in +ids+.
+ # [collection.clear]
+ # Removes every object from the collection. This does not destroy the objects.
+ # [collection.empty?]
+ # Returns +true+ if there are no associated objects.
+ # [collection.size]
+ # Returns the number of associated objects.
+ # [collection.find(id)]
+      #   Finds an associated object with the given +id+, provided that it
+      #   is associated with this object.
+ # Uses the same rules as ActiveRecord::FinderMethods#find.
+ # [collection.exists?(...)]
+ # Checks whether an associated object with the given conditions exists.
+ # Uses the same rules as ActiveRecord::FinderMethods#exists?.
+ # [collection.build(attributes = {})]
+ # Returns a new object of the collection type that has been instantiated
+ # with +attributes+ and linked to this object through the join table, but has not yet been saved.
+ # [collection.create(attributes = {})]
+ # Returns a new object of the collection type that has been instantiated
+ # with +attributes+, linked to this object through the join table, and that has already been
+ # saved (if it passed the validation).
+ #
+ # === Example
+ #
+ # A Developer class declares <tt>has_and_belongs_to_many :projects</tt>, which will add:
+ # * <tt>Developer#projects</tt>
+ # * <tt>Developer#projects<<</tt>
+ # * <tt>Developer#projects.delete</tt>
+ # * <tt>Developer#projects.destroy</tt>
+ # * <tt>Developer#projects=</tt>
+ # * <tt>Developer#project_ids</tt>
+ # * <tt>Developer#project_ids=</tt>
+ # * <tt>Developer#projects.clear</tt>
+ # * <tt>Developer#projects.empty?</tt>
+ # * <tt>Developer#projects.size</tt>
+ # * <tt>Developer#projects.find(id)</tt>
+ # * <tt>Developer#projects.exists?(...)</tt>
+ # * <tt>Developer#projects.build</tt> (similar to <tt>Project.new("developer_id" => id)</tt>)
+ # * <tt>Developer#projects.create</tt> (similar to <tt>c = Project.new("developer_id" => id); c.save; c</tt>)
+ # The declaration may include an +options+ hash to specialize the behavior of the association.
+ #
+ # === Scopes
+ #
+ # You can pass a second argument +scope+ as a callable (i.e. proc or
+ # lambda) to retrieve a specific set of records or customize the generated
+ # query when you access the associated collection.
+ #
+ # Scope examples:
+ # has_and_belongs_to_many :projects, -> { includes(:milestones, :manager) }
+ # has_and_belongs_to_many :categories, ->(post) {
+ # where("default_category = ?", post.default_category)
+ #
+ # === Extensions
+ #
+ # The +extension+ argument allows you to pass a block into a
+ # has_and_belongs_to_many association. This is useful for adding new
+ # finders, creators and other factory-type methods to be used as part of
+ # the association.
+ #
+ # Extension examples:
+ # has_and_belongs_to_many :contractors do
+ # def find_or_create_by_name(name)
+ # first_name, last_name = name.split(" ", 2)
+ # find_or_create_by(first_name: first_name, last_name: last_name)
+ # end
+ # end
+ #
+ # === Options
+ #
+ # [:class_name]
+ # Specify the class name of the association. Use it only if that name can't be inferred
+ # from the association name. So <tt>has_and_belongs_to_many :projects</tt> will by default be linked to the
+ # Project class, but if the real class name is SuperProject, you'll have to specify it with this option.
+ # [:join_table]
+ # Specify the name of the join table if the default based on lexical order isn't what you want.
+ # <b>WARNING:</b> If you're overwriting the table name of either class, the +table_name+ method
+ # MUST be declared underneath any #has_and_belongs_to_many declaration in order to work.
+ # [:foreign_key]
+ # Specify the foreign key used for the association. By default this is guessed to be the name
+ # of this class in lower-case and "_id" suffixed. So a Person class that makes
+ # a #has_and_belongs_to_many association to Project will use "person_id" as the
+ # default <tt>:foreign_key</tt>.
+ # [:association_foreign_key]
+ # Specify the foreign key used for the association on the receiving side of the association.
+ # By default this is guessed to be the name of the associated class in lower-case and "_id" suffixed.
+ # So if a Person class makes a #has_and_belongs_to_many association to Project,
+ # the association will use "project_id" as the default <tt>:association_foreign_key</tt>.
+ # [:validate]
+      #   When set to +true+, validates new objects added to the association when saving the parent object. +true+ by default.
+ # If you want to ensure associated objects are revalidated on every update, use +validates_associated+.
+ # [:autosave]
+ # If true, always save the associated objects or destroy them if marked for destruction, when
+ # saving the parent object.
+ # If false, never save or destroy the associated objects.
+ # By default, only save associated objects that are new records.
+ #
+ # Note that NestedAttributes::ClassMethods#accepts_nested_attributes_for sets
+ # <tt>:autosave</tt> to <tt>true</tt>.
+ #
+ # Option examples:
+ # has_and_belongs_to_many :projects
+ # has_and_belongs_to_many :projects, -> { includes(:milestones, :manager) }
+ # has_and_belongs_to_many :nations, class_name: "Country"
+ # has_and_belongs_to_many :categories, join_table: "prods_cats"
+ # has_and_belongs_to_many :categories, -> { readonly }
+ def has_and_belongs_to_many(name, scope = nil, **options, &extension)
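+        # Implementation note: habtm is built on top of has_many :through an
+        # anonymous join model (named "HABTM_<association>") that declares a
+        # belongs_to to each side; the code below constructs that join model and
+        # the intermediate has_many reflection before calling has_many itself.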
+ habtm_reflection = ActiveRecord::Reflection::HasAndBelongsToManyReflection.new(name, scope, options, self)
+
+ builder = Builder::HasAndBelongsToMany.new name, self, options
+
+ join_model = ActiveSupport::Deprecation.silence { builder.through_model }
+
+ const_set join_model.name, join_model
+ private_constant join_model.name
+
+ middle_reflection = builder.middle_reflection join_model
+
+ Builder::HasMany.define_callbacks self, middle_reflection
+ Reflection.add_reflection self, middle_reflection.name, middle_reflection
+ middle_reflection.parent_reflection = habtm_reflection
+
+ include Module.new {
+ class_eval <<-RUBY, __FILE__, __LINE__ + 1
+ def destroy_associations
+ association(:#{middle_reflection.name}).delete_all(:delete_all)
+ association(:#{name}).reset
+ super
+ end
+ RUBY
+ }
+
+ hm_options = {}
+ hm_options[:through] = middle_reflection.name
+ hm_options[:source] = join_model.right_reflection.name
+
+ [:before_add, :after_add, :before_remove, :after_remove, :autosave, :validate, :join_table, :class_name, :extend].each do |k|
+ hm_options[k] = options[k] if options.key? k
+ end
+
+ ActiveSupport::Deprecation.silence { has_many name, scope, hm_options, &extension }
+ _reflections[name.to_s].parent_reflection = habtm_reflection
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/associations/alias_tracker.rb b/activerecord/lib/active_record/associations/alias_tracker.rb
new file mode 100644
index 0000000000..096f016976
--- /dev/null
+++ b/activerecord/lib/active_record/associations/alias_tracker.rb
@@ -0,0 +1,94 @@
+# frozen_string_literal: true
+
+require "active_support/core_ext/string/conversions"
+
+module ActiveRecord
+ module Associations
+ # Keeps track of table aliases for ActiveRecord::Associations::JoinDependency
+ class AliasTracker # :nodoc:
+ def self.create(connection, initial_table)
+ aliases = Hash.new(0)
+ aliases[initial_table] = 1
+ new(connection, aliases)
+ end
+
+ def self.create_with_joins(connection, initial_table, joins)
+ if joins.empty?
+ create(connection, initial_table)
+ else
+ aliases = Hash.new { |h, k|
+ h[k] = initial_count_for(connection, k, joins)
+ }
+ aliases[initial_table] = 1
+ new(connection, aliases)
+ end
+ end
+
+ def self.initial_count_for(connection, name, table_joins)
+        # quoted_name should be downcased as some database adapters (e.g. Oracle) return the quoted name in uppercase
+ quoted_name = connection.quote_table_name(name).downcase
+
+ counts = table_joins.map do |join|
+ if join.is_a?(Arel::Nodes::StringJoin)
+ # Table names + table aliases
+ join.left.downcase.scan(
+ /join(?:\s+\w+)?\s+(\S+\s+)?#{quoted_name}\son/
+ ).size
+ elsif join.respond_to? :left
+ join.left.table_name == name ? 1 : 0
+ else
+ # this branch is reached by two tests:
+ #
+ # activerecord/test/cases/associations/cascaded_eager_loading_test.rb:37
+ # with :posts
+ #
+ # activerecord/test/cases/associations/eager_test.rb:1133
+ # with :comments
+ #
+ 0
+ end
+ end
+
+ counts.sum
+ end
+
+ # table_joins is an array of arel joins which might conflict with the aliases we assign here
+ def initialize(connection, aliases)
+ @aliases = aliases
+ @connection = connection
+ end
+
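+      # Returns an Arel::Table for +table_name+. The bare table name is used the
+      # first time it is seen in the query; on a collision the +aliased_name+
+      # candidate is used instead, and further collisions append a numeric suffix
+      # ("_2", "_3", ...) to the truncated candidate.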
+ def aliased_table_for(table_name, aliased_name, type_caster)
+ if aliases[table_name].zero?
+ # If it's zero, we can have our table_name
+ aliases[table_name] = 1
+ Arel::Table.new(table_name, type_caster: type_caster)
+ else
+ # Otherwise, we need to use an alias
+ aliased_name = @connection.table_alias_for(aliased_name)
+
+ # Update the count
+ aliases[aliased_name] += 1
+
+ table_alias = if aliases[aliased_name] > 1
+ "#{truncate(aliased_name)}_#{aliases[aliased_name]}"
+ else
+ aliased_name
+ end
+ Arel::Table.new(table_name, type_caster: type_caster).alias(table_alias)
+ end
+ end
+
+ # TODO Change this to private once we've dropped Ruby 2.2 support.
+ # Workaround for Ruby 2.2 "private attribute?" warning.
+ protected
+ attr_reader :aliases
+
+ private
+
+ def truncate(name)
+ name.slice(0, @connection.table_alias_length - 2)
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/associations/association.rb b/activerecord/lib/active_record/associations/association.rb
new file mode 100644
index 0000000000..268b022ab8
--- /dev/null
+++ b/activerecord/lib/active_record/associations/association.rb
@@ -0,0 +1,284 @@
+# frozen_string_literal: true
+
+require "active_support/core_ext/array/wrap"
+
+module ActiveRecord
+ module Associations
+ # = Active Record Associations
+ #
+ # This is the root class of all associations ('+ Foo' signifies an included module Foo):
+ #
+ # Association
+ # SingularAssociation
+ # HasOneAssociation + ForeignAssociation
+ # HasOneThroughAssociation + ThroughAssociation
+ # BelongsToAssociation
+ # BelongsToPolymorphicAssociation
+ # CollectionAssociation
+ # HasManyAssociation + ForeignAssociation
+ # HasManyThroughAssociation + ThroughAssociation
+ class Association #:nodoc:
+ attr_reader :owner, :target, :reflection
+ attr_accessor :inversed
+
+ delegate :options, to: :reflection
+
+ def initialize(owner, reflection)
+ reflection.check_validity!
+
+ @owner, @reflection = owner, reflection
+
+ reset
+ reset_scope
+ end
+
+ # Resets the \loaded flag to +false+ and sets the \target to +nil+.
+ def reset
+ @loaded = false
+ @target = nil
+ @stale_state = nil
+ @inversed = false
+ end
+
+ # Reloads the \target and returns +self+ on success.
+ def reload
+ reset
+ reset_scope
+ load_target
+ self unless target.nil?
+ end
+
+ # Has the \target been already \loaded?
+ def loaded?
+ @loaded
+ end
+
+      # Asserts the \target has been loaded, setting the \loaded flag to +true+.
+ def loaded!
+ @loaded = true
+ @stale_state = stale_state
+ @inversed = false
+ end
+
+ # The target is stale if the target no longer points to the record(s) that the
+ # relevant foreign_key(s) refers to. If stale, the association accessor method
+ # on the owner will reload the target. It's up to subclasses to implement the
+ # stale_state method if relevant.
+ #
+ # Note that if the target has not been loaded, it is not considered stale.
+ def stale_target?
+ !inversed && loaded? && @stale_state != stale_state
+ end
+
+ # Sets the target of this association to <tt>\target</tt>, and the \loaded flag to +true+.
+ def target=(target)
+ @target = target
+ loaded!
+ end
+
+ def scope
+ target_scope.merge!(association_scope)
+ end
+
+ # The scope for this association.
+ #
+ # Note that the association_scope is merged into the target_scope only when the
+ # scope method is called. This is because at that point the call may be surrounded
+ # by scope.scoping { ... } or with_scope { ... } etc, which affects the scope which
+ # actually gets built.
+ def association_scope
+ if klass
+ @association_scope ||= AssociationScope.scope(self)
+ end
+ end
+
+ def reset_scope
+ @association_scope = nil
+ end
+
+ # Set the inverse association, if possible
+ def set_inverse_instance(record)
+ if invertible_for?(record)
+ inverse = record.association(inverse_reflection_for(record).name)
+ inverse.target = owner
+ inverse.inversed = true
+ end
+ record
+ end
+
+ # Remove the inverse association, if possible
+ def remove_inverse_instance(record)
+ if invertible_for?(record)
+ inverse = record.association(inverse_reflection_for(record).name)
+ inverse.target = nil
+ inverse.inversed = false
+ end
+ end
+
+ # Returns the class of the target. belongs_to polymorphic overrides this to look at the
+ # polymorphic_type field on the owner.
+ def klass
+ reflection.klass
+ end
+
+      # Can be overridden (e.g. in ThroughAssociation) to merge in other scopes (e.g. the
+      # through association's scope).
+ def target_scope
+ AssociationRelation.create(klass, klass.arel_table, klass.predicate_builder, self).merge!(klass.all)
+ end
+
+ def extensions
+ extensions = klass.default_extensions | reflection.extensions
+
+ if scope = reflection.scope
+ extensions |= klass.unscoped.instance_exec(owner, &scope).extensions
+ end
+
+ extensions
+ end
+
+ # Loads the \target if needed and returns it.
+ #
+ # This method is abstract in the sense that it relies on +find_target+,
+ # which is expected to be provided by descendants.
+ #
+ # If the \target is already \loaded it is just returned. Thus, you can call
+ # +load_target+ unconditionally to get the \target.
+ #
+ # ActiveRecord::RecordNotFound is rescued within the method, and it is
+ # not reraised. The proxy is \reset and +nil+ is the return value.
+ def load_target
+ @target = find_target if (@stale_state && stale_target?) || find_target?
+
+ loaded! unless loaded?
+ target
+ rescue ActiveRecord::RecordNotFound
+ reset
+ end
+
+ # We can't dump @reflection since it contains the scope proc
+ def marshal_dump
+ ivars = (instance_variables - [:@reflection]).map { |name| [name, instance_variable_get(name)] }
+ [@reflection.name, ivars]
+ end
+
+ def marshal_load(data)
+ reflection_name, ivars = data
+ ivars.each { |name, val| instance_variable_set(name, val) }
+ @reflection = @owner.class._reflect_on_association(reflection_name)
+ end
+
+ def initialize_attributes(record, except_from_scope_attributes = nil) #:nodoc:
+ except_from_scope_attributes ||= {}
+ skip_assign = [reflection.foreign_key, reflection.type].compact
+ assigned_keys = record.changed_attribute_names_to_save
+ assigned_keys += except_from_scope_attributes.keys.map(&:to_s)
+ attributes = scope_for_create.except!(*(assigned_keys - skip_assign))
+ record.send(:_assign_attributes, attributes) if attributes.any?
+ set_inverse_instance(record)
+ end
+
+ def create(attributes = {}, &block)
+ _create_record(attributes, &block)
+ end
+
+ def create!(attributes = {}, &block)
+ _create_record(attributes, true, &block)
+ end
+
+ private
+ def scope_for_create
+ scope.scope_for_create
+ end
+
+ def find_target?
+ !loaded? && (!owner.new_record? || foreign_key_present?) && klass
+ end
+
+ def creation_attributes
+ attributes = {}
+
+ if (reflection.has_one? || reflection.collection?) && !options[:through]
+ attributes[reflection.foreign_key] = owner[reflection.active_record_primary_key]
+
+ if reflection.options[:as]
+ attributes[reflection.type] = owner.class.base_class.name
+ end
+ end
+
+ attributes
+ end
+
+ # Sets the owner attributes on the given record
+ def set_owner_attributes(record)
+ creation_attributes.each { |key, value| record[key] = value }
+ end
+
+ # Returns true if there is a foreign key present on the owner which
+ # references the target. This is used to determine whether we can load
+ # the target if the owner is currently a new record (and therefore
+ # without a key). If the owner is a new record then foreign_key must
+ # be present in order to load target.
+ #
+ # Currently implemented by belongs_to (vanilla and polymorphic) and
+ # has_one/has_many :through associations which go through a belongs_to.
+ def foreign_key_present?
+ false
+ end
+
+ # Raises ActiveRecord::AssociationTypeMismatch unless +record+ is of
+ # the kind of the class of the associated objects. Meant to be used as
+ # a sanity check when you are about to assign an associated record.
+ def raise_on_type_mismatch!(record)
+ unless record.is_a?(reflection.klass)
+ fresh_class = reflection.class_name.safe_constantize
+ unless fresh_class && record.is_a?(fresh_class)
+ message = "#{reflection.class_name}(##{reflection.klass.object_id}) expected, "\
+ "got #{record.inspect} which is an instance of #{record.class}(##{record.class.object_id})"
+ raise ActiveRecord::AssociationTypeMismatch, message
+ end
+ end
+ end
+
+      # Can be redefined by subclasses, notably polymorphic belongs_to.
+ # The record parameter is necessary to support polymorphic inverses as we must check for
+ # the association in the specific class of the record.
+ def inverse_reflection_for(record)
+ reflection.inverse_of
+ end
+
+ # Returns true if inverse association on the given record needs to be set.
+ # This method is redefined by subclasses.
+ def invertible_for?(record)
+ foreign_key_for?(record) && inverse_reflection_for(record)
+ end
+
+ # Returns true if record contains the foreign_key
+ def foreign_key_for?(record)
+ record.has_attribute?(reflection.foreign_key)
+ end
+
+ # This should be implemented to return the values of the relevant key(s) on the owner,
+ # so that when stale_state is different from the value stored on the last find_target,
+ # the target is stale.
+ #
+ # This is only relevant to certain associations, which is why it returns +nil+ by default.
+ def stale_state
+ end
+
+ def build_record(attributes)
+ reflection.build_association(attributes) do |record|
+ initialize_attributes(record, attributes)
+ end
+ end
+
+ # Returns true if statement cache should be skipped on the association reader.
+ def skip_statement_cache?(scope)
+ reflection.has_scope? ||
+ scope.eager_loading? ||
+ klass.scope_attributes? ||
+ reflection.source_reflection.active_record.default_scopes.any?
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/associations/association_scope.rb b/activerecord/lib/active_record/associations/association_scope.rb
new file mode 100644
index 0000000000..9b0b50977d
--- /dev/null
+++ b/activerecord/lib/active_record/associations/association_scope.rb
@@ -0,0 +1,173 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ module Associations
+ class AssociationScope #:nodoc:
+ def self.scope(association)
+ INSTANCE.scope(association)
+ end
+
+ def self.create(&block)
+ block ||= lambda { |val| val }
+ new(block)
+ end
+
+ def initialize(value_transformation)
+ @value_transformation = value_transformation
+ end
+
+ INSTANCE = create
+
+ def scope(association)
+ klass = association.klass
+ reflection = association.reflection
+ scope = klass.unscoped
+ owner = association.owner
+ alias_tracker = AliasTracker.create(klass.connection, klass.table_name)
+ chain_head, chain_tail = get_chain(reflection, association, alias_tracker)
+
+ scope.extending! reflection.extensions
+ add_constraints(scope, owner, reflection, chain_head, chain_tail)
+ end
+
+ def join_type
+ Arel::Nodes::InnerJoin
+ end
+
+ def self.get_bind_values(owner, chain)
+ binds = []
+ last_reflection = chain.last
+
+ binds << last_reflection.join_id_for(owner)
+ if last_reflection.type
+ binds << owner.class.base_class.name
+ end
+
+ chain.each_cons(2).each do |reflection, next_reflection|
+ if reflection.type
+ binds << next_reflection.klass.base_class.name
+ end
+ end
+ binds
+ end
+
+ # TODO Change this to private once we've dropped Ruby 2.2 support.
+ # Workaround for Ruby 2.2 "private attribute?" warning.
+ protected
+
+ attr_reader :value_transformation
+
+ private
+ def join(table, constraint)
+ table.create_join(table, table.create_on(constraint), join_type)
+ end
+
+ def last_chain_scope(scope, table, reflection, owner)
+ join_keys = reflection.join_keys
+ key = join_keys.key
+ foreign_key = join_keys.foreign_key
+
+ value = transform_value(owner[foreign_key])
+ scope = scope.where(table.name => { key => value })
+
+ if reflection.type
+ polymorphic_type = transform_value(owner.class.base_class.name)
+ scope = scope.where(table.name => { reflection.type => polymorphic_type })
+ end
+
+ scope
+ end
+
+ def transform_value(value)
+ value_transformation.call(value)
+ end
+
+ def next_chain_scope(scope, table, reflection, foreign_table, next_reflection)
+ join_keys = reflection.join_keys
+ key = join_keys.key
+ foreign_key = join_keys.foreign_key
+
+ constraint = table[key].eq(foreign_table[foreign_key])
+
+ if reflection.type
+ value = transform_value(next_reflection.klass.base_class.name)
+ scope = scope.where(table.name => { reflection.type => value })
+ end
+
+ scope = scope.joins(join(foreign_table, constraint))
+ end
+
+ class ReflectionProxy < SimpleDelegator # :nodoc:
+ attr_accessor :next
+ attr_reader :alias_name
+
+ def initialize(reflection, alias_name)
+ super(reflection)
+ @alias_name = alias_name
+ end
+
+ def all_includes; nil; end
+ end
+
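+      # Builds a singly linked list over the reflection chain: the head is a
+      # RuntimeReflection for the association itself, and every remaining
+      # reflection is wrapped in a ReflectionProxy that carries the table alias
+      # to join against. Returns the [head, tail] pair of that list.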
+ def get_chain(reflection, association, tracker)
+ name = reflection.name
+ runtime_reflection = Reflection::RuntimeReflection.new(reflection, association)
+ previous_reflection = runtime_reflection
+ reflection.chain.drop(1).each do |refl|
+ alias_name = tracker.aliased_table_for(
+ refl.table_name,
+ refl.alias_candidate(name),
+ refl.klass.type_caster
+ )
+ proxy = ReflectionProxy.new(refl, alias_name)
+ previous_reflection.next = proxy
+ previous_reflection = proxy
+ end
+ [runtime_reflection, previous_reflection]
+ end
+
+ def add_constraints(scope, owner, refl, chain_head, chain_tail)
+ owner_reflection = chain_tail
+ table = owner_reflection.alias_name
+ scope = last_chain_scope(scope, table, owner_reflection, owner)
+
+ reflection = chain_head
+ while reflection
+ table = reflection.alias_name
+ next_reflection = reflection.next
+
+ unless reflection == chain_tail
+ foreign_table = next_reflection.alias_name
+ scope = next_chain_scope(scope, table, reflection, foreign_table, next_reflection)
+ end
+
+ # Exclude the scope of the association itself, because that
+ # was already merged in the #scope method.
+ reflection.constraints.each do |scope_chain_item|
+ item = eval_scope(reflection, table, scope_chain_item, owner)
+
+ if scope_chain_item == refl.scope
+ scope.merge! item.except(:where, :includes)
+ end
+
+ reflection.all_includes do
+ scope.includes! item.includes_values
+ end
+
+ scope.unscope!(*item.unscope_values)
+ scope.where_clause += item.where_clause
+ scope.order_values |= item.order_values
+ end
+
+ reflection = next_reflection
+ end
+
+ scope
+ end
+
+ def eval_scope(reflection, table, scope, owner)
+ reflection.build_scope(table).instance_exec(owner, &scope)
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/associations/belongs_to_association.rb b/activerecord/lib/active_record/associations/belongs_to_association.rb
new file mode 100644
index 0000000000..7a9f5f7937
--- /dev/null
+++ b/activerecord/lib/active_record/associations/belongs_to_association.rb
@@ -0,0 +1,113 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ # = Active Record Belongs To Association
+ module Associations
+ class BelongsToAssociation < SingularAssociation #:nodoc:
+ def handle_dependency
+ target.send(options[:dependent]) if load_target
+ end
+
+ def replace(record)
+ if record
+ raise_on_type_mismatch!(record)
+ update_counters_on_replace(record)
+ replace_keys(record)
+ set_inverse_instance(record)
+ @updated = true
+ else
+ decrement_counters
+ remove_keys
+ end
+
+ self.target = record
+ end
+
+ def default(&block)
+ writer(owner.instance_exec(&block)) if reader.nil?
+ end
+
+ def reset
+ super
+ @updated = false
+ end
+
+ def updated?
+ @updated
+ end
+
+ def decrement_counters # :nodoc:
+ update_counters(-1)
+ end
+
+ def increment_counters # :nodoc:
+ update_counters(1)
+ end
+
+ private
+
+ def update_counters(by)
+ if require_counter_update? && foreign_key_present?
+ if target && !stale_target?
+ target.increment!(reflection.counter_cache_column, by)
+ else
+ klass.update_counters(target_id, reflection.counter_cache_column => by)
+ end
+ end
+ end
+
+ def find_target?
+ !loaded? && foreign_key_present? && klass
+ end
+
+ def require_counter_update?
+ reflection.counter_cache_column && owner.persisted?
+ end
+
+ def update_counters_on_replace(record)
+ if require_counter_update? && different_target?(record)
+ owner.instance_variable_set :@_after_replace_counter_called, true
+ record.increment!(reflection.counter_cache_column)
+ decrement_counters
+ end
+ end
+
+      # Checks whether record is different from the current target, without loading it
+ def different_target?(record)
+ record.id != owner._read_attribute(reflection.foreign_key)
+ end
+
+ def replace_keys(record)
+ owner[reflection.foreign_key] = record._read_attribute(reflection.association_primary_key(record.class))
+ end
+
+ def remove_keys
+ owner[reflection.foreign_key] = nil
+ end
+
+ def foreign_key_present?
+ owner._read_attribute(reflection.foreign_key)
+ end
+
+ # NOTE - for now, we're only supporting inverse setting from belongs_to back onto
+ # has_one associations.
+ def invertible_for?(record)
+ inverse = inverse_reflection_for(record)
+ inverse && inverse.has_one?
+ end
+
+ def target_id
+ if options[:primary_key]
+ owner.send(reflection.name).try(:id)
+ else
+ owner._read_attribute(reflection.foreign_key)
+ end
+ end
+
+ def stale_state
+ result = owner._read_attribute(reflection.foreign_key) { |n| owner.send(:missing_attribute, n, caller) }
+ result && result.to_s
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/associations/belongs_to_polymorphic_association.rb b/activerecord/lib/active_record/associations/belongs_to_polymorphic_association.rb
new file mode 100644
index 0000000000..13b4a084ea
--- /dev/null
+++ b/activerecord/lib/active_record/associations/belongs_to_polymorphic_association.rb
@@ -0,0 +1,42 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ # = Active Record Belongs To Polymorphic Association
+ module Associations
+ class BelongsToPolymorphicAssociation < BelongsToAssociation #:nodoc:
+ def klass
+ type = owner[reflection.foreign_type]
+ type.presence && type.constantize
+ end
+
+ private
+
+ def replace_keys(record)
+ super
+ owner[reflection.foreign_type] = record.class.base_class.name
+ end
+
+ def remove_keys
+ super
+ owner[reflection.foreign_type] = nil
+ end
+
+ def different_target?(record)
+ super || record.class != klass
+ end
+
+ def inverse_reflection_for(record)
+ reflection.polymorphic_inverse_of(record.class)
+ end
+
+ def raise_on_type_mismatch!(record)
+ # A polymorphic association cannot have a type mismatch, by definition
+ end
+
+ def stale_state
+ foreign_key = super
+ foreign_key && [foreign_key.to_s, owner[reflection.foreign_type].to_s]
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/associations/builder/association.rb b/activerecord/lib/active_record/associations/builder/association.rb
new file mode 100644
index 0000000000..496b16b58f
--- /dev/null
+++ b/activerecord/lib/active_record/associations/builder/association.rb
@@ -0,0 +1,145 @@
+# frozen_string_literal: true
+
+# This is the parent Association class which defines the variables
+# used by all associations.
+#
+# The hierarchy is defined as follows:
+# Association
+# - SingularAssociation
+# - BelongsToAssociation
+# - HasOneAssociation
+# - CollectionAssociation
+# - HasManyAssociation
+
+module ActiveRecord::Associations::Builder # :nodoc:
+ class Association #:nodoc:
+ class << self
+ attr_accessor :extensions
+ end
+ self.extensions = []
+
+ VALID_OPTIONS = [:class_name, :anonymous_class, :foreign_key, :validate] # :nodoc:
+
+ def self.build(model, name, scope, options, &block)
+ if model.dangerous_attribute_method?(name)
+ raise ArgumentError, "You tried to define an association named #{name} on the model #{model.name}, but " \
+ "this will conflict with a method #{name} already defined by Active Record. " \
+ "Please choose a different association name."
+ end
+
+ extension = define_extensions model, name, &block
+ reflection = create_reflection model, name, scope, options, extension
+ define_accessors model, reflection
+ define_callbacks model, reflection
+ define_validations model, reflection
+ reflection
+ end
+
+ def self.create_reflection(model, name, scope, options, extension = nil)
+ raise ArgumentError, "association names must be a Symbol" unless name.kind_of?(Symbol)
+
+ if scope.is_a?(Hash)
+ options = scope
+ scope = nil
+ end
+
+ validate_options(options)
+
+ scope = build_scope(scope, extension)
+
+ ActiveRecord::Reflection.create(macro, name, scope, options, model)
+ end
+
+ def self.build_scope(scope, extension)
+ new_scope = scope
+
+ if scope && scope.arity == 0
+ new_scope = proc { instance_exec(&scope) }
+ end
+
+ if extension
+ new_scope = wrap_scope new_scope, extension
+ end
+
+ new_scope
+ end
+
+ def self.wrap_scope(scope, extension)
+ scope
+ end
+
+ def self.macro
+ raise NotImplementedError
+ end
+
+ def self.valid_options(options)
+ VALID_OPTIONS + Association.extensions.flat_map(&:valid_options)
+ end
+
+ def self.validate_options(options)
+ options.assert_valid_keys(valid_options(options))
+ end
+
+ def self.define_extensions(model, name)
+ end
+
+ def self.define_callbacks(model, reflection)
+ if dependent = reflection.options[:dependent]
+ check_dependent_options(dependent)
+ add_destroy_callbacks(model, reflection)
+ end
+
+ Association.extensions.each do |extension|
+ extension.build model, reflection
+ end
+ end
+
+    # Defines the setter and getter methods for the association. For example:
+    #
+    #   class Post < ActiveRecord::Base
+    #     has_many :comments
+    #   end
+    #
+    # Post.first.comments and Post.first.comments= methods are defined by this method...
+ def self.define_accessors(model, reflection)
+ mixin = model.generated_association_methods
+ name = reflection.name
+ define_readers(mixin, name)
+ define_writers(mixin, name)
+ end
+
+ def self.define_readers(mixin, name)
+ mixin.class_eval <<-CODE, __FILE__, __LINE__ + 1
+ def #{name}(*args)
+ association(:#{name}).reader(*args)
+ end
+ CODE
+ end
+
+ def self.define_writers(mixin, name)
+ mixin.class_eval <<-CODE, __FILE__, __LINE__ + 1
+ def #{name}=(value)
+ association(:#{name}).writer(value)
+ end
+ CODE
+ end
+
+ def self.define_validations(model, reflection)
+ # noop
+ end
+
+ def self.valid_dependent_options
+ raise NotImplementedError
+ end
+
+ def self.check_dependent_options(dependent)
+ unless valid_dependent_options.include? dependent
+ raise ArgumentError, "The :dependent option must be one of #{valid_dependent_options}, but is :#{dependent}"
+ end
+ end
+
+ def self.add_destroy_callbacks(model, reflection)
+ name = reflection.name
+ model.before_destroy lambda { |o| o.association(name).handle_dependency }
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/associations/builder/belongs_to.rb b/activerecord/lib/active_record/associations/builder/belongs_to.rb
new file mode 100644
index 0000000000..9904ee4bed
--- /dev/null
+++ b/activerecord/lib/active_record/associations/builder/belongs_to.rb
@@ -0,0 +1,150 @@
+# frozen_string_literal: true
+
+module ActiveRecord::Associations::Builder # :nodoc:
+ class BelongsTo < SingularAssociation #:nodoc:
+ def self.macro
+ :belongs_to
+ end
+
+ def self.valid_options(options)
+ super + [:polymorphic, :touch, :counter_cache, :optional, :default]
+ end
+
+ def self.valid_dependent_options
+ [:destroy, :delete]
+ end
+
+ def self.define_callbacks(model, reflection)
+ super
+ add_counter_cache_callbacks(model, reflection) if reflection.options[:counter_cache]
+ add_touch_callbacks(model, reflection) if reflection.options[:touch]
+ add_default_callbacks(model, reflection) if reflection.options[:default]
+ end
+
+ def self.define_accessors(mixin, reflection)
+ super
+ add_counter_cache_methods mixin
+ end
+
+ def self.add_counter_cache_methods(mixin)
+ return if mixin.method_defined? :belongs_to_counter_cache_after_update
+
+ mixin.class_eval do
+ def belongs_to_counter_cache_after_update(reflection)
+ foreign_key = reflection.foreign_key
+ cache_column = reflection.counter_cache_column
+
+ if (@_after_replace_counter_called ||= false)
+ @_after_replace_counter_called = false
+ elsif saved_change_to_attribute?(foreign_key) && !new_record?
+ if reflection.polymorphic?
+ model = attribute_in_database(reflection.foreign_type).try(:constantize)
+ model_was = attribute_before_last_save(reflection.foreign_type).try(:constantize)
+ else
+ model = reflection.klass
+ model_was = reflection.klass
+ end
+
+ foreign_key_was = attribute_before_last_save foreign_key
+ foreign_key = attribute_in_database foreign_key
+
+ if foreign_key && model.respond_to?(:increment_counter)
+ model.increment_counter(cache_column, foreign_key)
+ end
+
+ if foreign_key_was && model_was.respond_to?(:decrement_counter)
+ model_was.decrement_counter(cache_column, foreign_key_was)
+ end
+ end
+ end
+ end
+ end
+
+ def self.add_counter_cache_callbacks(model, reflection)
+ cache_column = reflection.counter_cache_column
+
+ model.after_update lambda { |record|
+ record.belongs_to_counter_cache_after_update(reflection)
+ }
+
+ klass = reflection.class_name.safe_constantize
+ klass.attr_readonly cache_column if klass && klass.respond_to?(:attr_readonly)
+ end
+
+ def self.touch_record(o, changes, foreign_key, name, touch, touch_method) # :nodoc:
+ old_foreign_id = changes[foreign_key] && changes[foreign_key].first
+
+ if old_foreign_id
+ association = o.association(name)
+ reflection = association.reflection
+ if reflection.polymorphic?
+ foreign_type = reflection.foreign_type
+ klass = changes[foreign_type] && changes[foreign_type].first || o.public_send(foreign_type)
+ klass = klass.constantize
+ else
+ klass = association.klass
+ end
+ old_record = klass.find_by(klass.primary_key => old_foreign_id)
+
+ if old_record
+ if touch != true
+ old_record.send(touch_method, touch)
+ else
+ old_record.send(touch_method)
+ end
+ end
+ end
+
+ record = o.send name
+ if record && record.persisted?
+ if touch != true
+ record.send(touch_method, touch)
+ else
+ record.send(touch_method)
+ end
+ end
+ end
+
+ def self.add_touch_callbacks(model, reflection)
+ foreign_key = reflection.foreign_key
+ n = reflection.name
+ touch = reflection.options[:touch]
+
+ callback = lambda { |changes_method| lambda { |record|
+ BelongsTo.touch_record(record, record.send(changes_method), foreign_key, n, touch, belongs_to_touch_method)
+ }}
+
+ model.after_save callback.(:saved_changes), if: :saved_changes?
+ model.after_touch callback.(:changes_to_save)
+ model.after_destroy callback.(:changes_to_save)
+ end
+
+ def self.add_default_callbacks(model, reflection)
+ model.before_validation lambda { |o|
+ o.association(reflection.name).default(&reflection.options[:default])
+ }
+ end
+
+ def self.add_destroy_callbacks(model, reflection)
+ model.after_destroy lambda { |o| o.association(reflection.name).handle_dependency }
+ end
+
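+    # Normalizes the :required option into :optional; when neither option is
+    # given, the model's belongs_to_required_by_default setting decides whether
+    # a presence validation is added for the association.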
+ def self.define_validations(model, reflection)
+ if reflection.options.key?(:required)
+ reflection.options[:optional] = !reflection.options.delete(:required)
+ end
+
+ if reflection.options[:optional].nil?
+ required = model.belongs_to_required_by_default
+ else
+ required = !reflection.options[:optional]
+ end
+
+ super
+
+ if required
+ model.validates_presence_of reflection.name, message: :required
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/associations/builder/collection_association.rb b/activerecord/lib/active_record/associations/builder/collection_association.rb
new file mode 100644
index 0000000000..753fde5146
--- /dev/null
+++ b/activerecord/lib/active_record/associations/builder/collection_association.rb
@@ -0,0 +1,82 @@
+# frozen_string_literal: true
+
+require_relative "../../associations"
+
+module ActiveRecord::Associations::Builder # :nodoc:
+ class CollectionAssociation < Association #:nodoc:
+ CALLBACKS = [:before_add, :after_add, :before_remove, :after_remove]
+
+ def self.valid_options(options)
+ super + [:table_name, :before_add,
+ :after_add, :before_remove, :after_remove, :extend]
+ end
+
+ def self.define_callbacks(model, reflection)
+ super
+ name = reflection.name
+ options = reflection.options
+ CALLBACKS.each { |callback_name|
+ define_callback(model, callback_name, name, options)
+ }
+ end
+
+ def self.define_extensions(model, name)
+ if block_given?
+ extension_module_name = "#{model.name.demodulize}#{name.to_s.camelize}AssociationExtension"
+ extension = Module.new(&Proc.new)
+ model.parent.const_set(extension_module_name, extension)
+ end
+ end
+
+ def self.define_callback(model, callback_name, name, options)
+ full_callback_name = "#{callback_name}_for_#{name}"
+
+      # TODO: why do I need method_defined? I think it's because of the inheritance chain
+ model.class_attribute full_callback_name unless model.method_defined?(full_callback_name)
+ callbacks = Array(options[callback_name.to_sym]).map do |callback|
+ case callback
+ when Symbol
+ ->(method, owner, record) { owner.send(callback, record) }
+ when Proc
+ ->(method, owner, record) { callback.call(owner, record) }
+ else
+ ->(method, owner, record) { callback.send(method, owner, record) }
+ end
+ end
+ model.send "#{full_callback_name}=", callbacks
+ end
+
+ # Defines the setter and getter methods for the collection_singular_ids.
+ def self.define_readers(mixin, name)
+ super
+
+ mixin.class_eval <<-CODE, __FILE__, __LINE__ + 1
+ def #{name.to_s.singularize}_ids
+ association(:#{name}).ids_reader
+ end
+ CODE
+ end
+
+ def self.define_writers(mixin, name)
+ super
+
+ mixin.class_eval <<-CODE, __FILE__, __LINE__ + 1
+ def #{name.to_s.singularize}_ids=(ids)
+ association(:#{name}).ids_writer(ids)
+ end
+ CODE
+ end
+
+ def self.wrap_scope(scope, mod)
+ if scope
+ if scope.arity > 0
+ proc { |owner| instance_exec(owner, &scope).extending(mod) }
+ else
+ proc { instance_exec(&scope).extending(mod) }
+ end
+ else
+ proc { extending(mod) }
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/associations/builder/has_and_belongs_to_many.rb b/activerecord/lib/active_record/associations/builder/has_and_belongs_to_many.rb
new file mode 100644
index 0000000000..12fcfbcd45
--- /dev/null
+++ b/activerecord/lib/active_record/associations/builder/has_and_belongs_to_many.rb
@@ -0,0 +1,135 @@
+# frozen_string_literal: true
+
+module ActiveRecord::Associations::Builder # :nodoc:
+ class HasAndBelongsToMany # :nodoc:
+ class JoinTableResolver # :nodoc:
+ KnownTable = Struct.new :join_table
+
+ class KnownClass # :nodoc:
+ def initialize(lhs_class, rhs_class_name)
+ @lhs_class = lhs_class
+ @rhs_class_name = rhs_class_name
+ @join_table = nil
+ end
+
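+        # The join table name is both table names sorted and joined with "_",
+        # with any shared prefix (up to a "." or "_") collapsed so it appears
+        # only once (e.g. "catalog_categories" and "catalog_products" become
+        # "catalog_categories_products").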
+ def join_table
+ @join_table ||= [@lhs_class.table_name, klass.table_name].sort.join("\0").gsub(/^(.*[._])(.+)\0\1(.+)/, '\1\2_\3').tr("\0", "_")
+ end
+
+ private
+
+ def klass
+ @lhs_class.send(:compute_type, @rhs_class_name)
+ end
+ end
+
+ def self.build(lhs_class, name, options)
+ if options[:join_table]
+ KnownTable.new options[:join_table].to_s
+ else
+ class_name = options.fetch(:class_name) {
+ name.to_s.camelize.singularize
+ }
+ KnownClass.new lhs_class, class_name.to_s
+ end
+ end
+ end
+
+ attr_reader :lhs_model, :association_name, :options
+
+ def initialize(association_name, lhs_model, options)
+ @association_name = association_name
+ @lhs_model = lhs_model
+ @options = options
+ end
+
+ def through_model
+ habtm = JoinTableResolver.build lhs_model, association_name, options
+
+ join_model = Class.new(ActiveRecord::Base) {
+ class << self;
+ attr_accessor :left_model
+ attr_accessor :name
+ attr_accessor :table_name_resolver
+ attr_accessor :left_reflection
+ attr_accessor :right_reflection
+ end
+
+ def self.table_name
+ table_name_resolver.join_table
+ end
+
+ def self.compute_type(class_name)
+ left_model.compute_type class_name
+ end
+
+ def self.add_left_association(name, options)
+ belongs_to name, required: false, **options
+ self.left_reflection = _reflect_on_association(name)
+ end
+
+ def self.add_right_association(name, options)
+ rhs_name = name.to_s.singularize.to_sym
+ belongs_to rhs_name, required: false, **options
+ self.right_reflection = _reflect_on_association(rhs_name)
+ end
+
+ def self.retrieve_connection
+ left_model.retrieve_connection
+ end
+
+ private
+
+ def self.suppress_composite_primary_key(pk)
+ pk unless pk.is_a?(Array)
+ end
+ }
+
+ join_model.name = "HABTM_#{association_name.to_s.camelize}"
+ join_model.table_name_resolver = habtm
+ join_model.left_model = lhs_model
+
+ join_model.add_left_association :left_side, anonymous_class: lhs_model
+ join_model.add_right_association association_name, belongs_to_options(options)
+ join_model
+ end
+
+ def middle_reflection(join_model)
+ middle_name = [lhs_model.name.downcase.pluralize,
+ association_name].join("_".freeze).gsub("::".freeze, "_".freeze).to_sym
+ middle_options = middle_options join_model
+
+ HasMany.create_reflection(lhs_model,
+ middle_name,
+ nil,
+ middle_options)
+ end
+
+ private
+
+ def middle_options(join_model)
+ middle_options = {}
+ middle_options[:class_name] = "#{lhs_model.name}::#{join_model.name}"
+ middle_options[:source] = join_model.left_reflection.name
+ if options.key? :foreign_key
+ middle_options[:foreign_key] = options[:foreign_key]
+ end
+ middle_options
+ end
+
+ def belongs_to_options(options)
+ rhs_options = {}
+
+ if options.key? :class_name
+ rhs_options[:foreign_key] = options[:class_name].to_s.foreign_key
+ rhs_options[:class_name] = options[:class_name]
+ end
+
+ if options.key? :association_foreign_key
+ rhs_options[:foreign_key] = options[:association_foreign_key]
+ end
+
+ rhs_options
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/associations/builder/has_many.rb b/activerecord/lib/active_record/associations/builder/has_many.rb
new file mode 100644
index 0000000000..5b9617bc6d
--- /dev/null
+++ b/activerecord/lib/active_record/associations/builder/has_many.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+module ActiveRecord::Associations::Builder # :nodoc:
+ class HasMany < CollectionAssociation #:nodoc:
+ def self.macro
+ :has_many
+ end
+
+ def self.valid_options(options)
+ super + [:primary_key, :dependent, :as, :through, :source, :source_type, :inverse_of, :counter_cache, :join_table, :foreign_type, :index_errors]
+ end
+
+ def self.valid_dependent_options
+ [:destroy, :delete_all, :nullify, :restrict_with_error, :restrict_with_exception]
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/associations/builder/has_one.rb b/activerecord/lib/active_record/associations/builder/has_one.rb
new file mode 100644
index 0000000000..bfb37d6eee
--- /dev/null
+++ b/activerecord/lib/active_record/associations/builder/has_one.rb
@@ -0,0 +1,30 @@
+# frozen_string_literal: true
+
+module ActiveRecord::Associations::Builder # :nodoc:
+ class HasOne < SingularAssociation #:nodoc:
+ def self.macro
+ :has_one
+ end
+
+ def self.valid_options(options)
+ valid = super + [:as]
+ valid += [:through, :source, :source_type] if options[:through]
+ valid
+ end
+
+ def self.valid_dependent_options
+ [:destroy, :delete, :nullify, :restrict_with_error, :restrict_with_exception]
+ end
+
+ def self.add_destroy_callbacks(model, reflection)
+ super unless reflection.options[:through]
+ end
+
+ def self.define_validations(model, reflection)
+ super
+ if reflection.options[:required]
+ model.validates_presence_of reflection.name, message: :required
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/associations/builder/singular_association.rb b/activerecord/lib/active_record/associations/builder/singular_association.rb
new file mode 100644
index 0000000000..0a02ef4cc1
--- /dev/null
+++ b/activerecord/lib/active_record/associations/builder/singular_association.rb
@@ -0,0 +1,42 @@
+# frozen_string_literal: true
+
+# This class is inherited by the has_one and belongs_to association classes
+
+module ActiveRecord::Associations::Builder # :nodoc:
+ class SingularAssociation < Association #:nodoc:
+ def self.valid_options(options)
+ super + [:foreign_type, :dependent, :primary_key, :inverse_of, :required]
+ end
+
+ def self.define_accessors(model, reflection)
+ super
+ mixin = model.generated_association_methods
+ name = reflection.name
+
+ define_constructors(mixin, name) if reflection.constructable?
+
+ mixin.class_eval <<-CODE, __FILE__, __LINE__ + 1
+ def reload_#{name}
+ association(:#{name}).force_reload_reader
+ end
+ CODE
+ end
+
+ # Defines the (build|create)_association methods for belongs_to or has_one association
+ def self.define_constructors(mixin, name)
+ mixin.class_eval <<-CODE, __FILE__, __LINE__ + 1
+ def build_#{name}(*args, &block)
+ association(:#{name}).build(*args, &block)
+ end
+
+ def create_#{name}(*args, &block)
+ association(:#{name}).create(*args, &block)
+ end
+
+ def create_#{name}!(*args, &block)
+ association(:#{name}).create!(*args, &block)
+ end
+ CODE
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/associations/collection_association.rb b/activerecord/lib/active_record/associations/collection_association.rb
new file mode 100644
index 0000000000..ed2e6d1ae4
--- /dev/null
+++ b/activerecord/lib/active_record/associations/collection_association.rb
@@ -0,0 +1,506 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ module Associations
+ # = Active Record Association Collection
+ #
+ # CollectionAssociation is an abstract class that provides common stuff to
+ # ease the implementation of association proxies that represent
+ # collections. See the class hierarchy in Association.
+ #
+ # CollectionAssociation:
+ # HasManyAssociation => has_many
+ # HasManyThroughAssociation + ThroughAssociation => has_many :through
+ #
+ # The CollectionAssociation class provides common methods to the collections
+ # defined by +has_and_belongs_to_many+, +has_many+ or +has_many+ with
+    # the <tt>:through</tt> option.
+ #
+ # You need to be careful with assumptions regarding the target: The proxy
+ # does not fetch records from the database until it needs them, but new
+ # ones created with +build+ are added to the target. So, the target may be
+ # non-empty and still lack children waiting to be read from the database.
+    # If you look directly at the database, you cannot assume that's the entire
+ # collection because new records may have been added to the target, etc.
+ #
+ # If you need to work on all current children, new and existing records,
+ # +load_target+ and the +loaded+ flag are your friends.
+ class CollectionAssociation < Association #:nodoc:
+ # Implements the reader method, e.g. foo.items for Foo.has_many :items
+ def reader
+ if stale_target?
+ reload
+ end
+
+ @proxy ||= CollectionProxy.create(klass, self)
+ @proxy.reset_scope
+ end
+
+ # Implements the writer method, e.g. foo.items= for Foo.has_many :items
+ def writer(records)
+ replace(records)
+ end
+
+ # Implements the ids reader method, e.g. foo.item_ids for Foo.has_many :items
+ def ids_reader
+ if loaded?
+ target.pluck(reflection.association_primary_key)
+ else
+ @association_ids ||= scope.pluck(reflection.association_primary_key)
+ end
+ end
+
+ # Implements the ids writer method, e.g. foo.item_ids= for Foo.has_many :items
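+      # Replaces the collection with the records matching +ids+, in that order, and
+      # raises ActiveRecord::RecordNotFound if any of the ids cannot be found.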
+ def ids_writer(ids)
+ pk_type = reflection.association_primary_key_type
+ ids = Array(ids).reject(&:blank?)
+ ids.map! { |i| pk_type.cast(i) }
+ records = klass.where(reflection.association_primary_key => ids).index_by do |r|
+ r.send(reflection.association_primary_key)
+ end.values_at(*ids).compact
+ if records.size != ids.size
+ klass.all.raise_record_not_found_exception!(ids, records.size, ids.size, reflection.association_primary_key)
+ else
+ replace(records)
+ end
+ end
+
+ def reset
+ super
+ @target = []
+ @association_ids = nil
+ end
+
+ def find(*args)
+ if block_given?
+ load_target.find(*args) { |*block_args| yield(*block_args) }
+ else
+ if options[:inverse_of] && loaded?
+ args_flatten = args.flatten
+ raise RecordNotFound, "Couldn't find #{scope.klass.name} without an ID" if args_flatten.blank?
+ result = find_by_scan(*args)
+
+ result_size = Array(result).size
+ if !result || result_size != args_flatten.size
+ scope.raise_record_not_found_exception!(args_flatten, result_size, args_flatten.size)
+ else
+ result
+ end
+ else
+ scope.find(*args)
+ end
+ end
+ end
+
+ def build(attributes = {}, &block)
+ if attributes.is_a?(Array)
+ attributes.collect { |attr| build(attr, &block) }
+ else
+ add_to_target(build_record(attributes)) do |record|
+ yield(record) if block_given?
+ end
+ end
+ end
+
+ # Add +records+ to this association. Returns +self+ so method calls may
+ # be chained. Since << flattens its argument list and inserts each record,
+ # +push+ and +concat+ behave identically.
+ def concat(*records)
+ records = records.flatten
+ if owner.new_record?
+ load_target
+ concat_records(records)
+ else
+ transaction { concat_records(records) }
+ end
+ end
+
+ # Starts a transaction in the association class's database connection.
+ #
+ # class Author < ActiveRecord::Base
+ # has_many :books
+ # end
+ #
+ # Author.first.books.transaction do
+ # # same effect as calling Book.transaction
+ # end
+ def transaction(*args)
+ reflection.klass.transaction(*args) do
+ yield
+ end
+ end
+
+      # Removes all records from the association without calling callbacks
+      # on the associated records. It honors the +:dependent+ option. However,
+      # if the +:dependent+ value is +:destroy+, the +:delete_all+ deletion
+      # strategy for the association is applied instead.
+ #
+ # You can force a particular deletion strategy by passing a parameter.
+ #
+ # Example:
+ #
+ # @author.books.delete_all(:nullify)
+ # @author.books.delete_all(:delete_all)
+ #
+ # See delete for more info.
+ def delete_all(dependent = nil)
+ if dependent && ![:nullify, :delete_all].include?(dependent)
+ raise ArgumentError, "Valid values are :nullify or :delete_all"
+ end
+
+ dependent = if dependent
+ dependent
+ elsif options[:dependent] == :destroy
+ :delete_all
+ else
+ options[:dependent]
+ end
+
+ delete_or_nullify_all_records(dependent).tap do
+ reset
+ loaded!
+ end
+ end
+
+ # Destroy all the records from this association.
+ #
+ # See destroy for more info.
+ def destroy_all
+ destroy(load_target).tap do
+ reset
+ loaded!
+ end
+ end
+
+ # Removes +records+ from this association calling +before_remove+ and
+ # +after_remove+ callbacks.
+ #
+ # This method is abstract in the sense that +delete_records+ has to be
+ # provided by descendants. Note this method does not imply the records
+ # are actually removed from the database, that depends precisely on
+ # +delete_records+. They are in any case removed from the collection.
+ def delete(*records)
+ return if records.empty?
+ records = find(records) if records.any? { |record| record.kind_of?(Integer) || record.kind_of?(String) }
+ delete_or_destroy(records, options[:dependent])
+ end
+
+ # Deletes the +records+ and removes them from this association calling
+      # +before_remove+, +after_remove+, +before_destroy+ and +after_destroy+ callbacks.
+ #
+ # Note that this method removes records from the database ignoring the
+ # +:dependent+ option.
+ def destroy(*records)
+ return if records.empty?
+ records = find(records) if records.any? { |record| record.kind_of?(Integer) || record.kind_of?(String) }
+ delete_or_destroy(records, :destroy)
+ end
+
+ # Returns the size of the collection by executing a SELECT COUNT(*)
+ # query if the collection hasn't been loaded, and calling
+ # <tt>collection.size</tt> if it has.
+ #
+ # If the collection has already been loaded, +size+ and +length+ are
+ # equivalent. If not, and you are going to need the records anyway,
+ # +length+ will take one less query. Otherwise +size+ is more efficient.
+ #
+ # This method is abstract in the sense that it relies on
+ # +count_records+, which is a method descendants have to provide.
+ def size
+ if !find_target? || loaded?
+ target.size
+ elsif !association_scope.group_values.empty?
+ load_target.size
+ elsif !association_scope.distinct_value && target.is_a?(Array)
+ unsaved_records = target.select(&:new_record?)
+ unsaved_records.size + count_records
+ else
+ count_records
+ end
+ end
+
+ # Returns true if the collection is empty.
+ #
+ # If the collection has been loaded
+ # it is equivalent to <tt>collection.size.zero?</tt>. If the
+ # collection has not been loaded, it is equivalent to
+ # <tt>!collection.exists?</tt>. If the collection has not already been
+ # loaded and you are going to fetch the records anyway it is better to
+ # check <tt>collection.length.zero?</tt>.
+ def empty?
+ if loaded?
+ size.zero?
+ else
+ @target.blank? && !scope.exists?
+ end
+ end
+
+ # Replace this collection with +other_array+. This will perform a diff
+ # and delete/add only records that have changed.
+ def replace(other_array)
+ other_array.each { |val| raise_on_type_mismatch!(val) }
+ original_target = load_target.dup
+
+ if owner.new_record?
+ replace_records(other_array, original_target)
+ else
+ replace_common_records_in_memory(other_array, original_target)
+ if other_array != original_target
+ transaction { replace_records(other_array, original_target) }
+ else
+ other_array
+ end
+ end
+ end
+
+ def include?(record)
+ if record.is_a?(reflection.klass)
+ if record.new_record?
+ include_in_memory?(record)
+ else
+ loaded? ? target.include?(record) : scope.exists?(record.id)
+ end
+ else
+ false
+ end
+ end
+
+ def load_target
+ if find_target?
+ @target = merge_target_lists(find_target, target)
+ end
+
+ loaded!
+ target
+ end
+
+ def add_to_target(record, skip_callbacks = false, &block)
+ if association_scope.distinct_value
+ index = @target.index(record)
+ end
+ replace_on_target(record, index, skip_callbacks, &block)
+ end
+
+ def scope
+ scope = super
+ scope.none! if null_scope?
+ scope
+ end
+
+ def null_scope?
+ owner.new_record? && !foreign_key_present?
+ end
+
+ def find_from_target?
+ loaded? ||
+ owner.new_record? ||
+ target.any? { |record| record.new_record? || record.changed? }
+ end
+
+ private
+
+ def find_target
+ scope = self.scope
+ return scope.to_a if skip_statement_cache?(scope)
+
+ conn = klass.connection
+ sc = reflection.association_scope_cache(conn, owner) do
+ StatementCache.create(conn) { |params|
+ as = AssociationScope.create { params.bind }
+ target_scope.merge!(as.scope(self))
+ }
+ end
+
+ binds = AssociationScope.get_bind_values(owner, reflection.chain)
+ sc.execute(binds, klass, conn) do |record|
+ set_inverse_instance(record)
+ end
+ end
+
+ # We have some records loaded from the database (persisted) and some that are
+ # in-memory (memory). The same record may be represented in the persisted array
+ # and in the memory array.
+ #
+ # So the task of this method is to merge them according to the following rules:
+ #
+ # * The final array must not have duplicates
+ # * The order of the persisted array is to be preserved
+ # * Any changes made to attributes on objects in the memory array are to be preserved
+ # * Otherwise, attributes should have the value found in the database
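+ #
+ # A hypothetical illustration of these rules (the records shown are made up):
+ #
+ #   persisted # => [#<Pet id: 1, name: "Snoop">]
+ #   memory    # => [#<Pet id: 1, name: "Snoopy">]  # name changed in memory, unsaved
+ #
+ #   merge_target_lists(persisted, memory)
+ #   # => [#<Pet id: 1, name: "Snoopy">]            # in-memory change preserved,
+ #   #                                              # unchanged attributes refreshed from the database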
+ def merge_target_lists(persisted, memory)
+ return persisted if memory.empty?
+ return memory if persisted.empty?
+
+ persisted.map! do |record|
+ if mem_record = memory.delete(record)
+
+ ((record.attribute_names & mem_record.attribute_names) - mem_record.changed_attribute_names_to_save).each do |name|
+ mem_record[name] = record[name]
+ end
+
+ mem_record
+ else
+ record
+ end
+ end
+
+ persisted + memory
+ end
+
+ def _create_record(attributes, raise = false, &block)
+ unless owner.persisted?
+ raise ActiveRecord::RecordNotSaved, "You cannot call create unless the parent is saved"
+ end
+
+ if attributes.is_a?(Array)
+ attributes.collect { |attr| _create_record(attr, raise, &block) }
+ else
+ transaction do
+ add_to_target(build_record(attributes)) do |record|
+ yield(record) if block_given?
+ insert_record(record, true, raise) {
+ @_was_loaded = loaded?
+ @association_ids = nil
+ }
+ end
+ end
+ end
+ end
+
+ # Inserts the given record into the association collection by saving it,
+ # raising on a failed save when +raise+ is true.
+ def insert_record(record, validate = true, raise = false, &block)
+ if raise
+ record.save!(validate: validate, &block)
+ else
+ record.save(validate: validate, &block)
+ end
+ end
+
+ def delete_or_destroy(records, method)
+ records = records.flatten
+ records.each { |record| raise_on_type_mismatch!(record) }
+ existing_records = records.reject(&:new_record?)
+
+ if existing_records.empty?
+ remove_records(existing_records, records, method)
+ else
+ transaction { remove_records(existing_records, records, method) }
+ end
+ end
+
+ def remove_records(existing_records, records, method)
+ records.each { |record| callback(:before_remove, record) }
+
+ delete_records(existing_records, method) if existing_records.any?
+ records.each { |record| target.delete(record) }
+
+ records.each { |record| callback(:after_remove, record) }
+ end
+
+ # Delete the given records from the association,
+ # using one of the methods +:destroy+, +:delete_all+
+ # or +:nullify+ (or +nil+, in which case a default is used).
+ def delete_records(records, method)
+ raise NotImplementedError
+ end
+
+ def replace_records(new_target, original_target)
+ delete(target - new_target)
+
+ unless concat(new_target - target)
+ @target = original_target
+ raise RecordNotSaved, "Failed to replace #{reflection.name} because one or more of the " \
+ "new records could not be saved."
+ end
+
+ target
+ end
+
+ def replace_common_records_in_memory(new_target, original_target)
+ common_records = new_target & original_target
+ common_records.each do |record|
+ skip_callbacks = true
+ replace_on_target(record, @target.index(record), skip_callbacks)
+ end
+ end
+
+ def concat_records(records, raise = false)
+ result = true
+
+ records.each do |record|
+ raise_on_type_mismatch!(record)
+ add_to_target(record) do
+ unless owner.new_record?
+ result &&= insert_record(record, true, raise) {
+ @_was_loaded = loaded?
+ @association_ids = nil
+ }
+ end
+ end
+ end
+
+ result && records
+ end
+
+ def replace_on_target(record, index, skip_callbacks)
+ callback(:before_add, record) unless skip_callbacks
+
+ set_inverse_instance(record)
+
+ @_was_loaded = true
+
+ yield(record) if block_given?
+
+ if index
+ target[index] = record
+ elsif @_was_loaded || !loaded?
+ target << record
+ end
+
+ callback(:after_add, record) unless skip_callbacks
+
+ record
+ ensure
+ @_was_loaded = nil
+ end
+
+ def callback(method, record)
+ callbacks_for(method).each do |callback|
+ callback.call(method, owner, record)
+ end
+ end
+
+ def callbacks_for(callback_name)
+ full_callback_name = "#{callback_name}_for_#{reflection.name}"
+ owner.class.send(full_callback_name)
+ end
+
+ def include_in_memory?(record)
+ if reflection.is_a?(ActiveRecord::Reflection::ThroughReflection)
+ assoc = owner.association(reflection.through_reflection.name)
+ assoc.reader.any? { |source|
+ target_reflection = source.send(reflection.source_reflection.name)
+ target_reflection.respond_to?(:include?) ? target_reflection.include?(record) : target_reflection == record
+ } || target.include?(record)
+ else
+ target.include?(record)
+ end
+ end
+
+ # If the :inverse_of option has been
+ # specified, then #find scans the entire collection.
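+ #
+ # A rough sketch of the effect (assuming a +person.pets+ association declared
+ # with +inverse_of+ whose target is already loaded):
+ #
+ #   person.pets.find(2)    # detected in the loaded target, no extra query
+ #   person.pets.find(2, 3) # returns an array, still no extra query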
+ def find_by_scan(*args)
+ expects_array = args.first.kind_of?(Array)
+ ids = args.flatten.compact.map(&:to_s).uniq
+
+ if ids.size == 1
+ id = ids.first
+ record = load_target.detect { |r| id == r.id.to_s }
+ expects_array ? [ record ] : record
+ else
+ load_target.select { |r| ids.include?(r.id.to_s) }
+ end
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/associations/collection_proxy.rb b/activerecord/lib/active_record/associations/collection_proxy.rb
new file mode 100644
index 0000000000..0678b07699
--- /dev/null
+++ b/activerecord/lib/active_record/associations/collection_proxy.rb
@@ -0,0 +1,1158 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ module Associations
+ # Association proxies in Active Record are middlemen between the object that
+ # holds the association, known as the <tt>@owner</tt>, and the actual associated
+ # object, known as the <tt>@target</tt>. The kind of association any proxy is
+ # about is available in <tt>@reflection</tt>. That's an instance of the class
+ # ActiveRecord::Reflection::AssociationReflection.
+ #
+ # For example, given
+ #
+ # class Blog < ActiveRecord::Base
+ # has_many :posts
+ # end
+ #
+ # blog = Blog.first
+ #
+ # the association proxy in <tt>blog.posts</tt> has the object in +blog+ as
+ # <tt>@owner</tt>, the collection of its posts as <tt>@target</tt>, and
+ # the <tt>@reflection</tt> object represents a <tt>:has_many</tt> macro.
+ #
+ # This class delegates unknown methods to <tt>@target</tt> via
+ # <tt>method_missing</tt>.
+ #
+ # The <tt>@target</tt> object is not \loaded until needed. For example,
+ #
+ # blog.posts.count
+ #
+ # is computed directly through SQL and does not by itself trigger the
+ # instantiation of the actual post records.
+ class CollectionProxy < Relation
+ def initialize(klass, association) #:nodoc:
+ @association = association
+ super klass, klass.arel_table, klass.predicate_builder
+
+ extensions = association.extensions
+ extend(*extensions) if extensions.any?
+ end
+
+ def target
+ @association.target
+ end
+
+ def load_target
+ @association.load_target
+ end
+
+ # Returns +true+ if the association has been loaded, otherwise +false+.
+ #
+ # person.pets.loaded? # => false
+ # person.pets
+ # person.pets.loaded? # => true
+ def loaded?
+ @association.loaded?
+ end
+
+ ##
+ # :method: select
+ #
+ # :call-seq:
+ # select(*fields, &block)
+ #
+ # Works in two ways.
+ #
+ # *First:* Specify a subset of fields to be selected from the result set.
+ #
+ # class Person < ActiveRecord::Base
+ # has_many :pets
+ # end
+ #
+ # person.pets
+ # # => [
+ # # #<Pet id: 1, name: "Fancy-Fancy", person_id: 1>,
+ # # #<Pet id: 2, name: "Spook", person_id: 1>,
+ # # #<Pet id: 3, name: "Choo-Choo", person_id: 1>
+ # # ]
+ #
+ # person.pets.select(:name)
+ # # => [
+ # # #<Pet id: nil, name: "Fancy-Fancy">,
+ # # #<Pet id: nil, name: "Spook">,
+ # # #<Pet id: nil, name: "Choo-Choo">
+ # # ]
+ #
+ # person.pets.select(:id, :name)
+ # # => [
+ # # #<Pet id: 1, name: "Fancy-Fancy">,
+ # # #<Pet id: 2, name: "Spook">,
+ # # #<Pet id: 3, name: "Choo-Choo">
+ # # ]
+ #
+ # Be careful because this also means you're initializing a model
+ # object with only the fields that you've selected. If you attempt
+ # to access a field other than +id+ that is not in the initialized record
+ # you'll receive:
+ #
+ # person.pets.select(:name).first.person_id
+ # # => ActiveModel::MissingAttributeError: missing attribute: person_id
+ #
+ # *Second:* You can pass a block so it can be used just like Array#select.
+ # This builds an array of objects from the database for the scope,
+ # converting them into an array and iterating through them using
+ # Array#select.
+ #
+ # person.pets.select { |pet| pet.name =~ /oo/ }
+ # # => [
+ # # #<Pet id: 2, name: "Spook", person_id: 1>,
+ # # #<Pet id: 3, name: "Choo-Choo", person_id: 1>
+ # # ]
+
+ # Finds an object in the collection responding to the +id+. Uses the same
+ # rules as ActiveRecord::Base.find. Raises an ActiveRecord::RecordNotFound
+ # error if the object cannot be found.
+ #
+ # class Person < ActiveRecord::Base
+ # has_many :pets
+ # end
+ #
+ # person.pets
+ # # => [
+ # # #<Pet id: 1, name: "Fancy-Fancy", person_id: 1>,
+ # # #<Pet id: 2, name: "Spook", person_id: 1>,
+ # # #<Pet id: 3, name: "Choo-Choo", person_id: 1>
+ # # ]
+ #
+ # person.pets.find(1) # => #<Pet id: 1, name: "Fancy-Fancy", person_id: 1>
+ # person.pets.find(4) # => ActiveRecord::RecordNotFound: Couldn't find Pet with 'id'=4
+ #
+ # person.pets.find(2) { |pet| pet.name.downcase! }
+ # # => #<Pet id: 2, name: "fancy-fancy", person_id: 1>
+ #
+ # person.pets.find(2, 3)
+ # # => [
+ # # #<Pet id: 2, name: "Spook", person_id: 1>,
+ # # #<Pet id: 3, name: "Choo-Choo", person_id: 1>
+ # # ]
+ def find(*args, &block)
+ @association.find(*args, &block)
+ end
+
+ ##
+ # :method: first
+ #
+ # :call-seq:
+ # first(limit = nil)
+ #
+ # Returns the first record, or the first +n+ records, from the collection.
+ # If the collection is empty, the first form returns +nil+, and the second
+ # form returns an empty array.
+ #
+ # class Person < ActiveRecord::Base
+ # has_many :pets
+ # end
+ #
+ # person.pets
+ # # => [
+ # # #<Pet id: 1, name: "Fancy-Fancy", person_id: 1>,
+ # # #<Pet id: 2, name: "Spook", person_id: 1>,
+ # # #<Pet id: 3, name: "Choo-Choo", person_id: 1>
+ # # ]
+ #
+ # person.pets.first # => #<Pet id: 1, name: "Fancy-Fancy", person_id: 1>
+ #
+ # person.pets.first(2)
+ # # => [
+ # # #<Pet id: 1, name: "Fancy-Fancy", person_id: 1>,
+ # # #<Pet id: 2, name: "Spook", person_id: 1>
+ # # ]
+ #
+ # another_person_without.pets # => []
+ # another_person_without.pets.first # => nil
+ # another_person_without.pets.first(3) # => []
+
+ ##
+ # :method: second
+ #
+ # :call-seq:
+ # second()
+ #
+ # Same as #first except returns only the second record.
+
+ ##
+ # :method: third
+ #
+ # :call-seq:
+ # third()
+ #
+ # Same as #first except returns only the third record.
+
+ ##
+ # :method: fourth
+ #
+ # :call-seq:
+ # fourth()
+ #
+ # Same as #first except returns only the fourth record.
+
+ ##
+ # :method: fifth
+ #
+ # :call-seq:
+ # fifth()
+ #
+ # Same as #first except returns only the fifth record.
+
+ ##
+ # :method: forty_two
+ #
+ # :call-seq:
+ # forty_two()
+ #
+ # Same as #first except returns only the forty second record.
+ # Also known as accessing "the reddit".
+
+ ##
+ # :method: third_to_last
+ #
+ # :call-seq:
+ # third_to_last()
+ #
+ # Same as #first except returns only the third-to-last record.
+
+ ##
+ # :method: second_to_last
+ #
+ # :call-seq:
+ # second_to_last()
+ #
+ # Same as #first except returns only the second-to-last record.
+
+ # Returns the last record, or the last +n+ records, from the collection.
+ # If the collection is empty, the first form returns +nil+, and the second
+ # form returns an empty array.
+ #
+ # class Person < ActiveRecord::Base
+ # has_many :pets
+ # end
+ #
+ # person.pets
+ # # => [
+ # # #<Pet id: 1, name: "Fancy-Fancy", person_id: 1>,
+ # # #<Pet id: 2, name: "Spook", person_id: 1>,
+ # # #<Pet id: 3, name: "Choo-Choo", person_id: 1>
+ # # ]
+ #
+ # person.pets.last # => #<Pet id: 3, name: "Choo-Choo", person_id: 1>
+ #
+ # person.pets.last(2)
+ # # => [
+ # # #<Pet id: 2, name: "Spook", person_id: 1>,
+ # # #<Pet id: 3, name: "Choo-Choo", person_id: 1>
+ # # ]
+ #
+ # another_person_without.pets # => []
+ # another_person_without.pets.last # => nil
+ # another_person_without.pets.last(3) # => []
+ def last(limit = nil)
+ load_target if find_from_target?
+ super
+ end
+
+ # Gives a record (or N records if a parameter is supplied) from the collection
+ # using the same rules as <tt>ActiveRecord::Base.take</tt>.
+ #
+ # class Person < ActiveRecord::Base
+ # has_many :pets
+ # end
+ #
+ # person.pets
+ # # => [
+ # # #<Pet id: 1, name: "Fancy-Fancy", person_id: 1>,
+ # # #<Pet id: 2, name: "Spook", person_id: 1>,
+ # # #<Pet id: 3, name: "Choo-Choo", person_id: 1>
+ # # ]
+ #
+ # person.pets.take # => #<Pet id: 1, name: "Fancy-Fancy", person_id: 1>
+ #
+ # person.pets.take(2)
+ # # => [
+ # # #<Pet id: 1, name: "Fancy-Fancy", person_id: 1>,
+ # # #<Pet id: 2, name: "Spook", person_id: 1>
+ # # ]
+ #
+ # another_person_without.pets # => []
+ # another_person_without.pets.take # => nil
+ # another_person_without.pets.take(2) # => []
+ def take(limit = nil)
+ load_target if find_from_target?
+ super
+ end
+
+ # Returns a new object of the collection type that has been instantiated
+ # with +attributes+ and linked to this object, but has not yet been saved.
+ # You can pass an array of attributes hashes; this will return an array
+ # with the new objects.
+ #
+ # class Person
+ # has_many :pets
+ # end
+ #
+ # person.pets.build
+ # # => #<Pet id: nil, name: nil, person_id: 1>
+ #
+ # person.pets.build(name: 'Fancy-Fancy')
+ # # => #<Pet id: nil, name: "Fancy-Fancy", person_id: 1>
+ #
+ # person.pets.build([{name: 'Spook'}, {name: 'Choo-Choo'}, {name: 'Brain'}])
+ # # => [
+ # # #<Pet id: nil, name: "Spook", person_id: 1>,
+ # # #<Pet id: nil, name: "Choo-Choo", person_id: 1>,
+ # # #<Pet id: nil, name: "Brain", person_id: 1>
+ # # ]
+ #
+ # person.pets.size # => 5 # size of the collection
+ # person.pets.count # => 0 # count from database
+ def build(attributes = {}, &block)
+ @association.build(attributes, &block)
+ end
+ alias_method :new, :build
+
+ # Returns a new object of the collection type that has been instantiated with
+ # attributes, linked to this object and that has already been saved (if it
+ # passes the validations).
+ #
+ # class Person
+ # has_many :pets
+ # end
+ #
+ # person.pets.create(name: 'Fancy-Fancy')
+ # # => #<Pet id: 1, name: "Fancy-Fancy", person_id: 1>
+ #
+ # person.pets.create([{name: 'Spook'}, {name: 'Choo-Choo'}])
+ # # => [
+ # # #<Pet id: 2, name: "Spook", person_id: 1>,
+ # # #<Pet id: 3, name: "Choo-Choo", person_id: 1>
+ # # ]
+ #
+ # person.pets.size # => 3
+ # person.pets.count # => 3
+ #
+ # person.pets.find(1, 2, 3)
+ # # => [
+ # # #<Pet id: 1, name: "Fancy-Fancy", person_id: 1>,
+ # # #<Pet id: 2, name: "Spook", person_id: 1>,
+ # # #<Pet id: 3, name: "Choo-Choo", person_id: 1>
+ # # ]
+ def create(attributes = {}, &block)
+ @association.create(attributes, &block)
+ end
+
+ # Like #create, except that if the record is invalid, raises an exception.
+ #
+ # class Person
+ # has_many :pets
+ # end
+ #
+ # class Pet
+ # validates :name, presence: true
+ # end
+ #
+ # person.pets.create!(name: nil)
+ # # => ActiveRecord::RecordInvalid: Validation failed: Name can't be blank
+ def create!(attributes = {}, &block)
+ @association.create!(attributes, &block)
+ end
+
+ # Add one or more records to the collection by setting their foreign keys
+ # to the association's primary key. Since #<< flattens its argument list and
+ # inserts each record, +push+ and #concat behave identically. Returns +self+
+ # so method calls may be chained.
+ #
+ # class Person < ActiveRecord::Base
+ # has_many :pets
+ # end
+ #
+ # person.pets.size # => 0
+ # person.pets.concat(Pet.new(name: 'Fancy-Fancy'))
+ # person.pets.concat(Pet.new(name: 'Spook'), Pet.new(name: 'Choo-Choo'))
+ # person.pets.size # => 3
+ #
+ # person.id # => 1
+ # person.pets
+ # # => [
+ # # #<Pet id: 1, name: "Fancy-Fancy", person_id: 1>,
+ # # #<Pet id: 2, name: "Spook", person_id: 1>,
+ # # #<Pet id: 3, name: "Choo-Choo", person_id: 1>
+ # # ]
+ #
+ # person.pets.concat([Pet.new(name: 'Brain'), Pet.new(name: 'Benny')])
+ # person.pets.size # => 5
+ def concat(*records)
+ @association.concat(*records)
+ end
+
+ # Replaces this collection with +other_array+. This will perform a diff
+ # and delete/add only records that have changed.
+ #
+ # class Person < ActiveRecord::Base
+ # has_many :pets
+ # end
+ #
+ # person.pets
+ # # => [#<Pet id: 1, name: "Gorby", group: "cats", person_id: 1>]
+ #
+ # other_pets = [Pet.new(name: 'Puff', group: 'celebrities')]
+ #
+ # person.pets.replace(other_pets)
+ #
+ # person.pets
+ # # => [#<Pet id: 2, name: "Puff", group: "celebrities", person_id: 1>]
+ #
+ # If the supplied array has an incorrect association type, it raises
+ # an <tt>ActiveRecord::AssociationTypeMismatch</tt> error:
+ #
+ # person.pets.replace(["doo", "ggie", "gaga"])
+ # # => ActiveRecord::AssociationTypeMismatch: Pet expected, got String
+ def replace(other_array)
+ @association.replace(other_array)
+ end
+
+ # Deletes all the records from the collection according to the strategy
+ # specified by the +:dependent+ option. If no +:dependent+ option is given,
+ # then it will follow the default strategy.
+ #
+ # For <tt>has_many :through</tt> associations, the default deletion strategy is
+ # +:delete_all+.
+ #
+ # For +has_many+ associations, the default deletion strategy is +:nullify+.
+ # This sets the foreign keys to +NULL+.
+ #
+ # class Person < ActiveRecord::Base
+ # has_many :pets # dependent: :nullify option by default
+ # end
+ #
+ # person.pets.size # => 3
+ # person.pets
+ # # => [
+ # # #<Pet id: 1, name: "Fancy-Fancy", person_id: 1>,
+ # # #<Pet id: 2, name: "Spook", person_id: 1>,
+ # # #<Pet id: 3, name: "Choo-Choo", person_id: 1>
+ # # ]
+ #
+ # person.pets.delete_all
+ # # => [
+ # # #<Pet id: 1, name: "Fancy-Fancy", person_id: 1>,
+ # # #<Pet id: 2, name: "Spook", person_id: 1>,
+ # # #<Pet id: 3, name: "Choo-Choo", person_id: 1>
+ # # ]
+ #
+ # person.pets.size # => 0
+ # person.pets # => []
+ #
+ # Pet.find(1, 2, 3)
+ # # => [
+ # # #<Pet id: 1, name: "Fancy-Fancy", person_id: nil>,
+ # # #<Pet id: 2, name: "Spook", person_id: nil>,
+ # # #<Pet id: 3, name: "Choo-Choo", person_id: nil>
+ # # ]
+ #
+ # Both +has_many+ and <tt>has_many :through</tt> dependencies default to the
+ # +:delete_all+ strategy if the +:dependent+ option is set to +:destroy+.
+ # Records are not instantiated and callbacks will not be fired.
+ #
+ # class Person < ActiveRecord::Base
+ # has_many :pets, dependent: :destroy
+ # end
+ #
+ # person.pets.size # => 3
+ # person.pets
+ # # => [
+ # # #<Pet id: 1, name: "Fancy-Fancy", person_id: 1>,
+ # # #<Pet id: 2, name: "Spook", person_id: 1>,
+ # # #<Pet id: 3, name: "Choo-Choo", person_id: 1>
+ # # ]
+ #
+ # person.pets.delete_all
+ #
+ # Pet.find(1, 2, 3)
+ # # => ActiveRecord::RecordNotFound: Couldn't find all Pets with 'id': (1, 2, 3)
+ #
+ # If it is set to <tt>:delete_all</tt>, all the objects are deleted
+ # *without* calling their +destroy+ method.
+ #
+ # class Person < ActiveRecord::Base
+ # has_many :pets, dependent: :delete_all
+ # end
+ #
+ # person.pets.size # => 3
+ # person.pets
+ # # => [
+ # # #<Pet id: 1, name: "Fancy-Fancy", person_id: 1>,
+ # # #<Pet id: 2, name: "Spook", person_id: 1>,
+ # # #<Pet id: 3, name: "Choo-Choo", person_id: 1>
+ # # ]
+ #
+ # person.pets.delete_all
+ #
+ # Pet.find(1, 2, 3)
+ # # => ActiveRecord::RecordNotFound: Couldn't find all Pets with 'id': (1, 2, 3)
+ def delete_all(dependent = nil)
+ @association.delete_all(dependent)
+ end
+
+ # Deletes the records of the collection directly from the database
+ # ignoring the +:dependent+ option. Records are instantiated and it
+ # invokes +before_remove+, +after_remove+, +before_destroy+ and
+ # +after_destroy+ callbacks.
+ #
+ # class Person < ActiveRecord::Base
+ # has_many :pets
+ # end
+ #
+ # person.pets.size # => 3
+ # person.pets
+ # # => [
+ # # #<Pet id: 1, name: "Fancy-Fancy", person_id: 1>,
+ # # #<Pet id: 2, name: "Spook", person_id: 1>,
+ # # #<Pet id: 3, name: "Choo-Choo", person_id: 1>
+ # # ]
+ #
+ # person.pets.destroy_all
+ #
+ # person.pets.size # => 0
+ # person.pets # => []
+ #
+ # Pet.find(1) # => Couldn't find Pet with id=1
+ def destroy_all
+ @association.destroy_all
+ end
+
+ # Deletes the +records+ supplied from the collection according to the strategy
+ # specified by the +:dependent+ option. If no +:dependent+ option is given,
+ # then it will follow the default strategy. Returns an array with the
+ # deleted records.
+ #
+ # For <tt>has_many :through</tt> associations, the default deletion strategy is
+ # +:delete_all+.
+ #
+ # For +has_many+ associations, the default deletion strategy is +:nullify+.
+ # This sets the foreign keys to +NULL+.
+ #
+ # class Person < ActiveRecord::Base
+ # has_many :pets # dependent: :nullify option by default
+ # end
+ #
+ # person.pets.size # => 3
+ # person.pets
+ # # => [
+ # # #<Pet id: 1, name: "Fancy-Fancy", person_id: 1>,
+ # # #<Pet id: 2, name: "Spook", person_id: 1>,
+ # # #<Pet id: 3, name: "Choo-Choo", person_id: 1>
+ # # ]
+ #
+ # person.pets.delete(Pet.find(1))
+ # # => [#<Pet id: 1, name: "Fancy-Fancy", person_id: 1>]
+ #
+ # person.pets.size # => 2
+ # person.pets
+ # # => [
+ # # #<Pet id: 2, name: "Spook", person_id: 1>,
+ # # #<Pet id: 3, name: "Choo-Choo", person_id: 1>
+ # # ]
+ #
+ # Pet.find(1)
+ # # => #<Pet id: 1, name: "Fancy-Fancy", person_id: nil>
+ #
+ # If it is set to <tt>:destroy</tt> all the +records+ are removed by calling
+ # their +destroy+ method. See +destroy+ for more information.
+ #
+ # class Person < ActiveRecord::Base
+ # has_many :pets, dependent: :destroy
+ # end
+ #
+ # person.pets.size # => 3
+ # person.pets
+ # # => [
+ # # #<Pet id: 1, name: "Fancy-Fancy", person_id: 1>,
+ # # #<Pet id: 2, name: "Spook", person_id: 1>,
+ # # #<Pet id: 3, name: "Choo-Choo", person_id: 1>
+ # # ]
+ #
+ # person.pets.delete(Pet.find(1), Pet.find(3))
+ # # => [
+ # # #<Pet id: 1, name: "Fancy-Fancy", person_id: 1>,
+ # # #<Pet id: 3, name: "Choo-Choo", person_id: 1>
+ # # ]
+ #
+ # person.pets.size # => 1
+ # person.pets
+ # # => [#<Pet id: 2, name: "Spook", person_id: 1>]
+ #
+ # Pet.find(1, 3)
+ # # => ActiveRecord::RecordNotFound: Couldn't find all Pets with 'id': (1, 3)
+ #
+ # If it is set to <tt>:delete_all</tt>, all the +records+ are deleted
+ # *without* calling their +destroy+ method.
+ #
+ # class Person < ActiveRecord::Base
+ # has_many :pets, dependent: :delete_all
+ # end
+ #
+ # person.pets.size # => 3
+ # person.pets
+ # # => [
+ # # #<Pet id: 1, name: "Fancy-Fancy", person_id: 1>,
+ # # #<Pet id: 2, name: "Spook", person_id: 1>,
+ # # #<Pet id: 3, name: "Choo-Choo", person_id: 1>
+ # # ]
+ #
+ # person.pets.delete(Pet.find(1))
+ # # => [#<Pet id: 1, name: "Fancy-Fancy", person_id: 1>]
+ #
+ # person.pets.size # => 2
+ # person.pets
+ # # => [
+ # # #<Pet id: 2, name: "Spook", person_id: 1>,
+ # # #<Pet id: 3, name: "Choo-Choo", person_id: 1>
+ # # ]
+ #
+ # Pet.find(1)
+ # # => ActiveRecord::RecordNotFound: Couldn't find Pet with 'id'=1
+ #
+ # You can pass +Integer+ or +String+ values; it finds the records
+ # responding to the +id+ and executes delete on them.
+ #
+ # class Person < ActiveRecord::Base
+ # has_many :pets
+ # end
+ #
+ # person.pets.size # => 3
+ # person.pets
+ # # => [
+ # # #<Pet id: 1, name: "Fancy-Fancy", person_id: 1>,
+ # # #<Pet id: 2, name: "Spook", person_id: 1>,
+ # # #<Pet id: 3, name: "Choo-Choo", person_id: 1>
+ # # ]
+ #
+ # person.pets.delete("1")
+ # # => [#<Pet id: 1, name: "Fancy-Fancy", person_id: 1>]
+ #
+ # person.pets.delete(2, 3)
+ # # => [
+ # # #<Pet id: 2, name: "Spook", person_id: 1>,
+ # # #<Pet id: 3, name: "Choo-Choo", person_id: 1>
+ # # ]
+ def delete(*records)
+ @association.delete(*records)
+ end
+
+ # Destroys the +records+ supplied and removes them from the collection.
+ # This method will _always_ remove the records from the database, ignoring
+ # the +:dependent+ option. Returns an array with the removed records.
+ #
+ # class Person < ActiveRecord::Base
+ # has_many :pets
+ # end
+ #
+ # person.pets.size # => 3
+ # person.pets
+ # # => [
+ # # #<Pet id: 1, name: "Fancy-Fancy", person_id: 1>,
+ # # #<Pet id: 2, name: "Spook", person_id: 1>,
+ # # #<Pet id: 3, name: "Choo-Choo", person_id: 1>
+ # # ]
+ #
+ # person.pets.destroy(Pet.find(1))
+ # # => [#<Pet id: 1, name: "Fancy-Fancy", person_id: 1>]
+ #
+ # person.pets.size # => 2
+ # person.pets
+ # # => [
+ # # #<Pet id: 2, name: "Spook", person_id: 1>,
+ # # #<Pet id: 3, name: "Choo-Choo", person_id: 1>
+ # # ]
+ #
+ # person.pets.destroy(Pet.find(2), Pet.find(3))
+ # # => [
+ # # #<Pet id: 2, name: "Spook", person_id: 1>,
+ # # #<Pet id: 3, name: "Choo-Choo", person_id: 1>
+ # # ]
+ #
+ # person.pets.size # => 0
+ # person.pets # => []
+ #
+ # Pet.find(1, 2, 3) # => ActiveRecord::RecordNotFound: Couldn't find all Pets with 'id': (1, 2, 3)
+ #
+ # You can pass +Integer+ or +String+ values; it finds the records
+ # responding to the +id+ and then deletes them from the database.
+ #
+ # person.pets.size # => 3
+ # person.pets
+ # # => [
+ # # #<Pet id: 4, name: "Benny", person_id: 1>,
+ # # #<Pet id: 5, name: "Brain", person_id: 1>,
+ # # #<Pet id: 6, name: "Boss", person_id: 1>
+ # # ]
+ #
+ # person.pets.destroy("4")
+ # # => #<Pet id: 4, name: "Benny", person_id: 1>
+ #
+ # person.pets.size # => 2
+ # person.pets
+ # # => [
+ # # #<Pet id: 5, name: "Brain", person_id: 1>,
+ # # #<Pet id: 6, name: "Boss", person_id: 1>
+ # # ]
+ #
+ # person.pets.destroy(5, 6)
+ # # => [
+ # # #<Pet id: 5, name: "Brain", person_id: 1>,
+ # # #<Pet id: 6, name: "Boss", person_id: 1>
+ # # ]
+ #
+ # person.pets.size # => 0
+ # person.pets # => []
+ #
+ # Pet.find(4, 5, 6) # => ActiveRecord::RecordNotFound: Couldn't find all Pets with 'id': (4, 5, 6)
+ def destroy(*records)
+ @association.destroy(*records)
+ end
+
+ ##
+ # :method: distinct
+ #
+ # :call-seq:
+ # distinct(value = true)
+ #
+ # Specifies whether the records should be unique or not.
+ #
+ # class Person < ActiveRecord::Base
+ # has_many :pets
+ # end
+ #
+ # person.pets.select(:name)
+ # # => [
+ # # #<Pet name: "Fancy-Fancy">,
+ # # #<Pet name: "Fancy-Fancy">
+ # # ]
+ #
+ # person.pets.select(:name).distinct
+ # # => [#<Pet name: "Fancy-Fancy">]
+ #
+ # person.pets.select(:name).distinct.distinct(false)
+ # # => [
+ # # #<Pet name: "Fancy-Fancy">,
+ # # #<Pet name: "Fancy-Fancy">
+ # # ]
+
+ #--
+ def calculate(operation, column_name)
+ null_scope? ? scope.calculate(operation, column_name) : super
+ end
+
+ def pluck(*column_names)
+ null_scope? ? scope.pluck(*column_names) : super
+ end
+
+ ##
+ # :method: count
+ #
+ # :call-seq:
+ # count(column_name = nil, &block)
+ #
+ # Count all records.
+ #
+ # class Person < ActiveRecord::Base
+ # has_many :pets
+ # end
+ #
+ # # This will perform the count using SQL.
+ # person.pets.count # => 3
+ # person.pets
+ # # => [
+ # # #<Pet id: 1, name: "Fancy-Fancy", person_id: 1>,
+ # # #<Pet id: 2, name: "Spook", person_id: 1>,
+ # # #<Pet id: 3, name: "Choo-Choo", person_id: 1>
+ # # ]
+ #
+ # Passing a block will select all of a person's pets in SQL and then
+ # perform the count using Ruby.
+ #
+ # person.pets.count { |pet| pet.name.include?('-') } # => 2
+
+ # Returns the size of the collection. If the collection hasn't been loaded,
+ # it executes a <tt>SELECT COUNT(*)</tt> query. Otherwise it calls
+ # <tt>collection.size</tt>.
+ #
+ # If the collection has already been loaded, +size+ and +length+ are
+ # equivalent. If not, and you are going to need the records anyway,
+ # +length+ will take one less query. Otherwise +size+ is more efficient.
+ #
+ # class Person < ActiveRecord::Base
+ # has_many :pets
+ # end
+ #
+ # person.pets.size # => 3
+ # # executes something like SELECT COUNT(*) FROM "pets" WHERE "pets"."person_id" = 1
+ #
+ # person.pets # This will execute a SELECT * FROM query
+ # # => [
+ # # #<Pet id: 1, name: "Fancy-Fancy", person_id: 1>,
+ # # #<Pet id: 2, name: "Spook", person_id: 1>,
+ # # #<Pet id: 3, name: "Choo-Choo", person_id: 1>
+ # # ]
+ #
+ # person.pets.size # => 3
+ # # Because the collection is already loaded, this will behave like
+ # # collection.size and no SQL count query is executed.
+ def size
+ @association.size
+ end
+
+ ##
+ # :method: length
+ #
+ # :call-seq:
+ # length()
+ #
+ # Returns the size of the collection by calling +size+ on the target.
+ # If the collection has already been loaded, +length+ and +size+ are
+ # equivalent. If not, and you are going to need the records anyway, this
+ # method will take one less query. Otherwise +size+ is more efficient.
+ #
+ # class Person < ActiveRecord::Base
+ # has_many :pets
+ # end
+ #
+ # person.pets.length # => 3
+ # # executes something like SELECT "pets".* FROM "pets" WHERE "pets"."person_id" = 1
+ #
+ # # Because the collection is loaded, you can
+ # # call the collection with no additional queries:
+ # person.pets
+ # # => [
+ # # #<Pet id: 1, name: "Fancy-Fancy", person_id: 1>,
+ # # #<Pet id: 2, name: "Spook", person_id: 1>,
+ # # #<Pet id: 3, name: "Choo-Choo", person_id: 1>
+ # # ]
+
+ # Returns +true+ if the collection is empty. If the collection has been
+ # loaded it is equivalent
+ # to <tt>collection.size.zero?</tt>. If the collection has not been loaded,
+ # it is equivalent to <tt>!collection.exists?</tt>. If the collection has
+ # not already been loaded and you are going to fetch the records anyway it
+ # is better to check <tt>collection.length.zero?</tt>.
+ #
+ # class Person < ActiveRecord::Base
+ # has_many :pets
+ # end
+ #
+ # person.pets.count # => 1
+ # person.pets.empty? # => false
+ #
+ # person.pets.delete_all
+ #
+ # person.pets.count # => 0
+ # person.pets.empty? # => true
+ def empty?
+ @association.empty?
+ end
+
+ ##
+ # :method: any?
+ #
+ # :call-seq:
+ # any?()
+ #
+ # Returns +true+ if the collection is not empty.
+ #
+ # class Person < ActiveRecord::Base
+ # has_many :pets
+ # end
+ #
+ # person.pets.count # => 0
+ # person.pets.any? # => false
+ #
+ # person.pets << Pet.new(name: 'Snoop')
+ # person.pets.count # => 1
+ # person.pets.any? # => true
+ #
+ # You can also pass a +block+ to define criteria. The behavior
+ # is the same; it returns true if the collection based on the
+ # criteria is not empty.
+ #
+ # person.pets
+ # # => [#<Pet name: "Snoop", group: "dogs">]
+ #
+ # person.pets.any? do |pet|
+ # pet.group == 'cats'
+ # end
+ # # => false
+ #
+ # person.pets.any? do |pet|
+ # pet.group == 'dogs'
+ # end
+ # # => true
+
+ ##
+ # :method: many?
+ #
+ # :call-seq:
+ # many?()
+ #
+ # Returns true if the collection has more than one record.
+ # Equivalent to <tt>collection.size > 1</tt>.
+ #
+ # class Person < ActiveRecord::Base
+ # has_many :pets
+ # end
+ #
+ # person.pets.count # => 1
+ # person.pets.many? # => false
+ #
+ # person.pets << Pet.new(name: 'Snoopy')
+ # person.pets.count # => 2
+ # person.pets.many? # => true
+ #
+ # You can also pass a +block+ to define criteria. The
+ # behavior is the same; it returns true if the collection
+ # based on the criteria has more than one record.
+ #
+ # person.pets
+ # # => [
+ # # #<Pet name: "Gorby", group: "cats">,
+ # # #<Pet name: "Puff", group: "cats">,
+ # # #<Pet name: "Snoop", group: "dogs">
+ # # ]
+ #
+ # person.pets.many? do |pet|
+ # pet.group == 'dogs'
+ # end
+ # # => false
+ #
+ # person.pets.many? do |pet|
+ # pet.group == 'cats'
+ # end
+ # # => true
+
+ # Returns +true+ if the given +record+ is present in the collection.
+ #
+ # class Person < ActiveRecord::Base
+ # has_many :pets
+ # end
+ #
+ # person.pets # => [#<Pet id: 20, name: "Snoop">]
+ #
+ # person.pets.include?(Pet.find(20)) # => true
+ # person.pets.include?(Pet.find(21)) # => false
+ def include?(record)
+ !!@association.include?(record)
+ end
+
+ def proxy_association
+ @association
+ end
+
+ # Returns a <tt>Relation</tt> object for the records in this association.
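+ #
+ # For example (a sketch, reusing the +Person+/+Pet+ models from the examples above):
+ #
+ #   person.pets.scope # => an ActiveRecord::Relation for the person's pets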
+ def scope
+ @scope ||= @association.scope
+ end
+
+ # Equivalent to <tt>Array#==</tt>. Returns +true+ if the two arrays
+ # contain the same number of elements and if each element is equal
+ # to the corresponding element in the +other+ array, otherwise returns
+ # +false+.
+ #
+ # class Person < ActiveRecord::Base
+ # has_many :pets
+ # end
+ #
+ # person.pets
+ # # => [
+ # # #<Pet id: 1, name: "Fancy-Fancy", person_id: 1>,
+ # # #<Pet id: 2, name: "Spook", person_id: 1>
+ # # ]
+ #
+ # other = person.pets.to_ary
+ #
+ # person.pets == other
+ # # => true
+ #
+ # other = [Pet.new(id: 1), Pet.new(id: 2)]
+ #
+ # person.pets == other
+ # # => false
+ def ==(other)
+ load_target == other
+ end
+
+ # Returns a new array of objects from the collection. If the collection
+ # hasn't been loaded, it fetches the records from the database.
+ #
+ # class Person < ActiveRecord::Base
+ # has_many :pets
+ # end
+ #
+ # person.pets
+ # # => [
+ # # #<Pet id: 4, name: "Benny", person_id: 1>,
+ # # #<Pet id: 5, name: "Brain", person_id: 1>,
+ # # #<Pet id: 6, name: "Boss", person_id: 1>
+ # # ]
+ #
+ # other_pets = person.pets.to_ary
+ # # => [
+ # # #<Pet id: 4, name: "Benny", person_id: 1>,
+ # # #<Pet id: 5, name: "Brain", person_id: 1>,
+ # # #<Pet id: 6, name: "Boss", person_id: 1>
+ # # ]
+ #
+ # other_pets.replace([Pet.new(name: 'BooGoo')])
+ #
+ # other_pets
+ # # => [#<Pet id: nil, name: "BooGoo", person_id: 1>]
+ #
+ # person.pets
+ # # This is not affected by replace
+ # # => [
+ # # #<Pet id: 4, name: "Benny", person_id: 1>,
+ # # #<Pet id: 5, name: "Brain", person_id: 1>,
+ # # #<Pet id: 6, name: "Boss", person_id: 1>
+ # # ]
+ def to_ary
+ load_target.dup
+ end
+ alias_method :to_a, :to_ary
+
+ def records # :nodoc:
+ load_target
+ end
+
+ # Adds one or more +records+ to the collection by setting their foreign keys
+ # to the association's primary key. Returns +self+, so several appends may be
+ # chained together.
+ #
+ # class Person < ActiveRecord::Base
+ # has_many :pets
+ # end
+ #
+ # person.pets.size # => 0
+ # person.pets << Pet.new(name: 'Fancy-Fancy')
+ # person.pets << [Pet.new(name: 'Spook'), Pet.new(name: 'Choo-Choo')]
+ # person.pets.size # => 3
+ #
+ # person.id # => 1
+ # person.pets
+ # # => [
+ # # #<Pet id: 1, name: "Fancy-Fancy", person_id: 1>,
+ # # #<Pet id: 2, name: "Spook", person_id: 1>,
+ # # #<Pet id: 3, name: "Choo-Choo", person_id: 1>
+ # # ]
+ def <<(*records)
+ proxy_association.concat(records) && self
+ end
+ alias_method :push, :<<
+ alias_method :append, :<<
+
+ def prepend(*args)
+ raise NoMethodError, "prepend on association is not defined. Please use <<, push or append"
+ end
+
+ # Equivalent to +delete_all+. The difference is that it returns +self+,
+ # instead of an array with the deleted objects, so methods can be chained.
+ # See +delete_all+ for more information.
+ # Note that because +delete_all+ removes records by directly
+ # running an SQL query against the database, the +updated_at+ column of
+ # the object is not changed.
+ def clear
+ delete_all
+ self
+ end
+
+ # Reloads the collection from the database. Returns +self+.
+ # Equivalent to <tt>collection(true)</tt>.
+ #
+ # class Person < ActiveRecord::Base
+ # has_many :pets
+ # end
+ #
+ # person.pets # fetches pets from the database
+ # # => [#<Pet id: 1, name: "Snoop", group: "dogs", person_id: 1>]
+ #
+ # person.pets # uses the pets cache
+ # # => [#<Pet id: 1, name: "Snoop", group: "dogs", person_id: 1>]
+ #
+ # person.pets.reload # fetches pets from the database
+ # # => [#<Pet id: 1, name: "Snoop", group: "dogs", person_id: 1>]
+ #
+ # person.pets(true) # fetches pets from the database
+ # # => [#<Pet id: 1, name: "Snoop", group: "dogs", person_id: 1>]
+ def reload
+ proxy_association.reload
+ reset_scope
+ end
+
+ # Unloads the association. Returns +self+.
+ #
+ # class Person < ActiveRecord::Base
+ # has_many :pets
+ # end
+ #
+ # person.pets # fetches pets from the database
+ # # => [#<Pet id: 1, name: "Snoop", group: "dogs", person_id: 1>]
+ #
+ # person.pets # uses the pets cache
+ # # => [#<Pet id: 1, name: "Snoop", group: "dogs", person_id: 1>]
+ #
+ # person.pets.reset # clears the pets cache
+ #
+ # person.pets # fetches pets from the database
+ # # => [#<Pet id: 1, name: "Snoop", group: "dogs", person_id: 1>]
+ def reset
+ proxy_association.reset
+ proxy_association.reset_scope
+ reset_scope
+ end
+
+ def reset_scope # :nodoc:
+ @offsets = {}
+ @scope = nil
+ self
+ end
+
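+ # Query and spawn methods (+where+, +order+, +merge+, ...) that the proxy
+ # does not define itself are delegated to the association scope below, so
+ # chained calls keep building a Relation instead of loading the target.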
+ delegate_methods = [
+ QueryMethods,
+ SpawnMethods,
+ ].flat_map { |klass|
+ klass.public_instance_methods(false)
+ } - self.public_instance_methods(false) - [:select] + [:scoping]
+
+ delegate(*delegate_methods, to: :scope)
+
+ private
+
+ def find_nth_with_limit(index, limit)
+ load_target if find_from_target?
+ super
+ end
+
+ def find_nth_from_last(index)
+ load_target if find_from_target?
+ super
+ end
+
+ def null_scope?
+ @association.null_scope?
+ end
+
+ def find_from_target?
+ @association.find_from_target?
+ end
+
+ def exec_queries
+ load_target
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/associations/foreign_association.rb b/activerecord/lib/active_record/associations/foreign_association.rb
new file mode 100644
index 0000000000..40010cde03
--- /dev/null
+++ b/activerecord/lib/active_record/associations/foreign_association.rb
@@ -0,0 +1,13 @@
+# frozen_string_literal: true
+
+module ActiveRecord::Associations
+ module ForeignAssociation # :nodoc:
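+ # Returns +true+ if the owner has a value for the key that associated
+ # records' foreign keys point at (usually the owner's primary key), and
+ # +false+ if the associated class has no primary key or the value is not
+ # present yet.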
+ def foreign_key_present?
+ if reflection.klass.primary_key
+ owner.attribute_present?(reflection.active_record_primary_key)
+ else
+ false
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/associations/has_many_association.rb b/activerecord/lib/active_record/associations/has_many_association.rb
new file mode 100644
index 0000000000..88fe33eef2
--- /dev/null
+++ b/activerecord/lib/active_record/associations/has_many_association.rb
@@ -0,0 +1,135 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ # = Active Record Has Many Association
+ module Associations
+ # This is the proxy that handles a has many association.
+ #
+ # If the association has a <tt>:through</tt> option further specialization
+ # is provided by its child HasManyThroughAssociation.
+ class HasManyAssociation < CollectionAssociation #:nodoc:
+ include ForeignAssociation
+
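+ # Invoked when the owner is being destroyed; applies the association's
+ # +:dependent+ option, e.g. (illustrative declaration)
+ # <tt>has_many :comments, dependent: :restrict_with_error</tt>.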
+ def handle_dependency
+ case options[:dependent]
+ when :restrict_with_exception
+ raise ActiveRecord::DeleteRestrictionError.new(reflection.name) unless empty?
+
+ when :restrict_with_error
+ unless empty?
+ record = owner.class.human_attribute_name(reflection.name).downcase
+ owner.errors.add(:base, :'restrict_dependent_destroy.has_many', record: record)
+ throw(:abort)
+ end
+
+ when :destroy
+ # No point in executing the counter update since we're going to destroy the parent anyway
+ load_target.each { |t| t.destroyed_by_association = reflection }
+ destroy_all
+ else
+ delete_all
+ end
+ end
+
+ def insert_record(record, validate = true, raise = false)
+ set_owner_attributes(record)
+ super
+ end
+
+ def empty?
+ if reflection.has_cached_counter?
+ size.zero?
+ else
+ super
+ end
+ end
+
+ private
+
+ # Returns the number of records in this collection.
+ #
+ # If the association has a counter cache it gets that value. Otherwise
+ # it will attempt to do a count via SQL, bounded to <tt>:limit</tt> if
+ # there's one. Some configuration options like :group make it impossible
+ # to do an SQL count; in those cases the array count will be used.
+ #
+ # That does not depend on whether the collection has already been loaded
+ # or not. The +size+ method is the one that takes the loaded flag into
+ # account and delegates to +count_records+ if needed.
+ #
+ # If the collection is empty the target is set to an empty array and
+ # the loaded flag is set to true as well.
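+ #
+ # A hypothetical counter cache setup (the column and model names are made up):
+ #
+ #   class Book < ActiveRecord::Base
+ #     belongs_to :author, counter_cache: true
+ #   end
+ #
+ #   author.books.size # reads authors.books_count instead of running COUNT(*)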
+ def count_records
+ count = if reflection.has_cached_counter?
+ owner._read_attribute(reflection.counter_cache_column).to_i
+ else
+ scope.count
+ end
+
+ # If there's nothing in the database and @target has no new records
+ # we are certain the current target is an empty array. This is a
+ # documented side-effect of the method that may avoid an extra SELECT.
+ (@target ||= []) && loaded! if count == 0
+
+ [association_scope.limit_value, count].compact.min
+ end
+
+ def update_counter(difference, reflection = reflection())
+ if reflection.has_cached_counter?
+ owner.increment!(reflection.counter_cache_column, difference)
+ end
+ end
+
+ def update_counter_in_memory(difference, reflection = reflection())
+ if reflection.counter_must_be_updated_by_has_many?
+ counter = reflection.counter_cache_column
+ owner.increment(counter, difference)
+ owner.send(:clear_attribute_change, counter) # eww
+ end
+ end
+
+ def delete_count(method, scope)
+ if method == :delete_all
+ scope.delete_all
+ else
+ scope.update_all(reflection.foreign_key => nil)
+ end
+ end
+
+ def delete_or_nullify_all_records(method)
+ count = delete_count(method, scope)
+ update_counter(-count)
+ end
+
+ # Deletes the records according to the <tt>:dependent</tt> option.
+ def delete_records(records, method)
+ if method == :destroy
+ records.each(&:destroy!)
+ update_counter(-records.length) unless reflection.inverse_updates_counter_cache?
+ else
+ scope = self.scope.where(reflection.klass.primary_key => records)
+ update_counter(-delete_count(method, scope))
+ end
+ end
+
+ def concat_records(records, *)
+ update_counter_if_success(super, records.length)
+ end
+
+ def _create_record(attributes, *)
+ if attributes.is_a?(Array)
+ super
+ else
+ update_counter_if_success(super, 1)
+ end
+ end
+
+ def update_counter_if_success(saved_successfully, difference)
+ if saved_successfully
+ update_counter_in_memory(difference)
+ end
+ saved_successfully
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/associations/has_many_through_association.rb b/activerecord/lib/active_record/associations/has_many_through_association.rb
new file mode 100644
index 0000000000..89ce00f98e
--- /dev/null
+++ b/activerecord/lib/active_record/associations/has_many_through_association.rb
@@ -0,0 +1,216 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ # = Active Record Has Many Through Association
+ module Associations
+ class HasManyThroughAssociation < HasManyAssociation #:nodoc:
+ include ThroughAssociation
+
+ def initialize(owner, reflection)
+ super
+
+ @through_records = {}
+ @through_association = nil
+ end
+
+ def concat(*records)
+ unless owner.new_record?
+ records.flatten.each do |record|
+ raise_on_type_mismatch!(record)
+ end
+ end
+
+ super
+ end
+
+ def concat_records(records)
+ ensure_not_nested
+
+ records = super(records, true)
+
+ if owner.new_record? && records
+ records.flatten.each do |record|
+ build_through_record(record)
+ end
+ end
+
+ records
+ end
+
+ def insert_record(record, validate = true, raise = false)
+ ensure_not_nested
+
+ if record.new_record? || record.has_changes_to_save?
+ return unless super
+ end
+
+ save_through_record(record)
+
+ record
+ end
+
+ private
+
+ def through_association
+ @through_association ||= owner.association(through_reflection.name)
+ end
+
+ # The through record (built with build_record) is temporarily cached
+ # so that it may be reused if insert_record is subsequently called.
+ #
+ # However, after insert_record has been called, the cache is cleared in
+ # order to allow multiple instances of the same record in an association.
+ def build_through_record(record)
+ @through_records[record.object_id] ||= begin
+ ensure_mutable
+
+ through_record = through_association.build(*options_for_through_record)
+ through_record.send("#{source_reflection.name}=", record)
+
+ if options[:source_type]
+ through_record.send("#{source_reflection.foreign_type}=", options[:source_type])
+ end
+
+ through_record
+ end
+ end
+
+ def options_for_through_record
+ [through_scope_attributes]
+ end
+
+ def through_scope_attributes
+ scope.where_values_hash(through_association.reflection.name.to_s).
+ except!(through_association.reflection.foreign_key,
+ through_association.reflection.klass.inheritance_column)
+ end
+
+ def save_through_record(record)
+ association = build_through_record(record)
+ if association.changed?
+ association.save!
+ end
+ ensure
+ @through_records.delete(record.object_id)
+ end
+
+ def build_record(attributes)
+ ensure_not_nested
+
+ record = super(attributes)
+
+ inverse = source_reflection.inverse_of
+ if inverse
+ if inverse.collection?
+ record.send(inverse.name) << build_through_record(record)
+ elsif inverse.has_one?
+ record.send("#{inverse.name}=", build_through_record(record))
+ end
+ end
+
+ record
+ end
+
+ def remove_records(existing_records, records, method)
+ super
+ delete_through_records(records)
+ end
+
+ def target_reflection_has_associated_record?
+ !(through_reflection.belongs_to? && owner[through_reflection.foreign_key].blank?)
+ end
+
+ def update_through_counter?(method)
+ case method
+ when :destroy
+ !through_reflection.inverse_updates_counter_cache?
+ when :nullify
+ false
+ else
+ true
+ end
+ end
+
+ def delete_or_nullify_all_records(method)
+ delete_records(load_target, method)
+ end
+
+ def delete_records(records, method)
+ ensure_not_nested
+
+ scope = through_association.scope
+ scope.where! construct_join_attributes(*records)
+
+ case method
+ when :destroy
+ if scope.klass.primary_key
+ count = scope.destroy_all.length
+ else
+ scope.each(&:_run_destroy_callbacks)
+
+ arel = scope.arel
+
+ stmt = Arel::DeleteManager.new
+ stmt.from scope.klass.arel_table
+ stmt.wheres = arel.constraints
+
+ count = scope.klass.connection.delete(stmt, "SQL")
+ end
+ when :nullify
+ count = scope.update_all(source_reflection.foreign_key => nil)
+ else
+ count = scope.delete_all
+ end
+
+ delete_through_records(records)
+
+ if source_reflection.options[:counter_cache] && method != :destroy
+ counter = source_reflection.counter_cache_column
+ klass.decrement_counter counter, records.map(&:id)
+ end
+
+ if through_reflection.collection? && update_through_counter?(method)
+ update_counter(-count, through_reflection)
+ else
+ update_counter(-count)
+ end
+ end
+
+ def through_records_for(record)
+ attributes = construct_join_attributes(record)
+ candidates = Array.wrap(through_association.target)
+ candidates.find_all do |c|
+ attributes.all? do |key, value|
+ c.public_send(key) == value
+ end
+ end
+ end
+
+ def delete_through_records(records)
+ records.each do |record|
+ through_records = through_records_for(record)
+
+ if through_reflection.collection?
+ through_records.each { |r| through_association.target.delete(r) }
+ else
+ if through_records.include?(through_association.target)
+ through_association.target = nil
+ end
+ end
+
+ @through_records.delete(record.object_id)
+ end
+ end
+
+ def find_target
+ return [] unless target_reflection_has_associated_record?
+ super
+ end
+
+ # NOTE - not sure that we can actually cope with inverses here
+ def invertible_for?(record)
+ false
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/associations/has_one_association.rb b/activerecord/lib/active_record/associations/has_one_association.rb
new file mode 100644
index 0000000000..9a88c1af70
--- /dev/null
+++ b/activerecord/lib/active_record/associations/has_one_association.rb
@@ -0,0 +1,111 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ # = Active Record Has One Association
+ module Associations
+ class HasOneAssociation < SingularAssociation #:nodoc:
+ include ForeignAssociation
+
+ def handle_dependency
+ case options[:dependent]
+ when :restrict_with_exception
+ raise ActiveRecord::DeleteRestrictionError.new(reflection.name) if load_target
+
+ when :restrict_with_error
+ if load_target
+ record = owner.class.human_attribute_name(reflection.name).downcase
+ owner.errors.add(:base, :'restrict_dependent_destroy.has_one', record: record)
+ throw(:abort)
+ end
+
+ else
+ delete
+ end
+ end
+
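+ # Assigns +record+ as the new target. The previous target, if any, is
+ # removed according to the +:dependent+ option, and the new record is
+ # saved when +save+ is true and the owner is persisted.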
+ def replace(record, save = true)
+ raise_on_type_mismatch!(record) if record
+ load_target
+
+ return target unless target || record
+
+ assigning_another_record = target != record
+ if assigning_another_record || record.has_changes_to_save?
+ save &&= owner.persisted?
+
+ transaction_if(save) do
+ remove_target!(options[:dependent]) if target && !target.destroyed? && assigning_another_record
+
+ if record
+ set_owner_attributes(record)
+ set_inverse_instance(record)
+
+ if save && !record.save
+ nullify_owner_attributes(record)
+ set_owner_attributes(target) if target
+ raise RecordNotSaved, "Failed to save the new associated #{reflection.name}."
+ end
+ end
+ end
+ end
+
+ self.target = record
+ end
+
+ def delete(method = options[:dependent])
+ if load_target
+ case method
+ when :delete
+ target.delete
+ when :destroy
+ target.destroyed_by_association = reflection
+ target.destroy
+ when :nullify
+ target.update_columns(reflection.foreign_key => nil) if target.persisted?
+ end
+ end
+ end
+
+ private
+
+ # The reason that the save param for replace is false, even for create (not
+ # just build), is that the setting of the foreign keys is actually handled
+ # by the scoping when the record is instantiated, so they are set straight
+ # away and do not need to be updated within replace.
+ def set_new_record(record)
+ replace(record, false)
+ end
+
+ def remove_target!(method)
+ case method
+ when :delete
+ target.delete
+ when :destroy
+ target.destroyed_by_association = reflection
+ target.destroy
+ else
+ nullify_owner_attributes(target)
+ remove_inverse_instance(target)
+
+ if target.persisted? && owner.persisted? && !target.save
+ set_owner_attributes(target)
+ raise RecordNotSaved, "Failed to remove the existing associated #{reflection.name}. " \
+ "The record failed to save after its foreign key was set to nil."
+ end
+ end
+ end
+
+ def nullify_owner_attributes(record)
+ record[reflection.foreign_key] = nil
+ end
+
+ def transaction_if(value)
+ if value
+ reflection.klass.transaction { yield }
+ else
+ yield
+ end
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/associations/has_one_through_association.rb b/activerecord/lib/active_record/associations/has_one_through_association.rb
new file mode 100644
index 0000000000..eb54977aa0
--- /dev/null
+++ b/activerecord/lib/active_record/associations/has_one_through_association.rb
@@ -0,0 +1,42 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ # = Active Record Has One Through Association
+ module Associations
+ class HasOneThroughAssociation < HasOneAssociation #:nodoc:
+ include ThroughAssociation
+
+ def replace(record)
+ create_through_record(record)
+ self.target = record
+ end
+
+ private
+
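+ # Creates, updates or destroys the join record so that the owner points at
+ # +record+ through the +:through+ association.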
+ def create_through_record(record)
+ ensure_not_nested
+
+ through_proxy = owner.association(through_reflection.name)
+ through_record = through_proxy.load_target
+
+ if through_record && !record
+ through_record.destroy
+ elsif record
+ attributes = construct_join_attributes(record)
+
+ if through_record && through_record.destroyed?
+ through_record = through_proxy.tap(&:reload).target
+ end
+
+ if through_record
+ through_record.update(attributes)
+ elsif owner.new_record?
+ through_proxy.build(attributes)
+ else
+ through_proxy.create(attributes)
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/associations/join_dependency.rb b/activerecord/lib/active_record/associations/join_dependency.rb
new file mode 100644
index 0000000000..dc029c08bd
--- /dev/null
+++ b/activerecord/lib/active_record/associations/join_dependency.rb
@@ -0,0 +1,287 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ module Associations
+ class JoinDependency # :nodoc:
+ autoload :JoinBase, "active_record/associations/join_dependency/join_base"
+ autoload :JoinAssociation, "active_record/associations/join_dependency/join_association"
+
+ class Aliases # :nodoc:
+ def initialize(tables)
+ @tables = tables
+ @alias_cache = tables.each_with_object({}) { |table, h|
+ h[table.node] = table.columns.each_with_object({}) { |column, i|
+ i[column.name] = column.alias
+ }
+ }
+ @name_and_alias_cache = tables.each_with_object({}) { |table, h|
+ h[table.node] = table.columns.map { |column|
+ [column.name, column.alias]
+ }
+ }
+ end
+
+ def columns
+ @tables.flat_map(&:column_aliases)
+ end
+
+ # An array of [column_name, alias] pairs for the table
+ def column_aliases(node)
+ @name_and_alias_cache[node]
+ end
+
+ def column_alias(node, column)
+ @alias_cache[node][column]
+ end
+
+ Table = Struct.new(:node, :columns) do # :nodoc:
+ def column_aliases
+ t = node.table
+ columns.map { |column| t[column.name].as Arel.sql column.alias }
+ end
+ end
+ Column = Struct.new(:name, :alias)
+ end
+
+ attr_reader :alias_tracker, :base_klass, :join_root
+
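+ # Normalizes the various +joins+/+includes+ argument shapes into a nested
+ # hash, e.g. (an illustrative call, not taken from the original docs):
+ #
+ #   JoinDependency.make_tree([:comments, { author: :avatar }])
+ #   # => { comments: {}, author: { avatar: {} } }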
+ def self.make_tree(associations)
+ hash = {}
+ walk_tree associations, hash
+ hash
+ end
+
+ def self.walk_tree(associations, hash)
+ case associations
+ when Symbol, String
+ hash[associations.to_sym] ||= {}
+ when Array
+ associations.each do |assoc|
+ walk_tree assoc, hash
+ end
+ when Hash
+ associations.each do |k, v|
+ cache = hash[k] ||= {}
+ walk_tree v, cache
+ end
+ else
+ raise ConfigurationError, associations.inspect
+ end
+ end
+
+ # base is the base class on which operation is taking place.
+ # associations is the list of associations which are joined using hash, symbol or array.
+ # joins is the list of all string join commands and arel nodes.
+ #
+ # Example :
+ #
+ # class Physician < ActiveRecord::Base
+ # has_many :appointments
+ # has_many :patients, through: :appointments
+ # end
+ #
+ # If I execute `@physician.patients.to_a` then
+ # base # => Physician
+ # associations # => []
+ # joins # => [#<Arel::Nodes::InnerJoin: ...]
+ #
+ # However if I execute `Physician.joins(:appointments).to_a` then
+ # base # => Physician
+ # associations # => [:appointments]
+ # joins # => []
+ #
+ def initialize(base, table, associations, joins, eager_loading: true)
+ @alias_tracker = AliasTracker.create_with_joins(base.connection, base.table_name, joins)
+ @eager_loading = eager_loading
+ tree = self.class.make_tree associations
+ @join_root = JoinBase.new(base, table, build(tree, base))
+ @join_root.children.each { |child| construct_tables! @join_root, child }
+ end
+
+ def reflections
+ join_root.drop(1).map!(&:reflection)
+ end
+
+ def join_constraints(joins_to_add, join_type)
+ joins = join_root.children.flat_map { |child|
+ make_join_constraints(join_root, child, join_type)
+ }
+
+ joins.concat joins_to_add.flat_map { |oj|
+ if join_root.match? oj.join_root
+ walk join_root, oj.join_root
+ else
+ oj.join_root.children.flat_map { |child|
+ make_join_constraints(oj.join_root, child, join_type)
+ }
+ end
+ }
+ end
+
+ def aliases
+ Aliases.new join_root.each_with_index.map { |join_part, i|
+ columns = join_part.column_names.each_with_index.map { |column_name, j|
+ Aliases::Column.new column_name, "t#{i}_r#{j}"
+ }
+ Aliases::Table.new(join_part, columns)
+ }
+ end
+
+ def instantiate(result_set, aliases)
+ primary_key = aliases.column_alias(join_root, join_root.primary_key)
+
+ seen = Hash.new { |i, object_id|
+ i[object_id] = Hash.new { |j, child_class|
+ j[child_class] = {}
+ }
+ }
+
+ model_cache = Hash.new { |h, klass| h[klass] = {} }
+ parents = model_cache[join_root]
+ column_aliases = aliases.column_aliases join_root
+
+ message_bus = ActiveSupport::Notifications.instrumenter
+
+ payload = {
+ record_count: result_set.length,
+ class_name: join_root.base_klass.name
+ }
+
+ message_bus.instrument("instantiation.active_record", payload) do
+ result_set.each { |row_hash|
+ parent_key = primary_key ? row_hash[primary_key] : row_hash
+ parent = parents[parent_key] ||= join_root.instantiate(row_hash, column_aliases)
+ construct(parent, join_root, row_hash, result_set, seen, model_cache, aliases)
+ }
+ end
+
+ parents.values
+ end
+
+ private
+
+ def make_constraints(parent, child, tables, join_type)
+ chain = child.reflection.chain
+ foreign_table = parent.table
+ foreign_klass = parent.base_klass
+ child.join_constraints(foreign_table, foreign_klass, join_type, tables, chain)
+ end
+
+ def make_outer_joins(parent, child)
+ join_type = Arel::Nodes::OuterJoin
+ make_join_constraints(parent, child, join_type, true)
+ end
+
+ def make_join_constraints(parent, child, join_type, aliasing = false)
+ tables = aliasing ? table_aliases_for(parent, child) : child.tables
+ joins = make_constraints(parent, child, tables, join_type)
+
+ joins.concat child.children.flat_map { |c| make_join_constraints(child, c, join_type, aliasing) }
+ end
+
+ def table_aliases_for(parent, node)
+ node.reflection.chain.map { |reflection|
+ alias_tracker.aliased_table_for(
+ reflection.table_name,
+ table_alias_for(reflection, parent, reflection != node.reflection),
+ reflection.klass.type_caster
+ )
+ }
+ end
+
+ def construct_tables!(parent, node)
+ node.tables = table_aliases_for(parent, node)
+ node.children.each { |child| construct_tables! node, child }
+ end
+
+ def table_alias_for(reflection, parent, join)
+ name = "#{reflection.plural_name}_#{parent.table_name}"
+ join ? "#{name}_join" : name
+ end
+
+ def walk(left, right)
+ intersection, missing = right.children.map { |node1|
+ [left.children.find { |node2| node1.match? node2 }, node1]
+ }.partition(&:first)
+
+ ojs = missing.flat_map { |_, n| make_outer_joins left, n }
+ intersection.flat_map { |l, r| walk l, r }.concat ojs
+ end
+
+ def find_reflection(klass, name)
+ klass._reflect_on_association(name) ||
+ raise(ConfigurationError, "Can't join '#{klass.name}' to association named '#{name}'; perhaps you misspelled it?")
+ end
+
+ def build(associations, base_klass)
+ associations.map do |name, right|
+ reflection = find_reflection base_klass, name
+ reflection.check_validity!
+ reflection.check_eager_loadable!
+
+ if reflection.polymorphic?
+ next unless @eager_loading
+ raise EagerLoadPolymorphicError.new(reflection)
+ end
+
+ JoinAssociation.new reflection, build(right, reflection.klass)
+ end.compact
+ end
+
+ def construct(ar_parent, parent, row, rs, seen, model_cache, aliases)
+ return if ar_parent.nil?
+
+ parent.children.each do |node|
+ if node.reflection.collection?
+ other = ar_parent.association(node.reflection.name)
+ other.loaded!
+ elsif ar_parent.association_cached?(node.reflection.name)
+ model = ar_parent.association(node.reflection.name).target
+ construct(model, node, row, rs, seen, model_cache, aliases)
+ next
+ end
+
+ key = aliases.column_alias(node, node.primary_key)
+ id = row[key]
+ if id.nil?
+ nil_association = ar_parent.association(node.reflection.name)
+ nil_association.loaded!
+ next
+ end
+
+ model = seen[ar_parent.object_id][node.base_klass][id]
+
+ if model
+ construct(model, node, row, rs, seen, model_cache, aliases)
+ else
+ model = construct_model(ar_parent, node, row, model_cache, id, aliases)
+
+ if node.reflection.scope_for(node.base_klass).readonly_value
+ model.readonly!
+ end
+
+ seen[ar_parent.object_id][node.base_klass][id] = model
+ construct(model, node, row, rs, seen, model_cache, aliases)
+ end
+ end
+ end
+
+ def construct_model(record, node, row, model_cache, id, aliases)
+ other = record.association(node.reflection.name)
+
+ model = model_cache[node][id] ||=
+ node.instantiate(row, aliases.column_aliases(node)) do |m|
+ other.set_inverse_instance(m)
+ end
+
+ if node.reflection.collection?
+ other.target.push(model)
+ else
+ other.target = model
+ end
+
+ model
+ end
+ end
+ end
+end
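A small sketch of the internal make_tree helper defined above (a :nodoc: API): it normalizes the symbol, array and hash forms accepted by includes/eager_load/joins into a single nested hash before the JoinAssociation tree is built.

    ActiveRecord::Associations::JoinDependency.make_tree([:books, { author: :avatar }])
    # => { books: {}, author: { avatar: {} } }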
diff --git a/activerecord/lib/active_record/associations/join_dependency/join_association.rb b/activerecord/lib/active_record/associations/join_dependency/join_association.rb
new file mode 100644
index 0000000000..a526468bf6
--- /dev/null
+++ b/activerecord/lib/active_record/associations/join_dependency/join_association.rb
@@ -0,0 +1,61 @@
+# frozen_string_literal: true
+
+require_relative "join_part"
+
+module ActiveRecord
+ module Associations
+ class JoinDependency # :nodoc:
+ class JoinAssociation < JoinPart # :nodoc:
+ # The reflection of the association represented
+ attr_reader :reflection
+
+ attr_accessor :tables
+
+ def initialize(reflection, children)
+ super(reflection.klass, children)
+
+ @reflection = reflection
+ @tables = nil
+ end
+
+ def match?(other)
+ return true if self == other
+ super && reflection == other.reflection
+ end
+
+ def join_constraints(foreign_table, foreign_klass, join_type, tables, chain)
+ joins = []
+ tables = tables.reverse
+
+          # The chain starts with the target table, but we want to end with it here (it makes
+          # more sense in this context), so we reverse it.
+ chain.reverse_each do |reflection|
+ table = tables.shift
+ klass = reflection.klass
+
+ constraint = reflection.build_join_constraint(table, foreign_table)
+
+ joins << table.create_join(table, table.create_on(constraint), join_type)
+
+ join_scope = reflection.join_scope(table, foreign_klass)
+
+ if join_scope.arel.constraints.any?
+ joins.concat join_scope.arel.join_sources
+ right = joins.last.right
+ right.expr = right.expr.and(join_scope.arel.constraints)
+ end
+
+ # The current table in this iteration becomes the foreign table in the next
+ foreign_table, foreign_klass = table, klass
+ end
+
+ joins
+ end
+
+ def table
+ tables.first
+ end
+ end
+ end
+ end
+end
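For reference, the constraints built in join_constraints are what render the joins for a has_many :through query; a sketch using the Physician/Appointment/Patient models from the JoinDependency comment above (quoting shown MySQL-style, as in the Preloader comment; the exact SQL varies by adapter):

    Physician.joins(:patients).to_sql
    # => SELECT `physicians`.* FROM `physicians`
    #    INNER JOIN `appointments` ON `appointments`.`physician_id` = `physicians`.`id`
    #    INNER JOIN `patients` ON `patients`.`id` = `appointments`.`patient_id`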
diff --git a/activerecord/lib/active_record/associations/join_dependency/join_base.rb b/activerecord/lib/active_record/associations/join_dependency/join_base.rb
new file mode 100644
index 0000000000..8a8fa8993b
--- /dev/null
+++ b/activerecord/lib/active_record/associations/join_dependency/join_base.rb
@@ -0,0 +1,23 @@
+# frozen_string_literal: true
+
+require_relative "join_part"
+
+module ActiveRecord
+ module Associations
+ class JoinDependency # :nodoc:
+ class JoinBase < JoinPart # :nodoc:
+ attr_reader :table
+
+ def initialize(base_klass, table, children)
+ super(base_klass, children)
+ @table = table
+ end
+
+ def match?(other)
+ return true if self == other
+ super && base_klass == other.base_klass
+ end
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/associations/join_dependency/join_part.rb b/activerecord/lib/active_record/associations/join_dependency/join_part.rb
new file mode 100644
index 0000000000..2181f308bf
--- /dev/null
+++ b/activerecord/lib/active_record/associations/join_dependency/join_part.rb
@@ -0,0 +1,64 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ module Associations
+ class JoinDependency # :nodoc:
+ # A JoinPart represents a part of a JoinDependency. It is inherited
+ # by JoinBase and JoinAssociation. A JoinBase represents the Active Record which
+ # everything else is being joined onto. A JoinAssociation represents an association which
+      # is joining to the base. A JoinAssociation may result in more than one actual join
+      # operation (for example, a has_and_belongs_to_many JoinAssociation would result in
+      # two: one for the join table and one for the target table).
+ class JoinPart # :nodoc:
+ include Enumerable
+
+        # The Active Record class which this join part is about: for a JoinBase
+        # this is the actual base model, for a JoinAssociation this is the target model of the
+        # association.
+ attr_reader :base_klass, :children
+
+ delegate :table_name, :column_names, :primary_key, to: :base_klass
+
+ def initialize(base_klass, children)
+ @base_klass = base_klass
+ @children = children
+ end
+
+ def match?(other)
+ self.class == other.class
+ end
+
+ def each(&block)
+ yield self
+ children.each { |child| child.each(&block) }
+ end
+
+ # An Arel::Table for the active_record
+ def table
+ raise NotImplementedError
+ end
+
+ def extract_record(row, column_names_with_alias)
+ # This code is performance critical as it is called per row.
+ # see: https://github.com/rails/rails/pull/12185
+ hash = {}
+
+ index = 0
+ length = column_names_with_alias.length
+
+ while index < length
+ column_name, alias_name = column_names_with_alias[index]
+ hash[column_name] = row[alias_name]
+ index += 1
+ end
+
+ hash
+ end
+
+ def instantiate(row, aliases, &block)
+ base_klass.instantiate(extract_record(row, aliases), &block)
+ end
+ end
+ end
+ end
+end
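A sketch of extract_record with the tN_rM column aliases generated by JoinDependency#aliases; the row values here are illustrative only:

    row = { "t0_r0" => 1, "t0_r1" => "bell hooks" }
    column_names_with_alias = [["id", "t0_r0"], ["name", "t0_r1"]]

    # extract_record(row, column_names_with_alias)
    # # => { "id" => 1, "name" => "bell hooks" }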
diff --git a/activerecord/lib/active_record/associations/preloader.rb b/activerecord/lib/active_record/associations/preloader.rb
new file mode 100644
index 0000000000..62caf02a2c
--- /dev/null
+++ b/activerecord/lib/active_record/associations/preloader.rb
@@ -0,0 +1,204 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ module Associations
+ # Implements the details of eager loading of Active Record associations.
+ #
+ # Suppose that you have the following two Active Record models:
+ #
+ # class Author < ActiveRecord::Base
+ # # columns: name, age
+ # has_many :books
+ # end
+ #
+ # class Book < ActiveRecord::Base
+ # # columns: title, sales, author_id
+ # end
+ #
+ # When you load an author with all associated books Active Record will make
+ # multiple queries like this:
+ #
+ # Author.includes(:books).where(name: ['bell hooks', 'Homer']).to_a
+ #
+ # => SELECT `authors`.* FROM `authors` WHERE `name` IN ('bell hooks', 'Homer')
+ # => SELECT `books`.* FROM `books` WHERE `author_id` IN (2, 5)
+ #
+ # Active Record saves the ids of the records from the first query to use in
+ # the second. Depending on the number of associations involved there can be
+ # arbitrarily many SQL queries made.
+ #
+ # However, if there is a WHERE clause that spans across tables Active
+ # Record will fall back to a slightly more resource-intensive single query:
+ #
+    #   Author.includes(:books).where(books: {title: 'Iliad'}).to_a
+    #   => SELECT `authors`.`id` AS t0_r0, `authors`.`name` AS t0_r1, `authors`.`age` AS t0_r2,
+    #             `books`.`id` AS t1_r0, `books`.`title` AS t1_r1, `books`.`sales` AS t1_r2
+    #      FROM `authors`
+    #      LEFT OUTER JOIN `books` ON `authors`.`id` = `books`.`author_id`
+    #      WHERE `books`.`title` = 'Iliad'
+ #
+    # This can result in many rows that contain redundant data; because it performs poorly at
+    # scale, it is only used when necessary.
+ #
+ class Preloader #:nodoc:
+ extend ActiveSupport::Autoload
+
+ eager_autoload do
+ autoload :Association, "active_record/associations/preloader/association"
+ autoload :SingularAssociation, "active_record/associations/preloader/singular_association"
+ autoload :CollectionAssociation, "active_record/associations/preloader/collection_association"
+ autoload :ThroughAssociation, "active_record/associations/preloader/through_association"
+
+ autoload :HasMany, "active_record/associations/preloader/has_many"
+ autoload :HasManyThrough, "active_record/associations/preloader/has_many_through"
+ autoload :HasOne, "active_record/associations/preloader/has_one"
+ autoload :HasOneThrough, "active_record/associations/preloader/has_one_through"
+ autoload :BelongsTo, "active_record/associations/preloader/belongs_to"
+ end
+
+ # Eager loads the named associations for the given Active Record record(s).
+ #
+ # In this description, 'association name' shall refer to the name passed
+ # to an association creation method. For example, a model that specifies
+ # <tt>belongs_to :author</tt>, <tt>has_many :buyers</tt> has association
+ # names +:author+ and +:buyers+.
+ #
+ # == Parameters
+      # +records+ is an array of ActiveRecord::Base. This array need not be flat,
+      # i.e. +records+ itself may also contain arrays of records. In any case,
+      # +preload_associations+ will preload all of the associations' records by
+      # flattening +records+.
+ #
+ # +associations+ specifies one or more associations that you want to
+ # preload. It may be:
+ # - a Symbol or a String which specifies a single association name. For
+ # example, specifying +:books+ allows this method to preload all books
+ # for an Author.
+ # - an Array which specifies multiple association names. This array
+ # is processed recursively. For example, specifying <tt>[:avatar, :books]</tt>
+ # allows this method to preload an author's avatar as well as all of his
+ # books.
+ # - a Hash which specifies multiple association names, as well as
+ # association names for the to-be-preloaded association objects. For
+ # example, specifying <tt>{ author: :avatar }</tt> will preload a
+ # book's author, as well as that author's avatar.
+ #
+ # +:associations+ has the same format as the +:include+ option for
+ # <tt>ActiveRecord::Base.find</tt>. So +associations+ could look like this:
+ #
+ # :books
+ # [ :books, :author ]
+ # { author: :avatar }
+ # [ :books, { author: :avatar } ]
+ def preload(records, associations, preload_scope = nil)
+ records = Array.wrap(records).compact.uniq
+ associations = Array.wrap(associations)
+
+ if records.empty?
+ []
+ else
+ associations.flat_map { |association|
+ preloaders_on association, records, preload_scope
+ }
+ end
+ end
+
+ private
+
+ # Loads all the given data into +records+ for the +association+.
+ def preloaders_on(association, records, scope)
+ case association
+ when Hash
+ preloaders_for_hash(association, records, scope)
+ when Symbol
+ preloaders_for_one(association, records, scope)
+ when String
+ preloaders_for_one(association.to_sym, records, scope)
+ else
+ raise ArgumentError, "#{association.inspect} was not recognized for preload"
+ end
+ end
+
+ def preloaders_for_hash(association, records, scope)
+ association.flat_map { |parent, child|
+ loaders = preloaders_for_one parent, records, scope
+
+ recs = loaders.flat_map(&:preloaded_records).uniq
+ loaders.concat Array.wrap(child).flat_map { |assoc|
+ preloaders_on assoc, recs, scope
+ }
+ loaders
+ }
+ end
+
+ # Loads all the given data into +records+ for a singular +association+.
+ #
+      # Functions by instantiating a preloader class such as Preloader::HasManyThrough and
+      # calling the +run+ method for each class passed in via the +records+ argument.
+      #
+      # Not all records have the same class, so we group the records by reflection and
+      # preload each group, so that if various subclasses share the same association we
+      # do not split them up unnecessarily.
+ #
+ # Additionally, polymorphic belongs_to associations can have multiple associated
+ # classes, depending on the polymorphic_type field. So we group by the classes as
+ # well.
+ def preloaders_for_one(association, records, scope)
+ grouped_records(association, records).flat_map do |reflection, klasses|
+ klasses.map do |rhs_klass, rs|
+ loader = preloader_for(reflection, rs).new(rhs_klass, rs, reflection, scope)
+ loader.run self
+ loader
+ end
+ end
+ end
+
+ def grouped_records(association, records)
+ h = {}
+ records.each do |record|
+ next unless record
+ assoc = record.association(association)
+ next unless assoc.klass
+ klasses = h[assoc.reflection] ||= {}
+ (klasses[assoc.klass] ||= []) << record
+ end
+ h
+ end
+
+ class AlreadyLoaded # :nodoc:
+ attr_reader :owners, :reflection
+
+ def initialize(klass, owners, reflection, preload_scope)
+ @owners = owners
+ @reflection = reflection
+ end
+
+ def run(preloader); end
+
+ def preloaded_records
+ owners.flat_map { |owner| owner.association(reflection.name).target }
+ end
+ end
+
+      # Returns a class containing the logic needed to preload the data
+ # and attach it to a relation. For example +Preloader::Association+ or
+ # +Preloader::HasManyThrough+. The class returned implements a `run` method
+ # that accepts a preloader.
+ def preloader_for(reflection, owners)
+ if owners.first.association(reflection.name).loaded?
+ return AlreadyLoaded
+ end
+ reflection.check_preloadable!
+
+ case reflection.macro
+ when :has_many
+ reflection.options[:through] ? HasManyThrough : HasMany
+ when :has_one
+ reflection.options[:through] ? HasOneThrough : HasOne
+ when :belongs_to
+ BelongsTo
+ end
+ end
+ end
+ end
+end
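A sketch of how this internal API is driven, reusing the Author/Book models from the comment above; in normal use the same effect is reached through Relation#includes:

    authors = Author.where(name: ["bell hooks", "Homer"]).to_a
    ActiveRecord::Associations::Preloader.new.preload(authors, :books)

    authors.first.books.loaded? # => true, no additional query is issued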
diff --git a/activerecord/lib/active_record/associations/preloader/association.rb b/activerecord/lib/active_record/associations/preloader/association.rb
new file mode 100644
index 0000000000..5ba03c555a
--- /dev/null
+++ b/activerecord/lib/active_record/associations/preloader/association.rb
@@ -0,0 +1,143 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ module Associations
+ class Preloader
+ class Association #:nodoc:
+ attr_reader :owners, :reflection, :preload_scope, :model, :klass
+ attr_reader :preloaded_records
+
+ def initialize(klass, owners, reflection, preload_scope)
+ @klass = klass
+ @owners = owners
+ @reflection = reflection
+ @preload_scope = preload_scope
+ @model = owners.first && owners.first.class
+ @preloaded_records = []
+ end
+
+ def run(preloader)
+ preload(preloader)
+ end
+
+ def preload(preloader)
+ raise NotImplementedError
+ end
+
+ # The name of the key on the associated records
+ def association_key_name
+ raise NotImplementedError
+ end
+
+ # The name of the key on the model which declares the association
+ def owner_key_name
+ raise NotImplementedError
+ end
+
+ private
+ def options
+ reflection.options
+ end
+
+ def associated_records_by_owner(preloader)
+ records = load_records do |record|
+ owner = owners_by_key[convert_key(record[association_key_name])]
+ association = owner.association(reflection.name)
+ association.set_inverse_instance(record)
+ end
+
+ owners.each_with_object({}) do |owner, result|
+ result[owner] = records[convert_key(owner[owner_key_name])] || []
+ end
+ end
+
+ def owner_keys
+ unless defined?(@owner_keys)
+ @owner_keys = owners.map do |owner|
+ owner[owner_key_name]
+ end
+ @owner_keys.uniq!
+ @owner_keys.compact!
+ end
+ @owner_keys
+ end
+
+ def owners_by_key
+ unless defined?(@owners_by_key)
+ @owners_by_key = owners.each_with_object({}) do |owner, h|
+ h[convert_key(owner[owner_key_name])] = owner
+ end
+ end
+ @owners_by_key
+ end
+
+ def key_conversion_required?
+ @key_conversion_required ||= association_key_type != owner_key_type
+ end
+
+ def convert_key(key)
+ if key_conversion_required?
+ key.to_s
+ else
+ key
+ end
+ end
+
+ def association_key_type
+ @klass.type_for_attribute(association_key_name.to_s).type
+ end
+
+ def owner_key_type
+ @model.type_for_attribute(owner_key_name.to_s).type
+ end
+
+ def load_records(&block)
+ return {} if owner_keys.empty?
+ # Some databases impose a limit on the number of ids in a list (in Oracle it's 1000)
+ # Make several smaller queries if necessary or make one query if the adapter supports it
+ slices = owner_keys.each_slice(klass.connection.in_clause_length || owner_keys.size)
+ @preloaded_records = slices.flat_map do |slice|
+ records_for(slice, &block)
+ end
+ @preloaded_records.group_by do |record|
+ convert_key(record[association_key_name])
+ end
+ end
+
+ def records_for(ids, &block)
+ scope.where(association_key_name => ids).load(&block)
+ end
+
+ def scope
+ @scope ||= build_scope
+ end
+
+ def reflection_scope
+ @reflection_scope ||= reflection.scope_for(klass)
+ end
+
+ def klass_scope
+ current_scope = klass.current_scope
+
+ if current_scope && current_scope.empty_scope?
+ klass.unscoped
+ else
+ klass.default_scoped
+ end
+ end
+
+ def build_scope
+ scope = klass_scope
+
+ if reflection.type
+ scope.where!(reflection.type => model.base_class.sti_name)
+ end
+
+ scope.merge!(reflection_scope)
+ scope.merge!(preload_scope) if preload_scope
+ scope
+ end
+ end
+ end
+ end
+end
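A sketch of the slicing done in load_records, assuming a hypothetical adapter whose in_clause_length is 1000 (the Oracle limit mentioned in the comment above):

    owner_keys = (1..2500).to_a
    owner_keys.each_slice(1000).to_a.length # => 3
    # Three queries of the form
    #   SELECT `books`.* FROM `books` WHERE `books`.`author_id` IN (...)
    # are issued; their rows are flattened into @preloaded_records and then
    # grouped by the converted association key.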
diff --git a/activerecord/lib/active_record/associations/preloader/belongs_to.rb b/activerecord/lib/active_record/associations/preloader/belongs_to.rb
new file mode 100644
index 0000000000..ae9695f26a
--- /dev/null
+++ b/activerecord/lib/active_record/associations/preloader/belongs_to.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ module Associations
+ class Preloader
+ class BelongsTo < SingularAssociation #:nodoc:
+ def association_key_name
+ options[:primary_key] || klass && klass.primary_key
+ end
+
+ def owner_key_name
+ reflection.foreign_key
+ end
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/associations/preloader/collection_association.rb b/activerecord/lib/active_record/associations/preloader/collection_association.rb
new file mode 100644
index 0000000000..fb920a642c
--- /dev/null
+++ b/activerecord/lib/active_record/associations/preloader/collection_association.rb
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ module Associations
+ class Preloader
+ class CollectionAssociation < Association #:nodoc:
+ private
+
+ def preload(preloader)
+ associated_records_by_owner(preloader).each do |owner, records|
+ association = owner.association(reflection.name)
+ association.loaded!
+ association.target.concat(records)
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/associations/preloader/has_many.rb b/activerecord/lib/active_record/associations/preloader/has_many.rb
new file mode 100644
index 0000000000..29a1ce099d
--- /dev/null
+++ b/activerecord/lib/active_record/associations/preloader/has_many.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ module Associations
+ class Preloader
+ class HasMany < CollectionAssociation #:nodoc:
+ def association_key_name
+ reflection.foreign_key
+ end
+
+ def owner_key_name
+ reflection.active_record_primary_key
+ end
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/associations/preloader/has_many_through.rb b/activerecord/lib/active_record/associations/preloader/has_many_through.rb
new file mode 100644
index 0000000000..0639fdca44
--- /dev/null
+++ b/activerecord/lib/active_record/associations/preloader/has_many_through.rb
@@ -0,0 +1,21 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ module Associations
+ class Preloader
+ class HasManyThrough < CollectionAssociation #:nodoc:
+ include ThroughAssociation
+
+ def associated_records_by_owner(preloader)
+ records_by_owner = super
+
+ if reflection_scope.distinct_value
+ records_by_owner.each_value(&:uniq!)
+ end
+
+ records_by_owner
+ end
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/associations/preloader/has_one.rb b/activerecord/lib/active_record/associations/preloader/has_one.rb
new file mode 100644
index 0000000000..d87abf630f
--- /dev/null
+++ b/activerecord/lib/active_record/associations/preloader/has_one.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ module Associations
+ class Preloader
+ class HasOne < SingularAssociation #:nodoc:
+ def association_key_name
+ reflection.foreign_key
+ end
+
+ def owner_key_name
+ reflection.active_record_primary_key
+ end
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/associations/preloader/has_one_through.rb b/activerecord/lib/active_record/associations/preloader/has_one_through.rb
new file mode 100644
index 0000000000..17734d0257
--- /dev/null
+++ b/activerecord/lib/active_record/associations/preloader/has_one_through.rb
@@ -0,0 +1,11 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ module Associations
+ class Preloader
+ class HasOneThrough < SingularAssociation #:nodoc:
+ include ThroughAssociation
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/associations/preloader/singular_association.rb b/activerecord/lib/active_record/associations/preloader/singular_association.rb
new file mode 100644
index 0000000000..266b5f6b1c
--- /dev/null
+++ b/activerecord/lib/active_record/associations/preloader/singular_association.rb
@@ -0,0 +1,20 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ module Associations
+ class Preloader
+ class SingularAssociation < Association #:nodoc:
+ private
+
+ def preload(preloader)
+ associated_records_by_owner(preloader).each do |owner, associated_records|
+ record = associated_records.first
+
+ association = owner.association(reflection.name)
+ association.target = record
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/associations/preloader/through_association.rb b/activerecord/lib/active_record/associations/preloader/through_association.rb
new file mode 100644
index 0000000000..de4b847a41
--- /dev/null
+++ b/activerecord/lib/active_record/associations/preloader/through_association.rb
@@ -0,0 +1,111 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ module Associations
+ class Preloader
+ module ThroughAssociation #:nodoc:
+ def through_reflection
+ reflection.through_reflection
+ end
+
+ def source_reflection
+ reflection.source_reflection
+ end
+
+ def associated_records_by_owner(preloader)
+ through_scope = through_scope()
+
+ preloader.preload(owners,
+ through_reflection.name,
+ through_scope)
+
+ through_records = owners.map do |owner|
+ center = owner.association(through_reflection.name).target
+ [owner, Array(center)]
+ end
+
+ reset_association(owners, through_reflection.name, through_scope)
+
+ middle_records = through_records.flat_map(&:last)
+
+ preloaders = preloader.preload(middle_records,
+ source_reflection.name,
+ reflection_scope)
+
+ @preloaded_records = preloaders.flat_map(&:preloaded_records)
+
+ middle_to_pl = preloaders.each_with_object({}) do |pl, h|
+ pl.owners.each { |middle|
+ h[middle] = pl
+ }
+ end
+
+ through_records.each_with_object({}) do |(lhs, center), records_by_owner|
+ pl_to_middle = center.group_by { |record| middle_to_pl[record] }
+
+ records_by_owner[lhs] = pl_to_middle.flat_map do |pl, middles|
+ rhs_records = middles.flat_map { |r|
+ r.association(source_reflection.name).target
+ }.compact
+
+ # Respect the order on `reflection_scope` if it exists, else use the natural order.
+ if reflection_scope.values[:order].present?
+ @id_map ||= id_to_index_map @preloaded_records
+ rhs_records.sort_by { |rhs| @id_map[rhs] }
+ else
+ rhs_records
+ end
+ end
+ end
+ end
+
+ private
+
+ def id_to_index_map(ids)
+ id_map = {}
+ ids.each_with_index { |id, index| id_map[id] = index }
+ id_map
+ end
+
+ def reset_association(owners, association_name, through_scope)
+ should_reset = (through_scope != through_reflection.klass.unscoped) ||
+ (options[:source_type] && through_reflection.collection?)
+
+ # Don't cache the association - we would only be caching a subset
+ if should_reset
+ owners.each { |owner|
+ owner.association(association_name).reset
+ }
+ end
+ end
+
+ def through_scope
+ scope = through_reflection.klass.unscoped
+ values = reflection_scope.values
+
+ if options[:source_type]
+ scope.where! reflection.foreign_type => options[:source_type]
+ else
+ unless reflection_scope.where_clause.empty?
+ scope.includes_values = Array(values[:includes] || options[:source])
+ scope.where_clause = reflection_scope.where_clause
+ if joins = values[:joins]
+ scope.joins!(source_reflection.name => joins)
+ end
+ if left_outer_joins = values[:left_outer_joins]
+ scope.left_outer_joins!(source_reflection.name => left_outer_joins)
+ end
+ end
+
+ scope.references! values[:references]
+ if scope.eager_loading? && order_values = values[:order]
+ scope = scope.order(order_values)
+ end
+ end
+
+ scope
+ end
+ end
+ end
+ end
+end
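The flow in associated_records_by_owner, sketched with the Physician/Appointment/Patient models used earlier in this patch:

    Physician.includes(:patients).to_a
    # 1. preloader.preload(physicians, :appointments, through_scope) loads the through records
    # 2. preloader.preload(appointments, :patient, reflection_scope) loads the source records
    # Each physician is then mapped to the patients reachable through its appointments,
    # de-duplicated when the reflection scope is distinct and re-sorted when it has an order.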
diff --git a/activerecord/lib/active_record/associations/singular_association.rb b/activerecord/lib/active_record/associations/singular_association.rb
new file mode 100644
index 0000000000..c1eee3c630
--- /dev/null
+++ b/activerecord/lib/active_record/associations/singular_association.rb
@@ -0,0 +1,81 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ module Associations
+ class SingularAssociation < Association #:nodoc:
+ # Implements the reader method, e.g. foo.bar for Foo.has_one :bar
+ def reader
+ if !loaded? || stale_target?
+ reload
+ end
+
+ target
+ end
+
+ # Implements the writer method, e.g. foo.bar= for Foo.belongs_to :bar
+ def writer(record)
+ replace(record)
+ end
+
+ def build(attributes = {})
+ record = build_record(attributes)
+ yield(record) if block_given?
+ set_new_record(record)
+ record
+ end
+
+ # Implements the reload reader method, e.g. foo.reload_bar for
+ # Foo.has_one :bar
+ def force_reload_reader
+ klass.uncached { reload }
+ target
+ end
+
+ private
+ def scope_for_create
+ super.except!(klass.primary_key)
+ end
+
+ def find_target
+ scope = self.scope
+ return scope.take if skip_statement_cache?(scope)
+
+ conn = klass.connection
+ sc = reflection.association_scope_cache(conn, owner) do
+ StatementCache.create(conn) { |params|
+ as = AssociationScope.create { params.bind }
+ target_scope.merge!(as.scope(self)).limit(1)
+ }
+ end
+
+ binds = AssociationScope.get_bind_values(owner, reflection.chain)
+ sc.execute(binds, klass, conn) do |record|
+ set_inverse_instance record
+ end.first
+ rescue ::RangeError
+ nil
+ end
+
+ def replace(record)
+ raise NotImplementedError, "Subclasses must implement a replace(record) method"
+ end
+
+ def set_new_record(record)
+ replace(record)
+ end
+
+ def _create_record(attributes, raise_error = false)
+ unless owner.persisted?
+ raise ActiveRecord::RecordNotSaved, "You cannot call create unless the parent is saved"
+ end
+
+ record = build_record(attributes)
+ yield(record) if block_given?
+ saved = record.save
+ set_new_record(record)
+ raise RecordInvalid.new(record) if !saved && raise_error
+ record
+ end
+ end
+ end
+end
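How the reader, writer and force-reload entry points surface on a model, sketched with hypothetical Book/Author models:

    class Book < ActiveRecord::Base
      belongs_to :author
    end

    book = Book.first
    book.author                # reader: loads the target (or returns the cached one)
    book.author = Author.first # writer: delegates to the subclass's replace
    book.reload_author         # force_reload_reader: reloads outside the query cache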
diff --git a/activerecord/lib/active_record/associations/through_association.rb b/activerecord/lib/active_record/associations/through_association.rb
new file mode 100644
index 0000000000..76237c4a0c
--- /dev/null
+++ b/activerecord/lib/active_record/associations/through_association.rb
@@ -0,0 +1,108 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ # = Active Record Through Association
+ module Associations
+ module ThroughAssociation #:nodoc:
+ delegate :source_reflection, :through_reflection, to: :reflection
+
+ private
+
+ # We merge in these scopes for two reasons:
+ #
+ # 1. To get the default_scope conditions for any of the other reflections in the chain
+ # 2. To get the type conditions for any STI models in the chain
+ def target_scope
+ scope = super
+ reflection.chain.drop(1).each do |reflection|
+ relation = reflection.klass.all
+ scope.merge!(
+ relation.except(:select, :create_with, :includes, :preload, :joins, :eager_load)
+ )
+ end
+ scope
+ end
+
+ # Construct attributes for :through pointing to owner and associate. This is used by the
+ # methods which create and delete records on the association.
+ #
+ # We only support indirectly modifying through associations which have a belongs_to source.
+ # This is the "has_many :tags, through: :taggings" situation, where the join model
+      # typically has a belongs_to on both sides. In other words, associations which could also
+ # be represented as has_and_belongs_to_many associations.
+ #
+ # We do not support creating/deleting records on the association where the source has
+ # some other type, because this opens up a whole can of worms, and in basically any
+ # situation it is more natural for the user to just create or modify their join records
+ # directly as required.
+ def construct_join_attributes(*records)
+ ensure_mutable
+
+ if source_reflection.association_primary_key(reflection.klass) == reflection.klass.primary_key
+ join_attributes = { source_reflection.name => records }
+ else
+ join_attributes = {
+ source_reflection.foreign_key =>
+ records.map { |record|
+ record.send(source_reflection.association_primary_key(reflection.klass))
+ }
+ }
+ end
+
+ if options[:source_type]
+ join_attributes[source_reflection.foreign_type] =
+ records.map { |record| record.class.base_class.name }
+ end
+
+ if records.count == 1
+ Hash[join_attributes.map { |k, v| [k, v.first] }]
+ else
+ join_attributes
+ end
+ end
+
+      # Note: this does not capture all cases; for example, it would be impractical to try to
+      # properly support stale-checking for nested associations.
+ def stale_state
+ if through_reflection.belongs_to?
+ owner[through_reflection.foreign_key] && owner[through_reflection.foreign_key].to_s
+ end
+ end
+
+ def foreign_key_present?
+ through_reflection.belongs_to? && !owner[through_reflection.foreign_key].nil?
+ end
+
+ def ensure_mutable
+ unless source_reflection.belongs_to?
+ if reflection.has_one?
+ raise HasOneThroughCantAssociateThroughHasOneOrManyReflection.new(owner, reflection)
+ else
+ raise HasManyThroughCantAssociateThroughHasOneOrManyReflection.new(owner, reflection)
+ end
+ end
+ end
+
+ def ensure_not_nested
+ if reflection.nested?
+ if reflection.has_one?
+ raise HasOneThroughNestedAssociationsAreReadonly.new(owner, reflection)
+ else
+ raise HasManyThroughNestedAssociationsAreReadonly.new(owner, reflection)
+ end
+ end
+ end
+
+ def build_record(attributes)
+ inverse = source_reflection.inverse_of
+ target = through_association.target
+
+ if inverse && target && !target.is_a?(Array)
+ attributes[inverse.foreign_key] = target.id
+ end
+
+ super(attributes)
+ end
+ end
+ end
+end
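A sketch of what construct_join_attributes produces for the has_many :tags, through: :taggings example mentioned above (tag is a persisted Tag record):

    # Post    has_many :taggings; has_many :tags, through: :taggings
    # Tagging belongs_to :post; belongs_to :tag

    construct_join_attributes(tag) # => { tag: tag }
    # post.tags << tag then builds the Tagging through the :taggings association,
    # merging these attributes with the owner side supplied by that association.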
diff --git a/activerecord/lib/active_record/attribute.rb b/activerecord/lib/active_record/attribute.rb
new file mode 100644
index 0000000000..fc474edc15
--- /dev/null
+++ b/activerecord/lib/active_record/attribute.rb
@@ -0,0 +1,242 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ class Attribute # :nodoc:
+ class << self
+ def from_database(name, value, type)
+ FromDatabase.new(name, value, type)
+ end
+
+ def from_user(name, value, type, original_attribute = nil)
+ FromUser.new(name, value, type, original_attribute)
+ end
+
+ def with_cast_value(name, value, type)
+ WithCastValue.new(name, value, type)
+ end
+
+ def null(name)
+ Null.new(name)
+ end
+
+ def uninitialized(name, type)
+ Uninitialized.new(name, type)
+ end
+ end
+
+ attr_reader :name, :value_before_type_cast, :type
+
+ # This method should not be called directly.
+ # Use #from_database or #from_user
+ def initialize(name, value_before_type_cast, type, original_attribute = nil)
+ @name = name
+ @value_before_type_cast = value_before_type_cast
+ @type = type
+ @original_attribute = original_attribute
+ end
+
+ def value
+ # `defined?` is cheaper than `||=` when we get back falsy values
+ @value = type_cast(value_before_type_cast) unless defined?(@value)
+ @value
+ end
+
+ def original_value
+ if assigned?
+ original_attribute.original_value
+ else
+ type_cast(value_before_type_cast)
+ end
+ end
+
+ def value_for_database
+ type.serialize(value)
+ end
+
+ def changed?
+ changed_from_assignment? || changed_in_place?
+ end
+
+ def changed_in_place?
+ has_been_read? && type.changed_in_place?(original_value_for_database, value)
+ end
+
+ def forgetting_assignment
+ with_value_from_database(value_for_database)
+ end
+
+ def with_value_from_user(value)
+ type.assert_valid_value(value)
+ self.class.from_user(name, value, type, original_attribute || self)
+ end
+
+ def with_value_from_database(value)
+ self.class.from_database(name, value, type)
+ end
+
+ def with_cast_value(value)
+ self.class.with_cast_value(name, value, type)
+ end
+
+ def with_type(type)
+ if changed_in_place?
+ with_value_from_user(value).with_type(type)
+ else
+ self.class.new(name, value_before_type_cast, type, original_attribute)
+ end
+ end
+
+ def type_cast(*)
+ raise NotImplementedError
+ end
+
+ def initialized?
+ true
+ end
+
+ def came_from_user?
+ false
+ end
+
+ def has_been_read?
+ defined?(@value)
+ end
+
+ def ==(other)
+ self.class == other.class &&
+ name == other.name &&
+ value_before_type_cast == other.value_before_type_cast &&
+ type == other.type
+ end
+ alias eql? ==
+
+ def hash
+ [self.class, name, value_before_type_cast, type].hash
+ end
+
+ def init_with(coder)
+ @name = coder["name"]
+ @value_before_type_cast = coder["value_before_type_cast"]
+ @type = coder["type"]
+ @original_attribute = coder["original_attribute"]
+ @value = coder["value"] if coder.map.key?("value")
+ end
+
+ def encode_with(coder)
+ coder["name"] = name
+ coder["value_before_type_cast"] = value_before_type_cast unless value_before_type_cast.nil?
+ coder["type"] = type if type
+ coder["original_attribute"] = original_attribute if original_attribute
+ coder["value"] = value if defined?(@value)
+ end
+
+ # TODO Change this to private once we've dropped Ruby 2.2 support.
+ # Workaround for Ruby 2.2 "private attribute?" warning.
+ protected
+
+ attr_reader :original_attribute
+ alias_method :assigned?, :original_attribute
+
+ def original_value_for_database
+ if assigned?
+ original_attribute.original_value_for_database
+ else
+ _original_value_for_database
+ end
+ end
+
+ private
+ def initialize_dup(other)
+ if defined?(@value) && @value.duplicable?
+ @value = @value.dup
+ end
+ end
+
+ def changed_from_assignment?
+ assigned? && type.changed?(original_value, value, value_before_type_cast)
+ end
+
+ def _original_value_for_database
+ type.serialize(original_value)
+ end
+
+ class FromDatabase < Attribute # :nodoc:
+ def type_cast(value)
+ type.deserialize(value)
+ end
+
+ def _original_value_for_database
+ value_before_type_cast
+ end
+ end
+
+ class FromUser < Attribute # :nodoc:
+ def type_cast(value)
+ type.cast(value)
+ end
+
+ def came_from_user?
+ !type.value_constructed_by_mass_assignment?(value_before_type_cast)
+ end
+ end
+
+ class WithCastValue < Attribute # :nodoc:
+ def type_cast(value)
+ value
+ end
+
+ def changed_in_place?
+ false
+ end
+ end
+
+ class Null < Attribute # :nodoc:
+ def initialize(name)
+ super(name, nil, Type.default_value)
+ end
+
+ def type_cast(*)
+ nil
+ end
+
+ def with_type(type)
+ self.class.with_cast_value(name, nil, type)
+ end
+
+ def with_value_from_database(value)
+ raise ActiveModel::MissingAttributeError, "can't write unknown attribute `#{name}`"
+ end
+ alias_method :with_value_from_user, :with_value_from_database
+ end
+
+ class Uninitialized < Attribute # :nodoc:
+ UNINITIALIZED_ORIGINAL_VALUE = Object.new
+
+ def initialize(name, type)
+ super(name, nil, type)
+ end
+
+ def value
+ if block_given?
+ yield name
+ end
+ end
+
+ def original_value
+ UNINITIALIZED_ORIGINAL_VALUE
+ end
+
+ def value_for_database
+ end
+
+ def initialized?
+ false
+ end
+
+ def with_type(type)
+ self.class.new(name, type)
+ end
+ end
+ private_constant :FromDatabase, :FromUser, :Null, :Uninitialized, :WithCastValue
+ end
+end
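A minimal sketch of this internal (:nodoc:) Attribute API: values are cast lazily on first read and changes are tracked against the original database value.

    require "active_record"

    type = ActiveModel::Type::Integer.new
    attribute = ActiveRecord::Attribute.from_database("age", "42", type)

    attribute.value                        # => 42, deserialized on first read
    attribute.value_before_type_cast       # => "42"

    updated = attribute.with_value_from_user(43)
    updated.changed?                       # => true
    updated.forgetting_assignment.changed? # => false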
diff --git a/activerecord/lib/active_record/attribute/user_provided_default.rb b/activerecord/lib/active_record/attribute/user_provided_default.rb
new file mode 100644
index 0000000000..690a931615
--- /dev/null
+++ b/activerecord/lib/active_record/attribute/user_provided_default.rb
@@ -0,0 +1,32 @@
+# frozen_string_literal: true
+
+require_relative "../attribute"
+
+module ActiveRecord
+ class Attribute # :nodoc:
+ class UserProvidedDefault < FromUser # :nodoc:
+ def initialize(name, value, type, database_default)
+ @user_provided_value = value
+ super(name, value, type, database_default)
+ end
+
+ def value_before_type_cast
+ if user_provided_value.is_a?(Proc)
+ @memoized_value_before_type_cast ||= user_provided_value.call
+ else
+ @user_provided_value
+ end
+ end
+
+ def with_type(type)
+ self.class.new(name, user_provided_value, type, original_attribute)
+ end
+
+ # TODO Change this to private once we've dropped Ruby 2.2 support.
+ # Workaround for Ruby 2.2 "private attribute?" warning.
+ protected
+
+ attr_reader :user_provided_value
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/attribute_assignment.rb b/activerecord/lib/active_record/attribute_assignment.rb
new file mode 100644
index 0000000000..8b0d9aab01
--- /dev/null
+++ b/activerecord/lib/active_record/attribute_assignment.rb
@@ -0,0 +1,93 @@
+# frozen_string_literal: true
+
+require "active_model/forbidden_attributes_protection"
+
+module ActiveRecord
+ module AttributeAssignment
+ extend ActiveSupport::Concern
+ include ActiveModel::AttributeAssignment
+
+ # Alias for ActiveModel::AttributeAssignment#assign_attributes. See ActiveModel::AttributeAssignment.
+ def attributes=(attributes)
+ assign_attributes(attributes)
+ end
+
+ private
+
+ def _assign_attributes(attributes)
+ multi_parameter_attributes = {}
+ nested_parameter_attributes = {}
+
+ attributes.each do |k, v|
+ if k.include?("(")
+ multi_parameter_attributes[k] = attributes.delete(k)
+ elsif v.is_a?(Hash)
+ nested_parameter_attributes[k] = attributes.delete(k)
+ end
+ end
+ super(attributes)
+
+ assign_nested_parameter_attributes(nested_parameter_attributes) unless nested_parameter_attributes.empty?
+ assign_multiparameter_attributes(multi_parameter_attributes) unless multi_parameter_attributes.empty?
+ end
+
+ # Assign any deferred nested attributes after the base attributes have been set.
+ def assign_nested_parameter_attributes(pairs)
+ pairs.each { |k, v| _assign_attribute(k, v) }
+ end
+
+      # Instantiates objects for all attribute classes that need more than one constructor parameter. This is done
+      # by calling new on the column type or aggregation type (through composed_of) object with these parameters.
+      # So having the pairs written_on(1) = "2004", written_on(2) = "6", written_on(3) = "24" will instantiate
+      # written_on (a date type) with Date.new("2004", "6", "24"). You can also specify a typecast character in the
+      # parentheses to have the parameters typecast before they're used in the constructor. Use i for Integer and
+      # f for Float. If all the values for a given attribute are empty, the attribute will be set to +nil+.
+ def assign_multiparameter_attributes(pairs)
+ execute_callstack_for_multiparameter_attributes(
+ extract_callstack_for_multiparameter_attributes(pairs)
+ )
+ end
+
+ def execute_callstack_for_multiparameter_attributes(callstack)
+ errors = []
+ callstack.each do |name, values_with_empty_parameters|
+ begin
+ if values_with_empty_parameters.each_value.all?(&:nil?)
+ values = nil
+ else
+ values = values_with_empty_parameters
+ end
+ send("#{name}=", values)
+ rescue => ex
+ errors << AttributeAssignmentError.new("error on assignment #{values_with_empty_parameters.values.inspect} to #{name} (#{ex.message})", ex, name)
+ end
+ end
+ unless errors.empty?
+ error_descriptions = errors.map(&:message).join(",")
+ raise MultiparameterAssignmentErrors.new(errors), "#{errors.size} error(s) on assignment of multiparameter attributes [#{error_descriptions}]"
+ end
+ end
+
+ def extract_callstack_for_multiparameter_attributes(pairs)
+ attributes = {}
+
+ pairs.each do |(multiparameter_name, value)|
+ attribute_name = multiparameter_name.split("(").first
+ attributes[attribute_name] ||= {}
+
+ parameter_value = value.empty? ? nil : type_cast_attribute_value(multiparameter_name, value)
+ attributes[attribute_name][find_parameter_position(multiparameter_name)] ||= parameter_value
+ end
+
+ attributes
+ end
+
+ def type_cast_attribute_value(multiparameter_name, value)
+ multiparameter_name =~ /\([0-9]*([if])\)/ ? value.send("to_" + $1) : value
+ end
+
+ def find_parameter_position(multiparameter_name)
+ multiparameter_name.scan(/\(([0-9]*).*\)/).first.first.to_i
+ end
+ end
+end
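A usage sketch of multiparameter assignment, assuming a hypothetical Topic model with a written_on date column; the i suffix requests Integer typecasting as described in the comment above:

    topic = Topic.new
    topic.attributes = {
      "written_on(1i)" => "2004",
      "written_on(2i)" => "6",
      "written_on(3i)" => "24"
    }
    topic.written_on # => Thu, 24 Jun 2004

    # With every part blank the attribute is set to nil instead:
    topic.attributes = { "written_on(1i)" => "", "written_on(2i)" => "", "written_on(3i)" => "" }
    topic.written_on # => nil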
diff --git a/activerecord/lib/active_record/attribute_decorators.rb b/activerecord/lib/active_record/attribute_decorators.rb
new file mode 100644
index 0000000000..98b7805c0a
--- /dev/null
+++ b/activerecord/lib/active_record/attribute_decorators.rb
@@ -0,0 +1,90 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ module AttributeDecorators # :nodoc:
+ extend ActiveSupport::Concern
+
+ included do
+ class_attribute :attribute_type_decorations, instance_accessor: false, default: TypeDecorator.new # :internal:
+ end
+
+ module ClassMethods # :nodoc:
+ # This method is an internal API used to create class macros such as
+ # +serialize+, and features like time zone aware attributes.
+ #
+ # Used to wrap the type of an attribute in a new type.
+ # When the schema for a model is loaded, attributes with the same name as
+ # +column_name+ will have their type yielded to the given block. The
+ # return value of that block will be used instead.
+ #
+ # Subsequent calls where +column_name+ and +decorator_name+ are the same
+ # will override the previous decorator, not decorate twice. This can be
+ # used to create idempotent class macros like +serialize+
+ def decorate_attribute_type(column_name, decorator_name, &block)
+ matcher = ->(name, _) { name == column_name.to_s }
+ key = "_#{column_name}_#{decorator_name}"
+ decorate_matching_attribute_types(matcher, key, &block)
+ end
+
+ # This method is an internal API used to create higher level features like
+ # time zone aware attributes.
+ #
+ # When the schema for a model is loaded, +matcher+ will be called for each
+ # attribute with its name and type. If the matcher returns a truthy value,
+ # the type will then be yielded to the given block, and the return value
+ # of that block will replace the type.
+ #
+ # Subsequent calls to this method with the same value for +decorator_name+
+ # will replace the previous decorator, not decorate twice. This can be
+ # used to ensure that class macros are idempotent.
+ def decorate_matching_attribute_types(matcher, decorator_name, &block)
+ reload_schema_from_cache
+ decorator_name = decorator_name.to_s
+
+ # Create new hashes so we don't modify parent classes
+ self.attribute_type_decorations = attribute_type_decorations.merge(decorator_name => [matcher, block])
+ end
+
+ private
+
+ def load_schema!
+ super
+ attribute_types.each do |name, type|
+ decorated_type = attribute_type_decorations.apply(name, type)
+ define_attribute(name, decorated_type)
+ end
+ end
+ end
+
+ class TypeDecorator # :nodoc:
+ delegate :clear, to: :@decorations
+
+ def initialize(decorations = {})
+ @decorations = decorations
+ end
+
+ def merge(*args)
+ TypeDecorator.new(@decorations.merge(*args))
+ end
+
+ def apply(name, type)
+ decorations = decorators_for(name, type)
+ decorations.inject(type) do |new_type, block|
+ block.call(new_type)
+ end
+ end
+
+ private
+
+ def decorators_for(name, type)
+ matching(name, type).map(&:last)
+ end
+
+ def matching(name, type)
+ @decorations.values.select do |(matcher, _)|
+ matcher.call(name, type)
+ end
+ end
+ end
+ end
+end
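A hedged sketch of decorate_attribute_type; Product and MoneyType are hypothetical (MoneyType standing in for any ActiveModel::Type::Value wrapper) and not part of this patch:

    class Product < ActiveRecord::Base
      decorate_attribute_type(:price, :money) do |subtype|
        MoneyType.new(subtype)
      end
    end

    # When Product's schema is loaded, the original :price type is yielded to the
    # block and replaced by the MoneyType wrapper. Calling the macro again with the
    # same column and decorator name swaps the decorator rather than stacking it.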
diff --git a/activerecord/lib/active_record/attribute_methods.rb b/activerecord/lib/active_record/attribute_methods.rb
new file mode 100644
index 0000000000..e4ca6c8408
--- /dev/null
+++ b/activerecord/lib/active_record/attribute_methods.rb
@@ -0,0 +1,449 @@
+# frozen_string_literal: true
+
+require "mutex_m"
+
+module ActiveRecord
+ # = Active Record Attribute Methods
+ module AttributeMethods
+ extend ActiveSupport::Concern
+ include ActiveModel::AttributeMethods
+
+ included do
+ initialize_generated_modules
+ include Read
+ include Write
+ include BeforeTypeCast
+ include Query
+ include PrimaryKey
+ include TimeZoneConversion
+ include Dirty
+ include Serialization
+
+ delegate :column_for_attribute, to: :class
+ end
+
+ AttrNames = Module.new {
+ def self.set_name_cache(name, value)
+ const_name = "ATTR_#{name}"
+ unless const_defined? const_name
+ const_set const_name, value.dup.freeze
+ end
+ end
+ }
+
+ BLACKLISTED_CLASS_METHODS = %w(private public protected allocate new name parent superclass)
+
+ class GeneratedAttributeMethods < Module; end # :nodoc:
+
+ module ClassMethods
+ def inherited(child_class) #:nodoc:
+ child_class.initialize_generated_modules
+ super
+ end
+
+ def initialize_generated_modules # :nodoc:
+ @generated_attribute_methods = GeneratedAttributeMethods.new { extend Mutex_m }
+ @attribute_methods_generated = false
+ include @generated_attribute_methods
+
+ super
+ end
+
+      # Generates all the attribute-related methods for columns in the database:
+      # accessors, mutators and query methods.
+ def define_attribute_methods # :nodoc:
+ return false if @attribute_methods_generated
+ # Use a mutex; we don't want two threads simultaneously trying to define
+ # attribute methods.
+ generated_attribute_methods.synchronize do
+ return false if @attribute_methods_generated
+ superclass.define_attribute_methods unless self == base_class
+ super(attribute_names)
+ @attribute_methods_generated = true
+ end
+ end
+
+ def undefine_attribute_methods # :nodoc:
+ generated_attribute_methods.synchronize do
+ super if defined?(@attribute_methods_generated) && @attribute_methods_generated
+ @attribute_methods_generated = false
+ end
+ end
+
+      # Raises an ActiveRecord::DangerousAttributeError exception when an
+      # \Active \Record method is defined in the model; otherwise returns +false+.
+ #
+ # class Person < ActiveRecord::Base
+ # def save
+ # 'already defined by Active Record'
+ # end
+ # end
+ #
+ # Person.instance_method_already_implemented?(:save)
+ # # => ActiveRecord::DangerousAttributeError: save is defined by Active Record. Check to make sure that you don't have an attribute or method with the same name.
+ #
+ # Person.instance_method_already_implemented?(:name)
+ # # => false
+ def instance_method_already_implemented?(method_name)
+ if dangerous_attribute_method?(method_name)
+ raise DangerousAttributeError, "#{method_name} is defined by Active Record. Check to make sure that you don't have an attribute or method with the same name."
+ end
+
+ if superclass == Base
+ super
+ else
+ # If ThisClass < ... < SomeSuperClass < ... < Base and SomeSuperClass
+ # defines its own attribute method, then we don't want to overwrite that.
+ defined = method_defined_within?(method_name, superclass, Base) &&
+ ! superclass.instance_method(method_name).owner.is_a?(GeneratedAttributeMethods)
+ defined || super
+ end
+ end
+
+ # A method name is 'dangerous' if it is already (re)defined by Active Record, but
+ # not by any ancestors. (So 'puts' is not dangerous but 'save' is.)
+ def dangerous_attribute_method?(name) # :nodoc:
+ method_defined_within?(name, Base)
+ end
+
+ def method_defined_within?(name, klass, superklass = klass.superclass) # :nodoc:
+ if klass.method_defined?(name) || klass.private_method_defined?(name)
+ if superklass.method_defined?(name) || superklass.private_method_defined?(name)
+ klass.instance_method(name).owner != superklass.instance_method(name).owner
+ else
+ true
+ end
+ else
+ false
+ end
+ end
+
+ # A class method is 'dangerous' if it is already (re)defined by Active Record, but
+ # not by any ancestors. (So 'puts' is not dangerous but 'new' is.)
+ def dangerous_class_method?(method_name)
+ BLACKLISTED_CLASS_METHODS.include?(method_name.to_s) || class_method_defined_within?(method_name, Base)
+ end
+
+ def class_method_defined_within?(name, klass, superklass = klass.superclass) # :nodoc:
+ if klass.respond_to?(name, true)
+ if superklass.respond_to?(name, true)
+ klass.method(name).owner != superklass.method(name).owner
+ else
+ true
+ end
+ else
+ false
+ end
+ end
+
+      # Returns +true+ if +attribute+ is an attribute method and the table exists,
+      # +false+ otherwise.
+ #
+ # class Person < ActiveRecord::Base
+ # end
+ #
+ # Person.attribute_method?('name') # => true
+ # Person.attribute_method?(:age=) # => true
+ # Person.attribute_method?(:nothing) # => false
+ def attribute_method?(attribute)
+ super || (table_exists? && column_names.include?(attribute.to_s.sub(/=$/, "")))
+ end
+
+ # Returns an array of column names as strings if it's not an abstract class and
+      # the table exists. Otherwise it returns an empty array.
+ #
+ # class Person < ActiveRecord::Base
+ # end
+ #
+ # Person.attribute_names
+ # # => ["id", "created_at", "updated_at", "name", "age"]
+ def attribute_names
+ @attribute_names ||= if !abstract_class? && table_exists?
+ attribute_types.keys
+ else
+ []
+ end
+ end
+
+ # Returns true if the given attribute exists, otherwise false.
+ #
+ # class Person < ActiveRecord::Base
+ # end
+ #
+ # Person.has_attribute?('name') # => true
+ # Person.has_attribute?(:age) # => true
+ # Person.has_attribute?(:nothing) # => false
+ def has_attribute?(attr_name)
+ attribute_types.key?(attr_name.to_s)
+ end
+
+ # Returns the column object for the named attribute.
+ # Returns a +ActiveRecord::ConnectionAdapters::NullColumn+ if the
+ # named attribute does not exist.
+ #
+ # class Person < ActiveRecord::Base
+ # end
+ #
+ # person = Person.new
+ # person.column_for_attribute(:name) # the result depends on the ConnectionAdapter
+ # # => #<ActiveRecord::ConnectionAdapters::Column:0x007ff4ab083980 @name="name", @sql_type="varchar(255)", @null=true, ...>
+ #
+ # person.column_for_attribute(:nothing)
+ # # => #<ActiveRecord::ConnectionAdapters::NullColumn:0xXXX @name=nil, @sql_type=nil, @cast_type=#<Type::Value>, ...>
+ def column_for_attribute(name)
+ name = name.to_s
+ columns_hash.fetch(name) do
+ ConnectionAdapters::NullColumn.new(name)
+ end
+ end
+ end
+
+ # A Person object with a name attribute can ask <tt>person.respond_to?(:name)</tt>,
+ # <tt>person.respond_to?(:name=)</tt>, and <tt>person.respond_to?(:name?)</tt>
+ # which will all return +true+. It also defines the attribute methods if they have
+ # not been generated.
+ #
+ # class Person < ActiveRecord::Base
+ # end
+ #
+ # person = Person.new
+ # person.respond_to?(:name) # => true
+ # person.respond_to?(:name=) # => true
+ # person.respond_to?(:name?) # => true
+ # person.respond_to?('age') # => true
+ # person.respond_to?('age=') # => true
+ # person.respond_to?('age?') # => true
+ # person.respond_to?(:nothing) # => false
+ def respond_to?(name, include_private = false)
+ return false unless super
+
+ case name
+ when :to_partial_path
+ name = "to_partial_path".freeze
+ when :to_model
+ name = "to_model".freeze
+ else
+ name = name.to_s
+ end
+
+ # If the result is true then check for the select case.
+ # For queries selecting a subset of columns, return false for unselected columns.
+      # We check defined?(@attributes) so as not to issue warnings if called on objects that
+ # have been allocated but not yet initialized.
+ if defined?(@attributes) && self.class.column_names.include?(name)
+ return has_attribute?(name)
+ end
+
+ return true
+ end
+
+ # Returns +true+ if the given attribute is in the attributes hash, otherwise +false+.
+ #
+ # class Person < ActiveRecord::Base
+ # end
+ #
+ # person = Person.new
+ # person.has_attribute?(:name) # => true
+ # person.has_attribute?('age') # => true
+ # person.has_attribute?(:nothing) # => false
+ def has_attribute?(attr_name)
+ @attributes.key?(attr_name.to_s)
+ end
+
+ # Returns an array of names for the attributes available on this object.
+ #
+ # class Person < ActiveRecord::Base
+ # end
+ #
+ # person = Person.new
+ # person.attribute_names
+ # # => ["id", "created_at", "updated_at", "name", "age"]
+ def attribute_names
+ @attributes.keys
+ end
+
+ # Returns a hash of all the attributes with their names as keys and the values of the attributes as values.
+ #
+ # class Person < ActiveRecord::Base
+ # end
+ #
+ # person = Person.create(name: 'Francesco', age: 22)
+ # person.attributes
+ # # => {"id"=>3, "created_at"=>Sun, 21 Oct 2012 04:53:04, "updated_at"=>Sun, 21 Oct 2012 04:53:04, "name"=>"Francesco", "age"=>22}
+ def attributes
+ @attributes.to_hash
+ end
+
+ # Returns an <tt>#inspect</tt>-like string for the value of the
+ # attribute +attr_name+. String attributes are truncated to 50
+ # characters; Date and Time attributes are returned in the
+ # <tt>:db</tt> format. Other attributes return the value of
+ # <tt>#inspect</tt> without modification.
+ #
+ # person = Person.create!(name: 'David Heinemeier Hansson ' * 3)
+ #
+ # person.attribute_for_inspect(:name)
+ # # => "\"David Heinemeier Hansson David Heinemeier Hansson ...\""
+ #
+ # person.attribute_for_inspect(:created_at)
+ # # => "\"2012-10-22 00:15:07\""
+ #
+ # person.attribute_for_inspect(:tag_ids)
+ # # => "[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]"
+ def attribute_for_inspect(attr_name)
+ value = read_attribute(attr_name)
+
+ if value.is_a?(String) && value.length > 50
+ "#{value[0, 50]}...".inspect
+ elsif value.is_a?(Date) || value.is_a?(Time)
+ %("#{value.to_s(:db)}")
+ else
+ value.inspect
+ end
+ end
+
+ # Returns +true+ if the specified +attribute+ has been set by the user or by a
+ # database load and is neither +nil+ nor <tt>empty?</tt> (the latter only applies
+ # to objects that respond to <tt>empty?</tt>, most notably Strings). Otherwise, +false+.
+ # Note that it always returns +true+ with boolean attributes.
+ #
+ # class Task < ActiveRecord::Base
+ # end
+ #
+ # task = Task.new(title: '', is_done: false)
+ # task.attribute_present?(:title) # => false
+ # task.attribute_present?(:is_done) # => true
+ # task.title = 'Buy milk'
+ # task.is_done = true
+ # task.attribute_present?(:title) # => true
+ # task.attribute_present?(:is_done) # => true
+ def attribute_present?(attribute)
+ value = _read_attribute(attribute)
+ !value.nil? && !(value.respond_to?(:empty?) && value.empty?)
+ end
+
+ # Returns the value of the attribute identified by <tt>attr_name</tt> after it has been typecast (for example,
+ # "2004-12-12" in a date column is cast to a date object, like Date.new(2004, 12, 12)). It raises
+ # <tt>ActiveModel::MissingAttributeError</tt> if the identified attribute is missing.
+ #
+ # Note: +:id+ is always present.
+ #
+ # class Person < ActiveRecord::Base
+ # belongs_to :organization
+ # end
+ #
+ # person = Person.new(name: 'Francesco', age: '22')
+ # person[:name] # => "Francesco"
+ # person[:age] # => 22
+ #
+ # person = Person.select('id').first
+ # person[:name] # => ActiveModel::MissingAttributeError: missing attribute: name
+ # person[:organization_id] # => ActiveModel::MissingAttributeError: missing attribute: organization_id
+ def [](attr_name)
+ read_attribute(attr_name) { |n| missing_attribute(n, caller) }
+ end
+
+ # Updates the attribute identified by <tt>attr_name</tt> with the specified +value+.
+ # (Alias for #write_attribute.)
+ #
+ # class Person < ActiveRecord::Base
+ # end
+ #
+ # person = Person.new
+ # person[:age] = '22'
+ # person[:age] # => 22
+ # person[:age].class # => Integer
+ def []=(attr_name, value)
+ write_attribute(attr_name, value)
+ end
+
+ # Returns the names of all database fields which have been read from this
+ # model. This can be useful in development mode to determine which fields
+ # need to be selected. For performance critical pages, selecting only the
+ # required fields can be an easy performance win (assuming you aren't using
+ # all of the fields on the model).
+ #
+ # For example:
+ #
+ # class PostsController < ActionController::Base
+ # after_action :print_accessed_fields, only: :index
+ #
+ # def index
+ # @posts = Post.all
+ # end
+ #
+ # private
+ #
+ # def print_accessed_fields
+ # p @posts.first.accessed_fields
+ # end
+ # end
+ #
+ # Which allows you to quickly change your code to:
+ #
+ # class PostsController < ActionController::Base
+ # def index
+ # @posts = Post.select(:id, :title, :author_id, :updated_at)
+ # end
+ # end
+ def accessed_fields
+ @attributes.accessed
+ end
+
+ protected
+
+ def attribute_method?(attr_name) # :nodoc:
+ # We check defined? because Syck calls respond_to? before actually calling initialize.
+ defined?(@attributes) && @attributes.key?(attr_name)
+ end
+
+ private
+
+ def arel_attributes_with_values_for_create(attribute_names)
+ arel_attributes_with_values(attributes_for_create(attribute_names))
+ end
+
+ def arel_attributes_with_values_for_update(attribute_names)
+ arel_attributes_with_values(attributes_for_update(attribute_names))
+ end
+
+ # Returns a Hash of the Arel::Attributes and attribute values that have been
+ # typecasted for use in an Arel insert/update method.
+ def arel_attributes_with_values(attribute_names)
+ attrs = {}
+ arel_table = self.class.arel_table
+
+ attribute_names.each do |name|
+ attrs[arel_table[name]] = typecasted_attribute_value(name)
+ end
+ attrs
+ end
+
+ # Filters the primary keys and readonly attributes from the attribute names.
+ def attributes_for_update(attribute_names)
+ attribute_names.reject do |name|
+ readonly_attribute?(name)
+ end
+ end
+
+ # Filters out the primary keys, from the attribute names, when the primary
+ # key is to be generated (e.g. the id attribute has no value).
+ def attributes_for_create(attribute_names)
+ attribute_names.reject do |name|
+ pk_attribute?(name) && id.nil?
+ end
+ end
+
+ def readonly_attribute?(name)
+ self.class.readonly_attributes.include?(name)
+ end
+
+ def pk_attribute?(name)
+ name == self.class.primary_key
+ end
+
+ def typecasted_attribute_value(name)
+ _read_attribute(name)
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/attribute_methods/before_type_cast.rb b/activerecord/lib/active_record/attribute_methods/before_type_cast.rb
new file mode 100644
index 0000000000..5941f51a1a
--- /dev/null
+++ b/activerecord/lib/active_record/attribute_methods/before_type_cast.rb
@@ -0,0 +1,78 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ module AttributeMethods
+ # = Active Record Attribute Methods Before Type Cast
+ #
+ # ActiveRecord::AttributeMethods::BeforeTypeCast provides a way to
+ # read the value of the attributes before typecasting and deserialization.
+ #
+ # class Task < ActiveRecord::Base
+ # end
+ #
+ # task = Task.new(id: '1', completed_on: '2012-10-21')
+ # task.id # => 1
+ # task.completed_on # => Sun, 21 Oct 2012
+ #
+ # task.attributes_before_type_cast
+ # # => {"id"=>"1", "completed_on"=>"2012-10-21", ... }
+ # task.read_attribute_before_type_cast('id') # => "1"
+ # task.read_attribute_before_type_cast('completed_on') # => "2012-10-21"
+ #
+ # In addition to #read_attribute_before_type_cast and #attributes_before_type_cast,
+ # it declares a method for all attributes with the <tt>*_before_type_cast</tt>
+ # suffix.
+ #
+ # task.id_before_type_cast # => "1"
+ # task.completed_on_before_type_cast # => "2012-10-21"
+ module BeforeTypeCast
+ extend ActiveSupport::Concern
+
+ included do
+ attribute_method_suffix "_before_type_cast"
+ attribute_method_suffix "_came_from_user?"
+ end
+
+ # Returns the value of the attribute identified by +attr_name+ before
+ # typecasting and deserialization.
+ #
+ # class Task < ActiveRecord::Base
+ # end
+ #
+ # task = Task.new(id: '1', completed_on: '2012-10-21')
+ # task.read_attribute('id') # => 1
+ # task.read_attribute_before_type_cast('id') # => '1'
+ # task.read_attribute('completed_on') # => Sun, 21 Oct 2012
+ # task.read_attribute_before_type_cast('completed_on') # => "2012-10-21"
+ # task.read_attribute_before_type_cast(:completed_on) # => "2012-10-21"
+ def read_attribute_before_type_cast(attr_name)
+ @attributes[attr_name.to_s].value_before_type_cast
+ end
+
+ # Returns a hash of attributes before typecasting and deserialization.
+ #
+ # class Task < ActiveRecord::Base
+ # end
+ #
+ # task = Task.new(title: nil, is_done: true, completed_on: '2012-10-21')
+ # task.attributes
+ # # => {"id"=>nil, "title"=>nil, "is_done"=>true, "completed_on"=>Sun, 21 Oct 2012, "created_at"=>nil, "updated_at"=>nil}
+ # task.attributes_before_type_cast
+ # # => {"id"=>nil, "title"=>nil, "is_done"=>true, "completed_on"=>"2012-10-21", "created_at"=>nil, "updated_at"=>nil}
+ def attributes_before_type_cast
+ @attributes.values_before_type_cast
+ end
+
+ private
+
+ # Handle *_before_type_cast for method_missing.
+ def attribute_before_type_cast(attribute_name)
+ read_attribute_before_type_cast(attribute_name)
+ end
+
+ def attribute_came_from_user?(attribute_name)
+ @attributes[attribute_name].came_from_user?
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/attribute_methods/dirty.rb b/activerecord/lib/active_record/attribute_methods/dirty.rb
new file mode 100644
index 0000000000..5efe051125
--- /dev/null
+++ b/activerecord/lib/active_record/attribute_methods/dirty.rb
@@ -0,0 +1,252 @@
+# frozen_string_literal: true
+
+require "active_support/core_ext/module/attribute_accessors"
+require_relative "../attribute_mutation_tracker"
+
+module ActiveRecord
+ module AttributeMethods
+ module Dirty # :nodoc:
+ extend ActiveSupport::Concern
+
+ include ActiveModel::Dirty
+
+ included do
+ if self < ::ActiveRecord::Timestamp
+ raise "You cannot include Dirty after Timestamp"
+ end
+
+ class_attribute :partial_writes, instance_writer: false, default: true
+
+ after_create { changes_applied }
+ after_update { changes_applied }
+
+ # Attribute methods for "changed in last call to save?"
+ attribute_method_affix(prefix: "saved_change_to_", suffix: "?")
+ attribute_method_prefix("saved_change_to_")
+ attribute_method_suffix("_before_last_save")
+
+ # Attribute methods for "will change if I call save?"
+ attribute_method_affix(prefix: "will_save_change_to_", suffix: "?")
+ attribute_method_suffix("_change_to_be_saved", "_in_database")
+ end
+
+ # Reloads the record and clears changed attributes.
+ def reload(*)
+ super.tap do
+ @mutations_before_last_save = nil
+ clear_mutation_trackers
+ @changed_attributes = ActiveSupport::HashWithIndifferentAccess.new
+ end
+ end
+
+ def initialize_dup(other) # :nodoc:
+ super
+ @attributes = self.class._default_attributes.map do |attr|
+ attr.with_value_from_user(@attributes.fetch_value(attr.name))
+ end
+ clear_mutation_trackers
+ end
+
+ def changes_applied
+ @mutations_before_last_save = mutation_tracker
+ @mutations_from_database = AttributeMutationTracker.new(@attributes)
+ @changed_attributes = ActiveSupport::HashWithIndifferentAccess.new
+ forget_attribute_assignments
+ clear_mutation_trackers
+ end
+
+ def clear_changes_information
+ @mutations_before_last_save = nil
+ @changed_attributes = ActiveSupport::HashWithIndifferentAccess.new
+ forget_attribute_assignments
+ clear_mutation_trackers
+ end
+
+ def write_attribute_without_type_cast(attr_name, *)
+ result = super
+ clear_attribute_change(attr_name)
+ result
+ end
+
+ def clear_attribute_changes(attr_names)
+ super
+ attr_names.each do |attr_name|
+ clear_attribute_change(attr_name)
+ end
+ end
+
+ def changed_attributes
+ # This should only be set by methods which will call changed_attributes
+ # multiple times when it is known that the computed value cannot change.
+ if defined?(@cached_changed_attributes)
+ @cached_changed_attributes
+ else
+ super.reverse_merge(mutation_tracker.changed_values).freeze
+ end
+ end
+
+ def changes
+ cache_changed_attributes do
+ super
+ end
+ end
+
+ def previous_changes
+ mutations_before_last_save.changes
+ end
+
+ def attribute_changed_in_place?(attr_name)
+ mutation_tracker.changed_in_place?(attr_name)
+ end
+
+ # Did this attribute change when we last saved? This method can be invoked
+ # as `saved_change_to_name?` instead of `saved_change_to_attribute?("name")`.
+ # Behaves similarly to +attribute_changed?+. This method is useful in
+ # after callbacks to determine if the call to save changed a certain
+ # attribute.
+ #
+ # ==== Options
+ #
+ # +from+ When passed, this method will return false unless the original
+ # value is equal to the given option
+ #
+ # +to+ When passed, this method will return false unless the value was
+ # changed to the given value
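+ #
+ # A short usage sketch (assuming a +Person+ model with a +name+ column;
+ # values are illustrative):
+ #
+ # person = Person.create!(name: "Bob")
+ # person.saved_change_to_attribute?(:name) # => true
+ # person.saved_change_to_attribute?(:name, from: nil, to: "Bob") # => true
+ # person.saved_change_to_attribute?(:name, to: "Alice") # => false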
+ def saved_change_to_attribute?(attr_name, **options)
+ mutations_before_last_save.changed?(attr_name, **options)
+ end
+
+ # Returns the change to an attribute during the last save. If the
+ # attribute was changed, the result will be an array containing the
+ # original value and the saved value.
+ #
+ # Behaves similarly to +attribute_change+. This method is useful in after
+ # callbacks, to see the change in an attribute that just occurred.
+ #
+ # This method can be invoked as `saved_change_to_name` instead of
+ # `saved_change_to_attribute("name")`.
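+ #
+ # For instance (continuing the illustrative +Person+ example above):
+ #
+ # person.update(name: "Alice")
+ # person.saved_change_to_attribute(:name) # => ["Bob", "Alice"]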
+ def saved_change_to_attribute(attr_name)
+ mutations_before_last_save.change_to_attribute(attr_name)
+ end
+
+ # Returns the original value of an attribute before the last save.
+ # Behaves similarly to +attribute_was+. This method is useful in after
+ # callbacks to get the original value of an attribute before the save that
+ # just occurred.
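+ #
+ # For instance (illustrative values, continuing the example above):
+ #
+ # person.update(name: "Carol")
+ # person.attribute_before_last_save(:name) # => "Alice"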
+ def attribute_before_last_save(attr_name)
+ mutations_before_last_save.original_value(attr_name)
+ end
+
+ # Did the last call to `save` make any changes?
+ def saved_changes?
+ mutations_before_last_save.any_changes?
+ end
+
+ # Returns a hash containing all the changes that were just saved.
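+ #
+ # For instance (illustrative values):
+ #
+ # person.update(name: "Dave")
+ # person.saved_changes # => {"name"=>["Carol", "Dave"], "updated_at"=>[...]}
+ # person.saved_changes? # => true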
+ def saved_changes
+ mutations_before_last_save.changes
+ end
+
+ # Alias for `attribute_changed?`
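+ #
+ # A minimal sketch of the "to be saved" variant (illustrative +name+ column):
+ #
+ # person.name = "Eve"
+ # person.will_save_change_to_attribute?(:name) # => true
+ # person.save
+ # person.will_save_change_to_attribute?(:name) # => false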
+ def will_save_change_to_attribute?(attr_name, **options)
+ mutations_from_database.changed?(attr_name, **options)
+ end
+
+ # Alias for `attribute_change`
+ def attribute_change_to_be_saved(attr_name)
+ mutations_from_database.change_to_attribute(attr_name)
+ end
+
+ # Alias for `attribute_was`
+ def attribute_in_database(attr_name)
+ mutations_from_database.original_value(attr_name)
+ end
+
+ # Alias for `changed?`
+ def has_changes_to_save?
+ mutations_from_database.any_changes?
+ end
+
+ # Alias for `changes`
+ def changes_to_save
+ mutations_from_database.changes
+ end
+
+ # Alias for `changed`
+ def changed_attribute_names_to_save
+ changes_to_save.keys
+ end
+
+ # Alias for `changed_attributes`
+ def attributes_in_database
+ changes_to_save.transform_values(&:first)
+ end
+
+ private
+
+ def mutation_tracker
+ unless defined?(@mutation_tracker)
+ @mutation_tracker = nil
+ end
+ @mutation_tracker ||= AttributeMutationTracker.new(@attributes)
+ end
+
+ def mutations_from_database
+ unless defined?(@mutations_from_database)
+ @mutations_from_database = nil
+ end
+ @mutations_from_database ||= mutation_tracker
+ end
+
+ def changes_include?(attr_name)
+ super || mutation_tracker.changed?(attr_name)
+ end
+
+ def clear_attribute_change(attr_name)
+ mutation_tracker.forget_change(attr_name)
+ mutations_from_database.forget_change(attr_name)
+ end
+
+ def attribute_will_change!(attr_name)
+ super
+ mutations_from_database.force_change(attr_name)
+ end
+
+ def _update_record(*)
+ partial_writes? ? super(keys_for_partial_write) : super
+ end
+
+ def _create_record(*)
+ partial_writes? ? super(keys_for_partial_write) : super
+ end
+
+ def keys_for_partial_write
+ changed_attribute_names_to_save & self.class.column_names
+ end
+
+ def forget_attribute_assignments
+ @attributes = @attributes.map(&:forgetting_assignment)
+ end
+
+ def clear_mutation_trackers
+ @mutation_tracker = nil
+ @mutations_from_database = nil
+ end
+
+ def mutations_before_last_save
+ @mutations_before_last_save ||= NullMutationTracker.instance
+ end
+
+ def cache_changed_attributes
+ @cached_changed_attributes = changed_attributes
+ yield
+ ensure
+ clear_changed_attributes_cache
+ end
+
+ def clear_changed_attributes_cache
+ remove_instance_variable(:@cached_changed_attributes) if defined?(@cached_changed_attributes)
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/attribute_methods/primary_key.rb b/activerecord/lib/active_record/attribute_methods/primary_key.rb
new file mode 100644
index 0000000000..63c059e291
--- /dev/null
+++ b/activerecord/lib/active_record/attribute_methods/primary_key.rb
@@ -0,0 +1,141 @@
+# frozen_string_literal: true
+
+require "set"
+
+module ActiveRecord
+ module AttributeMethods
+ module PrimaryKey
+ extend ActiveSupport::Concern
+
+ # Returns this record's primary key value wrapped in an array if one is
+ # available.
+ def to_key
+ key = id
+ [key] if key
+ end
+
+ # Returns the primary key value.
+ def id
+ sync_with_transaction_state
+ _read_attribute(self.class.primary_key) if self.class.primary_key
+ end
+
+ # Sets the primary key value.
+ def id=(value)
+ sync_with_transaction_state
+ _write_attribute(self.class.primary_key, value) if self.class.primary_key
+ end
+
+ # Queries the primary key value.
+ def id?
+ sync_with_transaction_state
+ query_attribute(self.class.primary_key)
+ end
+
+ # Returns the primary key value before type cast.
+ def id_before_type_cast
+ sync_with_transaction_state
+ read_attribute_before_type_cast(self.class.primary_key)
+ end
+
+ # Returns the primary key previous value.
+ def id_was
+ sync_with_transaction_state
+ attribute_was(self.class.primary_key)
+ end
+
+ def id_in_database
+ sync_with_transaction_state
+ attribute_in_database(self.class.primary_key)
+ end
+
+ private
+
+ def attribute_method?(attr_name)
+ attr_name == "id" || super
+ end
+
+ module ClassMethods
+ ID_ATTRIBUTE_METHODS = %w(id id= id? id_before_type_cast id_was id_in_database).to_set
+
+ def instance_method_already_implemented?(method_name)
+ super || primary_key && ID_ATTRIBUTE_METHODS.include?(method_name)
+ end
+
+ def dangerous_attribute_method?(method_name)
+ super && !ID_ATTRIBUTE_METHODS.include?(method_name)
+ end
+
+ # Defines the primary key field -- can be overridden in subclasses.
+ # Overwriting will negate any effect of the +primary_key_prefix_type+
+ # setting, though.
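+ #
+ # For a conventionally defined model this simply returns the default:
+ #
+ # class Project < ActiveRecord::Base
+ # end
+ #
+ # Project.primary_key # => "id"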
+ def primary_key
+ @primary_key = reset_primary_key unless defined? @primary_key
+ @primary_key
+ end
+
+ # Returns a quoted version of the primary key name, used to construct
+ # SQL statements.
+ def quoted_primary_key
+ @quoted_primary_key ||= connection.quote_column_name(primary_key)
+ end
+
+ def reset_primary_key #:nodoc:
+ if self == base_class
+ self.primary_key = get_primary_key(base_class.name)
+ else
+ self.primary_key = base_class.primary_key
+ end
+ end
+
+ def get_primary_key(base_name) #:nodoc:
+ if base_name && primary_key_prefix_type == :table_name
+ base_name.foreign_key(false)
+ elsif base_name && primary_key_prefix_type == :table_name_with_underscore
+ base_name.foreign_key
+ else
+ if ActiveRecord::Base != self && table_exists?
+ pk = connection.schema_cache.primary_keys(table_name)
+ suppress_composite_primary_key(pk)
+ else
+ "id"
+ end
+ end
+ end
+
+ # Sets the name of the primary key column.
+ #
+ # class Project < ActiveRecord::Base
+ # self.primary_key = 'sysid'
+ # end
+ #
+ # You can also define the #primary_key method yourself:
+ #
+ # class Project < ActiveRecord::Base
+ # def self.primary_key
+ # 'foo_' + super
+ # end
+ # end
+ #
+ # Project.primary_key # => "foo_id"
+ def primary_key=(value)
+ @primary_key = value && value.to_s
+ @quoted_primary_key = nil
+ @attributes_builder = nil
+ end
+
+ private
+
+ def suppress_composite_primary_key(pk)
+ return pk unless pk.is_a?(Array)
+
+ warn <<-WARNING.strip_heredoc
+ WARNING: Active Record does not support composite primary keys.
+
+ #{table_name} has a composite primary key. The composite primary key is ignored.
+ WARNING
+ end
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/attribute_methods/query.rb b/activerecord/lib/active_record/attribute_methods/query.rb
new file mode 100644
index 0000000000..6757e9b66a
--- /dev/null
+++ b/activerecord/lib/active_record/attribute_methods/query.rb
@@ -0,0 +1,42 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ module AttributeMethods
+ module Query
+ extend ActiveSupport::Concern
+
+ included do
+ attribute_method_suffix "?"
+ end
+
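+ # Returns the attribute's value coerced to +true+ or +false+: +nil+, +false+,
+ # blank values, and numeric zeroes are treated as +false+, most other values
+ # as +true+. The generated <tt>attribute?</tt> methods delegate here.
+ #
+ # A short sketch (assuming a +Task+ model with +title+ and +is_done+ columns):
+ #
+ # task = Task.new(title: "", is_done: true)
+ # task.title? # => false
+ # task.is_done? # => true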
+ def query_attribute(attr_name)
+ value = self[attr_name]
+
+ case value
+ when true then true
+ when false, nil then false
+ else
+ column = self.class.columns_hash[attr_name]
+ if column.nil?
+ if Numeric === value || value !~ /[^0-9]/
+ !value.to_i.zero?
+ else
+ return false if ActiveModel::Type::Boolean::FALSE_VALUES.include?(value)
+ !value.blank?
+ end
+ elsif value.respond_to?(:zero?)
+ !value.zero?
+ else
+ !value.blank?
+ end
+ end
+ end
+
+ private
+ # Handle *? for method_missing.
+ def attribute?(attribute_name)
+ query_attribute(attribute_name)
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/attribute_methods/read.rb b/activerecord/lib/active_record/attribute_methods/read.rb
new file mode 100644
index 0000000000..6adaeb0121
--- /dev/null
+++ b/activerecord/lib/active_record/attribute_methods/read.rb
@@ -0,0 +1,81 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ module AttributeMethods
+ module Read
+ extend ActiveSupport::Concern
+
+ module ClassMethods
+ private
+
+ # We want to generate the methods via module_eval rather than
+ # define_method, because define_method is slower on dispatch.
+ # Evaluating many similar methods may use more memory as the instruction
+ # sequences are duplicated and cached (in MRI). define_method may
+ # be slower on dispatch, but if you're careful about the closure
+ # created, then define_method will consume much less memory.
+ #
+ # But sometimes the database might return columns with
+ # characters that are not allowed in normal method names (like
+ # 'my_column(omg)'). So to work around this we first define with
+ # the __temp__ identifier, and then use alias method to rename
+ # it to what we want.
+ #
+ # We are also defining a constant to hold the frozen string of
+ # the attribute name. Using a constant means that we do not have
+ # to allocate an object on each call to the attribute method.
+ # Making it frozen means that it doesn't get duped when used to
+ # key the @attributes in read_attribute.
+ def define_method_attribute(name)
+ safe_name = name.unpack("h*".freeze).first
+ temp_method = "__temp__#{safe_name}"
+
+ ActiveRecord::AttributeMethods::AttrNames.set_name_cache safe_name, name
+
+ generated_attribute_methods.module_eval <<-STR, __FILE__, __LINE__ + 1
+ def #{temp_method}
+ name = ::ActiveRecord::AttributeMethods::AttrNames::ATTR_#{safe_name}
+ _read_attribute(name) { |n| missing_attribute(n, caller) }
+ end
+ STR
+
+ generated_attribute_methods.module_eval do
+ alias_method name, temp_method
+ undef_method temp_method
+ end
+ end
+ end
+
+ # Returns the value of the attribute identified by <tt>attr_name</tt> after
+ # it has been typecast (for example, "2004-12-12" in a date column is cast
+ # to a date object, like Date.new(2004, 12, 12)).
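+ #
+ # For instance (assuming a +Person+ model with a +name+ column):
+ #
+ # person = Person.new(name: "Francesco")
+ # person.read_attribute(:name) # => "Francesco"
+ # person.read_attribute(:id) # => nil, since the record is not persisted yet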
+ def read_attribute(attr_name, &block)
+ name = if self.class.attribute_alias?(attr_name)
+ self.class.attribute_alias(attr_name).to_s
+ else
+ attr_name.to_s
+ end
+
+ name = self.class.primary_key if name == "id".freeze && self.class.primary_key
+ _read_attribute(name, &block)
+ end
+
+ # This method exists to avoid the expensive primary_key check internally, without
+ # breaking compatibility with the read_attribute API
+ if defined?(JRUBY_VERSION)
+ # This form is significantly faster on JRuby, and this is one of our biggest hotspots.
+ # https://github.com/jruby/jruby/pull/2562
+ def _read_attribute(attr_name, &block) # :nodoc:
+ @attributes.fetch_value(attr_name.to_s, &block)
+ end
+ else
+ def _read_attribute(attr_name) # :nodoc:
+ @attributes.fetch_value(attr_name.to_s) { |n| yield n if block_given? }
+ end
+ end
+
+ alias :attribute :_read_attribute
+ private :attribute
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/attribute_methods/serialization.rb b/activerecord/lib/active_record/attribute_methods/serialization.rb
new file mode 100644
index 0000000000..6ed45d8737
--- /dev/null
+++ b/activerecord/lib/active_record/attribute_methods/serialization.rb
@@ -0,0 +1,69 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ module AttributeMethods
+ module Serialization
+ extend ActiveSupport::Concern
+
+ module ClassMethods
+ # If you have an attribute that needs to be saved to the database as an
+ # object, and retrieved as the same object, then specify the name of that
+ # attribute using this method and it will be handled automatically. The
+ # serialization is done through YAML. If +class_name+ is specified, the
+ # serialized object must be of that class on assignment and retrieval.
+ # Otherwise SerializationTypeMismatch will be raised.
+ #
+ # Empty objects as <tt>{}</tt>, in the case of +Hash+, or <tt>[]</tt>, in the case of
+ # +Array+, will always be persisted as null.
+ #
+ # Keep in mind that database adapters handle certain serialization tasks
+ # for you. For instance: +json+ and +jsonb+ types in PostgreSQL will be
+ # converted between JSON object/array syntax and Ruby +Hash+ or +Array+
+ # objects transparently. There is no need to use #serialize in this
+ # case.
+ #
+ # For more complex cases, such as conversion to or from your application
+ # domain objects, consider using the ActiveRecord::Attributes API.
+ #
+ # ==== Parameters
+ #
+ # * +attr_name+ - The field name that should be serialized.
+ # * +class_name_or_coder+ - Optional, a coder object, which responds to +.load+ and +.dump+
+ # or a class name that the object type should be equal to.
+ #
+ # ==== Example
+ #
+ # # Serialize a preferences attribute.
+ # class User < ActiveRecord::Base
+ # serialize :preferences
+ # end
+ #
+ # # Serialize preferences using JSON as coder.
+ # class User < ActiveRecord::Base
+ # serialize :preferences, JSON
+ # end
+ #
+ # # Serialize preferences as Hash using YAML coder.
+ # class User < ActiveRecord::Base
+ # serialize :preferences, Hash
+ # end
+ def serialize(attr_name, class_name_or_coder = Object)
+ # When ::JSON is used, force it to go through the Active Support JSON encoder
+ # to ensure special objects (e.g. Active Record models) are dumped correctly
+ # using the #as_json hook.
+ coder = if class_name_or_coder == ::JSON
+ Coders::JSON
+ elsif [:load, :dump].all? { |x| class_name_or_coder.respond_to?(x) }
+ class_name_or_coder
+ else
+ Coders::YAMLColumn.new(attr_name, class_name_or_coder)
+ end
+
+ decorate_attribute_type(attr_name, :serialize) do |type|
+ Type::Serialized.new(type, coder)
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/attribute_methods/time_zone_conversion.rb b/activerecord/lib/active_record/attribute_methods/time_zone_conversion.rb
new file mode 100644
index 0000000000..f12a9f915c
--- /dev/null
+++ b/activerecord/lib/active_record/attribute_methods/time_zone_conversion.rb
@@ -0,0 +1,91 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ module AttributeMethods
+ module TimeZoneConversion
+ class TimeZoneConverter < DelegateClass(Type::Value) # :nodoc:
+ def deserialize(value)
+ convert_time_to_time_zone(super)
+ end
+
+ def cast(value)
+ return if value.nil?
+
+ if value.is_a?(Hash)
+ set_time_zone_without_conversion(super)
+ elsif value.respond_to?(:in_time_zone)
+ begin
+ super(user_input_in_time_zone(value)) || super
+ rescue ArgumentError
+ nil
+ end
+ else
+ map_avoiding_infinite_recursion(super) { |v| cast(v) }
+ end
+ end
+
+ private
+
+ def convert_time_to_time_zone(value)
+ return if value.nil?
+
+ if value.acts_like?(:time)
+ value.in_time_zone
+ elsif value.is_a?(::Float)
+ value
+ else
+ map_avoiding_infinite_recursion(value) { |v| convert_time_to_time_zone(v) }
+ end
+ end
+
+ def set_time_zone_without_conversion(value)
+ ::Time.zone.local_to_utc(value).try(:in_time_zone) if value
+ end
+
+ def map_avoiding_infinite_recursion(value)
+ map(value) do |v|
+ if value.equal?(v)
+ nil
+ else
+ yield(v)
+ end
+ end
+ end
+ end
+
+ extend ActiveSupport::Concern
+
+ included do
+ mattr_accessor :time_zone_aware_attributes, instance_writer: false, default: false
+
+ class_attribute :skip_time_zone_conversion_for_attributes, instance_writer: false, default: []
+ class_attribute :time_zone_aware_types, instance_writer: false, default: [ :datetime, :time ]
+ end
+
+ module ClassMethods
+ private
+
+ def inherited(subclass)
+ super
+ # We need to apply this decorator here, rather than on module inclusion. The closure
+ # created by the matcher would otherwise evaluate for `ActiveRecord::Base`, not the
+ # subclass being decorated. As such, changes to `time_zone_aware_attributes` or
+ # `skip_time_zone_conversion_for_attributes` would not be picked up.
+ subclass.class_eval do
+ matcher = ->(name, type) { create_time_zone_conversion_attribute?(name, type) }
+ decorate_matching_attribute_types(matcher, :_time_zone_conversion) do |type|
+ TimeZoneConverter.new(type)
+ end
+ end
+ end
+
+ def create_time_zone_conversion_attribute?(name, cast_type)
+ enabled_for_column = time_zone_aware_attributes &&
+ !skip_time_zone_conversion_for_attributes.include?(name.to_sym)
+
+ enabled_for_column && time_zone_aware_types.include?(cast_type.type)
+ end
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/attribute_methods/write.rb b/activerecord/lib/active_record/attribute_methods/write.rb
new file mode 100644
index 0000000000..fa01f832ac
--- /dev/null
+++ b/activerecord/lib/active_record/attribute_methods/write.rb
@@ -0,0 +1,64 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ module AttributeMethods
+ module Write
+ extend ActiveSupport::Concern
+
+ included do
+ attribute_method_suffix "="
+ end
+
+ module ClassMethods
+ private
+
+ def define_method_attribute=(name)
+ safe_name = name.unpack("h*".freeze).first
+ ActiveRecord::AttributeMethods::AttrNames.set_name_cache safe_name, name
+
+ generated_attribute_methods.module_eval <<-STR, __FILE__, __LINE__ + 1
+ def __temp__#{safe_name}=(value)
+ name = ::ActiveRecord::AttributeMethods::AttrNames::ATTR_#{safe_name}
+ _write_attribute(name, value)
+ end
+ alias_method #{(name + '=').inspect}, :__temp__#{safe_name}=
+ undef_method :__temp__#{safe_name}=
+ STR
+ end
+ end
+
+ # Updates the attribute identified by <tt>attr_name</tt> with the
+ # specified +value+. Empty strings for Integer and Float columns are
+ # turned into +nil+.
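+ #
+ # For instance (assuming a +Person+ model with an integer +age+ column):
+ #
+ # person = Person.new
+ # person.write_attribute(:age, "22")
+ # person.age # => 22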
+ def write_attribute(attr_name, value)
+ name = if self.class.attribute_alias?(attr_name)
+ self.class.attribute_alias(attr_name).to_s
+ else
+ attr_name.to_s
+ end
+
+ name = self.class.primary_key if name == "id".freeze && self.class.primary_key
+ _write_attribute(name, value)
+ end
+
+ # This method exists to avoid the expensive primary_key check internally, without
+ # breaking compatibility with the write_attribute API
+ def _write_attribute(attr_name, value) # :nodoc:
+ @attributes.write_from_user(attr_name.to_s, value)
+ value
+ end
+
+ private
+ def write_attribute_without_type_cast(attr_name, value)
+ name = attr_name.to_s
+ @attributes.write_cast_value(name, value)
+ value
+ end
+
+ # Handle *= for method_missing.
+ def attribute=(attribute_name, value)
+ _write_attribute(attribute_name, value)
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/attribute_mutation_tracker.rb b/activerecord/lib/active_record/attribute_mutation_tracker.rb
new file mode 100644
index 0000000000..94bf641a5d
--- /dev/null
+++ b/activerecord/lib/active_record/attribute_mutation_tracker.rb
@@ -0,0 +1,111 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ class AttributeMutationTracker # :nodoc:
+ OPTION_NOT_GIVEN = Object.new
+
+ def initialize(attributes)
+ @attributes = attributes
+ @forced_changes = Set.new
+ end
+
+ def changed_values
+ attr_names.each_with_object({}.with_indifferent_access) do |attr_name, result|
+ if changed?(attr_name)
+ result[attr_name] = attributes[attr_name].original_value
+ end
+ end
+ end
+
+ def changes
+ attr_names.each_with_object({}.with_indifferent_access) do |attr_name, result|
+ change = change_to_attribute(attr_name)
+ if change
+ result[attr_name] = change
+ end
+ end
+ end
+
+ def change_to_attribute(attr_name)
+ attr_name = attr_name.to_s
+ if changed?(attr_name)
+ [attributes[attr_name].original_value, attributes.fetch_value(attr_name)]
+ end
+ end
+
+ def any_changes?
+ attr_names.any? { |attr| changed?(attr) }
+ end
+
+ def changed?(attr_name, from: OPTION_NOT_GIVEN, to: OPTION_NOT_GIVEN)
+ attr_name = attr_name.to_s
+ forced_changes.include?(attr_name) ||
+ attributes[attr_name].changed? &&
+ (OPTION_NOT_GIVEN == from || attributes[attr_name].original_value == from) &&
+ (OPTION_NOT_GIVEN == to || attributes[attr_name].value == to)
+ end
+
+ def changed_in_place?(attr_name)
+ attributes[attr_name.to_s].changed_in_place?
+ end
+
+ def forget_change(attr_name)
+ attr_name = attr_name.to_s
+ attributes[attr_name] = attributes[attr_name].forgetting_assignment
+ forced_changes.delete(attr_name)
+ end
+
+ def original_value(attr_name)
+ attributes[attr_name.to_s].original_value
+ end
+
+ def force_change(attr_name)
+ forced_changes << attr_name.to_s
+ end
+
+ # TODO Change this to private once we've dropped Ruby 2.2 support.
+ # Workaround for Ruby 2.2 "private attribute?" warning.
+ protected
+
+ attr_reader :attributes, :forced_changes
+
+ private
+
+ def attr_names
+ attributes.keys
+ end
+ end
+
+ class NullMutationTracker # :nodoc:
+ include Singleton
+
+ def changed_values(*)
+ {}
+ end
+
+ def changes(*)
+ {}
+ end
+
+ def change_to_attribute(attr_name)
+ end
+
+ def any_changes?(*)
+ false
+ end
+
+ def changed?(*)
+ false
+ end
+
+ def changed_in_place?(*)
+ false
+ end
+
+ def forget_change(*)
+ end
+
+ def original_value(*)
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/attribute_set.rb b/activerecord/lib/active_record/attribute_set.rb
new file mode 100644
index 0000000000..492067e2b3
--- /dev/null
+++ b/activerecord/lib/active_record/attribute_set.rb
@@ -0,0 +1,113 @@
+# frozen_string_literal: true
+
+require_relative "attribute_set/builder"
+require_relative "attribute_set/yaml_encoder"
+
+module ActiveRecord
+ class AttributeSet # :nodoc:
+ delegate :each_value, :fetch, to: :attributes
+
+ def initialize(attributes)
+ @attributes = attributes
+ end
+
+ def [](name)
+ attributes[name] || Attribute.null(name)
+ end
+
+ def []=(name, value)
+ attributes[name] = value
+ end
+
+ def values_before_type_cast
+ attributes.transform_values(&:value_before_type_cast)
+ end
+
+ def to_hash
+ initialized_attributes.transform_values(&:value)
+ end
+ alias_method :to_h, :to_hash
+
+ def key?(name)
+ attributes.key?(name) && self[name].initialized?
+ end
+
+ def keys
+ attributes.each_key.select { |name| self[name].initialized? }
+ end
+
+ if defined?(JRUBY_VERSION)
+ # This form is significantly faster on JRuby, and this is one of our biggest hotspots.
+ # https://github.com/jruby/jruby/pull/2562
+ def fetch_value(name, &block)
+ self[name].value(&block)
+ end
+ else
+ def fetch_value(name)
+ self[name].value { |n| yield n if block_given? }
+ end
+ end
+
+ def write_from_database(name, value)
+ attributes[name] = self[name].with_value_from_database(value)
+ end
+
+ def write_from_user(name, value)
+ attributes[name] = self[name].with_value_from_user(value)
+ end
+
+ def write_cast_value(name, value)
+ attributes[name] = self[name].with_cast_value(value)
+ end
+
+ def freeze
+ @attributes.freeze
+ super
+ end
+
+ def deep_dup
+ self.class.allocate.tap do |copy|
+ copy.instance_variable_set(:@attributes, attributes.deep_dup)
+ end
+ end
+
+ def initialize_dup(_)
+ @attributes = attributes.dup
+ super
+ end
+
+ def initialize_clone(_)
+ @attributes = attributes.clone
+ super
+ end
+
+ def reset(key)
+ if key?(key)
+ write_from_database(key, nil)
+ end
+ end
+
+ def accessed
+ attributes.select { |_, attr| attr.has_been_read? }.keys
+ end
+
+ def map(&block)
+ new_attributes = attributes.transform_values(&block)
+ AttributeSet.new(new_attributes)
+ end
+
+ def ==(other)
+ attributes == other.attributes
+ end
+
+ protected
+
+ attr_reader :attributes
+
+ private
+
+ def initialized_attributes
+ attributes.select { |_, attr| attr.initialized? }
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/attribute_set/builder.rb b/activerecord/lib/active_record/attribute_set/builder.rb
new file mode 100644
index 0000000000..e3a9c7fdb3
--- /dev/null
+++ b/activerecord/lib/active_record/attribute_set/builder.rb
@@ -0,0 +1,126 @@
+# frozen_string_literal: true
+
+require_relative "../attribute"
+
+module ActiveRecord
+ class AttributeSet # :nodoc:
+ class Builder # :nodoc:
+ attr_reader :types, :always_initialized, :default
+
+ def initialize(types, always_initialized = nil, &default)
+ @types = types
+ @always_initialized = always_initialized
+ @default = default
+ end
+
+ def build_from_database(values = {}, additional_types = {})
+ if always_initialized && !values.key?(always_initialized)
+ values[always_initialized] = nil
+ end
+
+ attributes = LazyAttributeHash.new(types, values, additional_types, &default)
+ AttributeSet.new(attributes)
+ end
+ end
+ end
+
+ class LazyAttributeHash # :nodoc:
+ delegate :transform_values, :each_key, :each_value, :fetch, to: :materialize
+
+ def initialize(types, values, additional_types, &default)
+ @types = types
+ @values = values
+ @additional_types = additional_types
+ @materialized = false
+ @delegate_hash = {}
+ @default = default || proc {}
+ end
+
+ def key?(key)
+ delegate_hash.key?(key) || values.key?(key) || types.key?(key)
+ end
+
+ def [](key)
+ delegate_hash[key] || assign_default_value(key)
+ end
+
+ def []=(key, value)
+ if frozen?
+ raise RuntimeError, "Can't modify frozen hash"
+ end
+ delegate_hash[key] = value
+ end
+
+ def deep_dup
+ dup.tap do |copy|
+ copy.instance_variable_set(:@delegate_hash, delegate_hash.transform_values(&:dup))
+ end
+ end
+
+ def initialize_dup(_)
+ @delegate_hash = Hash[delegate_hash]
+ super
+ end
+
+ def select
+ keys = types.keys | values.keys | delegate_hash.keys
+ keys.each_with_object({}) do |key, hash|
+ attribute = self[key]
+ if yield(key, attribute)
+ hash[key] = attribute
+ end
+ end
+ end
+
+ def ==(other)
+ if other.is_a?(LazyAttributeHash)
+ materialize == other.materialize
+ else
+ materialize == other
+ end
+ end
+
+ def marshal_dump
+ materialize
+ end
+
+ def marshal_load(delegate_hash)
+ @delegate_hash = delegate_hash
+ @types = {}
+ @values = {}
+ @additional_types = {}
+ @materialized = true
+ end
+
+ # TODO Change this to private once we've dropped Ruby 2.2 support.
+ # Workaround for Ruby 2.2 "private attribute?" warning.
+ protected
+
+ attr_reader :types, :values, :additional_types, :delegate_hash, :default
+
+ def materialize
+ unless @materialized
+ values.each_key { |key| self[key] }
+ types.each_key { |key| self[key] }
+ unless frozen?
+ @materialized = true
+ end
+ end
+ delegate_hash
+ end
+
+ private
+
+ def assign_default_value(name)
+ type = additional_types.fetch(name, types[name])
+ value_present = true
+ value = values.fetch(name) { value_present = false }
+
+ if value_present
+ delegate_hash[name] = Attribute.from_database(name, value, type)
+ elsif types.key?(name)
+ delegate_hash[name] = default.call(name) || Attribute.uninitialized(name, type)
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/attribute_set/yaml_encoder.rb b/activerecord/lib/active_record/attribute_set/yaml_encoder.rb
new file mode 100644
index 0000000000..9254ce16ab
--- /dev/null
+++ b/activerecord/lib/active_record/attribute_set/yaml_encoder.rb
@@ -0,0 +1,43 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ class AttributeSet
+ # Attempts to do more intelligent YAML dumping of an
+ # ActiveRecord::AttributeSet to reduce the size of the resulting string.
+ class YAMLEncoder # :nodoc:
+ def initialize(default_types)
+ @default_types = default_types
+ end
+
+ def encode(attribute_set, coder)
+ coder["concise_attributes"] = attribute_set.each_value.map do |attr|
+ if attr.type.equal?(default_types[attr.name])
+ attr.with_type(nil)
+ else
+ attr
+ end
+ end
+ end
+
+ def decode(coder)
+ if coder["attributes"]
+ coder["attributes"]
+ else
+ attributes_hash = Hash[coder["concise_attributes"].map do |attr|
+ if attr.type.nil?
+ attr = attr.with_type(default_types[attr.name])
+ end
+ [attr.name, attr]
+ end]
+ AttributeSet.new(attributes_hash)
+ end
+ end
+
+ # TODO Change this to private once we've dropped Ruby 2.2 support.
+ # Workaround for Ruby 2.2 "private attribute?" warning.
+ protected
+
+ attr_reader :default_types
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/attributes.rb b/activerecord/lib/active_record/attributes.rb
new file mode 100644
index 0000000000..afb559db71
--- /dev/null
+++ b/activerecord/lib/active_record/attributes.rb
@@ -0,0 +1,266 @@
+# frozen_string_literal: true
+
+require_relative "attribute/user_provided_default"
+
+module ActiveRecord
+ # See ActiveRecord::Attributes::ClassMethods for documentation
+ module Attributes
+ extend ActiveSupport::Concern
+
+ included do
+ class_attribute :attributes_to_define_after_schema_loads, instance_accessor: false, default: {} # :internal:
+ end
+
+ module ClassMethods
+ # Defines an attribute with a type on this model. It will override the
+ # type of existing attributes if needed. This allows control over how
+ # values are converted to and from SQL when assigned to a model. It also
+ # changes the behavior of values passed to
+ # {ActiveRecord::Base.where}[rdoc-ref:QueryMethods#where]. This will let you use
+ # your domain objects across much of Active Record, without having to
+ # rely on implementation details or monkey patching.
+ #
+ # +name+ The name of the methods to define attribute methods for, and the
+ # column which this will persist to.
+ #
+ # +cast_type+ A symbol such as +:string+ or +:integer+, or a type object
+ # to be used for this attribute. See the examples below for more
+ # information about providing custom type objects.
+ #
+ # ==== Options
+ #
+ # The following options are accepted:
+ #
+ # +default+ The default value to use when no value is provided. If this option
+ # is not passed, the previous default value (if any) will be used.
+ # Otherwise, the default will be +nil+.
+ #
+ # +array+ (PostgreSQL only) specifies that the type should be an array (see the
+ # examples below).
+ #
+ # +range+ (PostgreSQL only) specifies that the type should be a range (see the
+ # examples below).
+ #
+ # ==== Examples
+ #
+ # The type detected by Active Record can be overridden.
+ #
+ # # db/schema.rb
+ # create_table :store_listings, force: true do |t|
+ # t.decimal :price_in_cents
+ # end
+ #
+ # # app/models/store_listing.rb
+ # class StoreListing < ActiveRecord::Base
+ # end
+ #
+ # store_listing = StoreListing.new(price_in_cents: '10.1')
+ #
+ # # before
+ # store_listing.price_in_cents # => BigDecimal.new(10.1)
+ #
+ # class StoreListing < ActiveRecord::Base
+ # attribute :price_in_cents, :integer
+ # end
+ #
+ # # after
+ # store_listing.price_in_cents # => 10
+ #
+ # A default can also be provided.
+ #
+ # # db/schema.rb
+ # create_table :store_listings, force: true do |t|
+ # t.string :my_string, default: "original default"
+ # end
+ #
+ # StoreListing.new.my_string # => "original default"
+ #
+ # # app/models/store_listing.rb
+ # class StoreListing < ActiveRecord::Base
+ # attribute :my_string, :string, default: "new default"
+ # end
+ #
+ # StoreListing.new.my_string # => "new default"
+ #
+ # class Product < ActiveRecord::Base
+ # attribute :my_default_proc, :datetime, default: -> { Time.now }
+ # end
+ #
+ # Product.new.my_default_proc # => 2015-05-30 11:04:48 -0600
+ # sleep 1
+ # Product.new.my_default_proc # => 2015-05-30 11:04:49 -0600
+ #
+ # \Attributes do not need to be backed by a database column.
+ #
+ # # app/models/my_model.rb
+ # class MyModel < ActiveRecord::Base
+ # attribute :my_string, :string
+ # attribute :my_int_array, :integer, array: true
+ # attribute :my_float_range, :float, range: true
+ # end
+ #
+ # model = MyModel.new(
+ # my_string: "string",
+ # my_int_array: ["1", "2", "3"],
+ # my_float_range: "[1,3.5]",
+ # )
+ # model.attributes
+ # # =>
+ # {
+ # my_string: "string",
+ # my_int_array: [1, 2, 3],
+ # my_float_range: 1.0..3.5
+ # }
+ #
+ # ==== Creating Custom Types
+ #
+ # Users may also define their own custom types, as long as they respond
+ # to the methods defined on the value type. The method +deserialize+ or
+ # +cast+ will be called on your type object, with raw input from the
+ # database or from your controllers. See ActiveModel::Type::Value for the
+ # expected API. It is recommended that your type objects inherit from an
+ # existing type, or from ActiveRecord::Type::Value
+ #
+ # class MoneyType < ActiveRecord::Type::Integer
+ # def cast(value)
+ # if !value.kind_of?(Numeric) && value.include?('$')
+ # price_in_dollars = value.gsub(/\$/, '').to_f
+ # super(price_in_dollars * 100)
+ # else
+ # super
+ # end
+ # end
+ # end
+ #
+ # # config/initializers/types.rb
+ # ActiveRecord::Type.register(:money, MoneyType)
+ #
+ # # app/models/store_listing.rb
+ # class StoreListing < ActiveRecord::Base
+ # attribute :price_in_cents, :money
+ # end
+ #
+ # store_listing = StoreListing.new(price_in_cents: '$10.00')
+ # store_listing.price_in_cents # => 1000
+ #
+ # For more details on creating custom types, see the documentation for
+ # ActiveModel::Type::Value. For more details on registering your types
+ # to be referenced by a symbol, see ActiveRecord::Type.register. You can
+ # also pass a type object directly, in place of a symbol.
+ #
+ # ==== \Querying
+ #
+ # When {ActiveRecord::Base.where}[rdoc-ref:QueryMethods#where] is called, it will
+ # use the type defined by the model class to convert the value to SQL,
+ # calling +serialize+ on your type object. For example:
+ #
+ # class Money < Struct.new(:amount, :currency)
+ # end
+ #
+ # class MoneyType < Type::Value
+ # def initialize(currency_converter:)
+ # @currency_converter = currency_converter
+ # end
+ #
+ # # value will be the result of +deserialize+ or
+ # # +cast+. Assumed to be an instance of +Money+ in
+ # # this case.
+ # def serialize(value)
+ # value_in_bitcoins = @currency_converter.convert_to_bitcoins(value)
+ # value_in_bitcoins.amount
+ # end
+ # end
+ #
+ # # config/initializers/types.rb
+ # ActiveRecord::Type.register(:money, MoneyType)
+ #
+ # # app/models/product.rb
+ # class Product < ActiveRecord::Base
+ # currency_converter = ConversionRatesFromTheInternet.new
+ # attribute :price_in_bitcoins, :money, currency_converter: currency_converter
+ # end
+ #
+ # Product.where(price_in_bitcoins: Money.new(5, "USD"))
+ # # => SELECT * FROM products WHERE price_in_bitcoins = 0.02230
+ #
+ # Product.where(price_in_bitcoins: Money.new(5, "GBP"))
+ # # => SELECT * FROM products WHERE price_in_bitcoins = 0.03412
+ #
+ # ==== Dirty Tracking
+ #
+ # The type of an attribute is given the opportunity to change how dirty
+ # tracking is performed. The methods +changed?+ and +changed_in_place?+
+ # will be called from ActiveModel::Dirty. See the documentation for those
+ # methods in ActiveModel::Type::Value for more details.
+ def attribute(name, cast_type = Type::Value.new, **options)
+ name = name.to_s
+ reload_schema_from_cache
+
+ self.attributes_to_define_after_schema_loads =
+ attributes_to_define_after_schema_loads.merge(
+ name => [cast_type, options]
+ )
+ end
+
+ # This is the low level API which sits beneath +attribute+. It only
+ # accepts type objects, and will do its work immediately instead of
+ # waiting for the schema to load. Automatic schema detection and
+ # ClassMethods#attribute both call this under the hood. While this method
+ # is provided so it can be used by plugin authors, application code
+ # should probably use ClassMethods#attribute.
+ #
+ # +name+ The name of the attribute being defined. Expected to be a +String+.
+ #
+ # +cast_type+ The type object to use for this attribute.
+ #
+ # +default+ The default value to use when no value is provided. If this option
+ # is not passed, the previous default value (if any) will be used.
+ # Otherwise, the default will be +nil+. A proc can also be passed, and
+ # will be called once each time a new value is needed.
+ #
+ # +user_provided_default+ Whether the default value should be cast using
+ # +cast+ or +deserialize+.
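+ #
+ # A minimal sketch (+MoneyType+ here stands for any type object, such as the
+ # one defined in the example above):
+ #
+ # class StoreListing < ActiveRecord::Base
+ # define_attribute("price_in_cents", MoneyType.new)
+ # end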
+ def define_attribute(
+ name,
+ cast_type,
+ default: NO_DEFAULT_PROVIDED,
+ user_provided_default: true
+ )
+ attribute_types[name] = cast_type
+ define_default_attribute(name, default, cast_type, from_user: user_provided_default)
+ end
+
+ def load_schema! # :nodoc:
+ super
+ attributes_to_define_after_schema_loads.each do |name, (type, options)|
+ if type.is_a?(Symbol)
+ type = ActiveRecord::Type.lookup(type, **options.except(:default))
+ end
+
+ define_attribute(name, type, **options.slice(:default))
+ end
+ end
+
+ private
+
+ NO_DEFAULT_PROVIDED = Object.new # :nodoc:
+ private_constant :NO_DEFAULT_PROVIDED
+
+ def define_default_attribute(name, value, type, from_user:)
+ if value == NO_DEFAULT_PROVIDED
+ default_attribute = _default_attributes[name].with_type(type)
+ elsif from_user
+ default_attribute = Attribute::UserProvidedDefault.new(
+ name,
+ value,
+ type,
+ _default_attributes.fetch(name.to_s) { nil },
+ )
+ else
+ default_attribute = Attribute.from_database(name, value, type)
+ end
+ _default_attributes[name] = default_attribute
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/autosave_association.rb b/activerecord/lib/active_record/autosave_association.rb
new file mode 100644
index 0000000000..6974cf74f6
--- /dev/null
+++ b/activerecord/lib/active_record/autosave_association.rb
@@ -0,0 +1,490 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ # = Active Record Autosave Association
+ #
+ # AutosaveAssociation is a module that takes care of automatically saving
+ # associated records when their parent is saved. In addition to saving, it
+ # also destroys any associated records that were marked for destruction.
+ # (See #mark_for_destruction and #marked_for_destruction?).
+ #
+ # Saving of the parent, its associations, and the destruction of marked
+ # associations, all happen inside a transaction. This should never leave the
+ # database in an inconsistent state.
+ #
+ # If validations for any of the associations fail, their error messages will
+ # be applied to the parent.
+ #
+ # Note that it also means that associations marked for destruction won't
+ # be destroyed directly. They will however still be marked for destruction.
+ #
+ # Note that <tt>autosave: false</tt> is not the same as not declaring <tt>:autosave</tt>.
+ # When the <tt>:autosave</tt> option is not present then new association records are
+ # saved but the updated association records are not saved.
+ #
+ # == Validation
+ #
+ # Child records are validated unless <tt>:validate</tt> is +false+.
+ #
+ # == Callbacks
+ #
+ # An association with the autosave option defines several callbacks on your
+ # model (+before_save+, +after_create+, +after_update+). Please note that
+ # callbacks are executed in the order they were defined in the
+ # model. You should avoid modifying the association content before the
+ # autosave callbacks are executed. Placing your callbacks after
+ # associations is usually a good practice.
+ #
+ # === One-to-one Example
+ #
+ # class Post < ActiveRecord::Base
+ # has_one :author, autosave: true
+ # end
+ #
+ # Saving changes to the parent and its associated model can now be performed
+ # automatically _and_ atomically:
+ #
+ # post = Post.find(1)
+ # post.title # => "The current global position of migrating ducks"
+ # post.author.name # => "alloy"
+ #
+ # post.title = "On the migration of ducks"
+ # post.author.name = "Eloy Duran"
+ #
+ # post.save
+ # post.reload
+ # post.title # => "On the migration of ducks"
+ # post.author.name # => "Eloy Duran"
+ #
+ # Destroying an associated model, as part of the parent's save action, is as
+ # simple as marking it for destruction:
+ #
+ # post.author.mark_for_destruction
+ # post.author.marked_for_destruction? # => true
+ #
+ # Note that the model is _not_ yet removed from the database:
+ #
+ # id = post.author.id
+ # Author.find_by(id: id).nil? # => false
+ #
+ # post.save
+ # post.reload.author # => nil
+ #
+ # Now it _is_ removed from the database:
+ #
+ # Author.find_by(id: id).nil? # => true
+ #
+ # === One-to-many Example
+ #
+ # When <tt>:autosave</tt> is not declared new children are saved when their parent is saved:
+ #
+ # class Post < ActiveRecord::Base
+ # has_many :comments # :autosave option is not declared
+ # end
+ #
+ # post = Post.new(title: 'ruby rocks')
+ # post.comments.build(body: 'hello world')
+ # post.save # => saves both post and comment
+ #
+ # post = Post.create(title: 'ruby rocks')
+ # post.comments.build(body: 'hello world')
+ # post.save # => saves both post and comment
+ #
+ # post = Post.create(title: 'ruby rocks')
+ # post.comments.create(body: 'hello world')
+ # post.save # => saves both post and comment
+ #
+ # When <tt>:autosave</tt> is true all children are saved, no matter whether they
+ # are new records or not:
+ #
+ # class Post < ActiveRecord::Base
+ # has_many :comments, autosave: true
+ # end
+ #
+ # post = Post.create(title: 'ruby rocks')
+ # post.comments.create(body: 'hello world')
+ # post.comments[0].body = 'hi everyone'
+ # post.comments.build(body: "good morning.")
+ # post.title += "!"
+ # post.save # => saves both post and comments.
+ #
+ # Destroying one of the associated models as part of the parent's save action
+ # is as simple as marking it for destruction:
+ #
+ # post.comments # => [#<Comment id: 1, ...>, #<Comment id: 2, ...]>
+ # post.comments[1].mark_for_destruction
+ # post.comments[1].marked_for_destruction? # => true
+ # post.comments.length # => 2
+ #
+ # Note that the model is _not_ yet removed from the database:
+ #
+ # id = post.comments.last.id
+ # Comment.find_by(id: id).nil? # => false
+ #
+ # post.save
+ # post.reload.comments.length # => 1
+ #
+ # Now it _is_ removed from the database:
+ #
+ # Comment.find_by(id: id).nil? # => true
+ module AutosaveAssociation
+ extend ActiveSupport::Concern
+
+ module AssociationBuilderExtension #:nodoc:
+ def self.build(model, reflection)
+ model.send(:add_autosave_association_callbacks, reflection)
+ end
+
+ def self.valid_options
+ [ :autosave ]
+ end
+ end
+
+ included do
+ Associations::Builder::Association.extensions << AssociationBuilderExtension
+ mattr_accessor :index_nested_attribute_errors, instance_writer: false, default: false
+ end
+
+ module ClassMethods # :nodoc:
+ private
+
+ def define_non_cyclic_method(name, &block)
+ return if method_defined?(name)
+ define_method(name) do |*args|
+ result = true; @_already_called ||= {}
+ # Loop prevention for validation of associations
+ unless @_already_called[name]
+ begin
+ @_already_called[name] = true
+ result = instance_eval(&block)
+ ensure
+ @_already_called[name] = false
+ end
+ end
+
+ result
+ end
+ end
+
+ # Adds validation and save callbacks for the association as specified by
+ # the +reflection+.
+ #
+ # For performance reasons, we don't check whether to validate at runtime.
+ # However, the validation and callback methods are lazy and only get
+ # created when they are invoked for the very first time. This can change,
+ # for instance, when using nested attributes, which is
+ # called _after_ the association has been defined. Since we don't want
+ # the callbacks to get defined multiple times, there are guards that
+ # check if the save or validation methods have already been defined
+ # before actually defining them.
+ def add_autosave_association_callbacks(reflection)
+ save_method = :"autosave_associated_records_for_#{reflection.name}"
+
+ if reflection.collection?
+ before_save :before_save_collection_association
+ after_save :after_save_collection_association
+
+ define_non_cyclic_method(save_method) { save_collection_association(reflection) }
+ # Doesn't use after_save as that would save associations added in after_create/after_update twice
+ after_create save_method
+ after_update save_method
+ elsif reflection.has_one?
+ define_method(save_method) { save_has_one_association(reflection) } unless method_defined?(save_method)
+ # Configures two callbacks instead of a single after_save so that
+ # the model may rely on their execution order relative to its
+ # own callbacks.
+ #
+ # For example, given that after_creates run before after_saves, if
+ # we configured instead an after_save there would be no way to fire
+ # a custom after_create callback after the child association gets
+ # created.
+ after_create save_method
+ after_update save_method
+ else
+ define_non_cyclic_method(save_method) { throw(:abort) if save_belongs_to_association(reflection) == false }
+ before_save save_method
+ end
+
+ define_autosave_validation_callbacks(reflection)
+ end
+
+ def define_autosave_validation_callbacks(reflection)
+ validation_method = :"validate_associated_records_for_#{reflection.name}"
+ if reflection.validate? && !method_defined?(validation_method)
+ if reflection.collection?
+ method = :validate_collection_association
+ else
+ method = :validate_single_association
+ end
+
+ define_non_cyclic_method(validation_method) { send(method, reflection) }
+ validate validation_method
+ after_validation :_ensure_no_duplicate_errors
+ end
+ end
+ end
+
+ # Reloads the attributes of the object as usual and clears the <tt>marked_for_destruction</tt> flag.
+ def reload(options = nil)
+ @marked_for_destruction = false
+ @destroyed_by_association = nil
+ super
+ end
+
+ # Marks this record to be destroyed as part of the parent's save transaction.
+ # This does _not_ actually destroy the record instantly; rather, the child record will be
+ # destroyed when <tt>parent.save</tt> is called.
+ #
+ # Only useful if the <tt>:autosave</tt> option on the parent is enabled for this associated model.
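+ #
+ # A minimal sketch, assuming a parent with a hypothetical
+ # <tt>has_one :avatar, autosave: true</tt> association:
+ #
+ # user.avatar.mark_for_destruction
+ # user.avatar.marked_for_destruction? # => true
+ # user.save # the avatar is destroyed as part of the save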
+ def mark_for_destruction
+ @marked_for_destruction = true
+ end
+
+ # Returns whether or not this record will be destroyed as part of the parent's save transaction.
+ #
+ # Only useful if the <tt>:autosave</tt> option on the parent is enabled for this associated model.
+ def marked_for_destruction?
+ @marked_for_destruction
+ end
+
+ # Records the association that is being destroyed and destroying this
+ # record in the process.
+ def destroyed_by_association=(reflection)
+ @destroyed_by_association = reflection
+ end
+
+ # Returns the association for the parent being destroyed.
+ #
+ # Used to avoid updating the counter cache unnecessarily.
+ def destroyed_by_association
+ @destroyed_by_association
+ end
+
+ # Returns whether or not this record has been changed in any way (including whether
+ # any of its nested autosave associations are likewise changed)
+ def changed_for_autosave?
+ new_record? || has_changes_to_save? || marked_for_destruction? || nested_records_changed_for_autosave?
+ end
+
+ private
+
+ # Returns the records of an association collection that should be validated
+ # or saved. If +autosave+ is +false+, only new records are returned,
+ # unless the parent is/was a new record itself.
+ def associated_records_to_validate_or_save(association, new_record, autosave)
+ if new_record
+ association && association.target
+ elsif autosave
+ association.target.find_all(&:changed_for_autosave?)
+ else
+ association.target.find_all(&:new_record?)
+ end
+ end
+
+ # Go through nested autosave associations that are loaded in memory (without loading
+ # any new ones), and return +true+ if any of them are changed for autosave.
+ def nested_records_changed_for_autosave?
+ @_nested_records_changed_for_autosave_already_called ||= false
+ return false if @_nested_records_changed_for_autosave_already_called
+ begin
+ @_nested_records_changed_for_autosave_already_called = true
+ self.class._reflections.values.any? do |reflection|
+ if reflection.options[:autosave]
+ association = association_instance_get(reflection.name)
+ association && Array.wrap(association.target).any?(&:changed_for_autosave?)
+ end
+ end
+ ensure
+ @_nested_records_changed_for_autosave_already_called = false
+ end
+ end
+
+ # Validate the association if <tt>:validate</tt> or <tt>:autosave</tt> is
+ # turned on for the association.
+ def validate_single_association(reflection)
+ association = association_instance_get(reflection.name)
+ record = association && association.reader
+ association_valid?(reflection, record) if record
+ end
+
+ # Validate the associated records if <tt>:validate</tt> or
+ # <tt>:autosave</tt> is turned on for the association specified by
+ # +reflection+.
+ def validate_collection_association(reflection)
+ if association = association_instance_get(reflection.name)
+ if records = associated_records_to_validate_or_save(association, new_record?, reflection.options[:autosave])
+ records.each_with_index { |record, index| association_valid?(reflection, record, index) }
+ end
+ end
+ end
+
+ # Returns whether or not the association is valid and applies any errors to
+ # the parent, <tt>self</tt>, if it wasn't. Skips any <tt>:autosave</tt>
+ # enabled records if they're marked_for_destruction? or destroyed.
+ def association_valid?(reflection, record, index = nil)
+ return true if record.destroyed? || (reflection.options[:autosave] && record.marked_for_destruction?)
+
+ context = validation_context unless [:create, :update].include?(validation_context)
+
+ unless valid = record.valid?(context)
+ if reflection.options[:autosave]
+ indexed_attribute = !index.nil? && (reflection.options[:index_errors] || ActiveRecord::Base.index_nested_attribute_errors)
+
+ record.errors.each do |attribute, message|
+ attribute = normalize_reflection_attribute(indexed_attribute, reflection, index, attribute)
+ errors[attribute] << message
+ errors[attribute].uniq!
+ end
+
+ record.errors.details.each_key do |attribute|
+ reflection_attribute =
+ normalize_reflection_attribute(indexed_attribute, reflection, index, attribute).to_sym
+
+ record.errors.details[attribute].each do |error|
+ errors.details[reflection_attribute] << error
+ errors.details[reflection_attribute].uniq!
+ end
+ end
+ else
+ errors.add(reflection.name)
+ end
+ end
+ valid
+ end
+
+ def normalize_reflection_attribute(indexed_attribute, reflection, index, attribute)
+ if indexed_attribute
+ "#{reflection.name}[#{index}].#{attribute}"
+ else
+ "#{reflection.name}.#{attribute}"
+ end
+ end
+
+ # Used as a before_save callback to check, while saving a collection
+ # association, whether or not the parent was a new record before saving.
+ def before_save_collection_association
+ @new_record_before_save = new_record?
+ end
+
+ def after_save_collection_association
+ @new_record_before_save = false
+ end
+
+ # Saves any new associated records, or all loaded autosave associations if
+ # <tt>:autosave</tt> is enabled on the association.
+ #
+ # In addition, it destroys all children that were marked for destruction
+ # with #mark_for_destruction.
+ #
+ # This all happens inside a transaction, _if_ the Transactions module is included into
+ # ActiveRecord::Base after the AutosaveAssociation module, which it is by default.
+ def save_collection_association(reflection)
+ if association = association_instance_get(reflection.name)
+ autosave = reflection.options[:autosave]
+
+ # reconstruct the scope now that we know the owner's id
+ association.reset_scope
+
+ if records = associated_records_to_validate_or_save(association, @new_record_before_save, autosave)
+ if autosave
+ records_to_destroy = records.select(&:marked_for_destruction?)
+ records_to_destroy.each { |record| association.destroy(record) }
+ records -= records_to_destroy
+ end
+
+ records.each do |record|
+ next if record.destroyed?
+
+ saved = true
+
+ if autosave != false && (@new_record_before_save || record.new_record?)
+ if autosave
+ saved = association.insert_record(record, false)
+ else
+ association.insert_record(record) unless reflection.nested?
+ end
+ elsif autosave
+ saved = record.save(validate: false)
+ end
+
+ raise ActiveRecord::Rollback unless saved
+ end
+ end
+ end
+ end
+
+ # Saves the associated record if it's new or <tt>:autosave</tt> is enabled
+ # on the association.
+ #
+ # In addition, it will destroy the association if it was marked for
+ # destruction with #mark_for_destruction.
+ #
+ # This all happens inside a transaction, _if_ the Transactions module is included into
+ # ActiveRecord::Base after the AutosaveAssociation module, which it is by default.
+ def save_has_one_association(reflection)
+ association = association_instance_get(reflection.name)
+ record = association && association.load_target
+
+ if record && !record.destroyed?
+ autosave = reflection.options[:autosave]
+
+ if autosave && record.marked_for_destruction?
+ record.destroy
+ elsif autosave != false
+ key = reflection.options[:primary_key] ? send(reflection.options[:primary_key]) : id
+
+ if (autosave && record.changed_for_autosave?) || new_record? || record_changed?(reflection, record, key)
+ unless reflection.through_reflection
+ record[reflection.foreign_key] = key
+ end
+
+ saved = record.save(validate: !autosave)
+ raise ActiveRecord::Rollback if !saved && autosave
+ saved
+ end
+ end
+ end
+ end
+
+ # Returns true if the record is new, or if its foreign key is out of sync with +key+ or about to change.
+ def record_changed?(reflection, record, key)
+ record.new_record? ||
+ (record.has_attribute?(reflection.foreign_key) && record[reflection.foreign_key] != key) ||
+ record.will_save_change_to_attribute?(reflection.foreign_key)
+ end
+
+ # Saves the associated record if it's new or <tt>:autosave</tt> is enabled.
+ #
+ # In addition, it will destroy the association if it was marked for destruction.
+ def save_belongs_to_association(reflection)
+ association = association_instance_get(reflection.name)
+ return unless association && association.loaded? && !association.stale_target?
+
+ record = association.load_target
+ if record && !record.destroyed?
+ autosave = reflection.options[:autosave]
+
+ if autosave && record.marked_for_destruction?
+ self[reflection.foreign_key] = nil
+ record.destroy
+ elsif autosave != false
+ saved = record.save(validate: !autosave) if record.new_record? || (autosave && record.changed_for_autosave?)
+
+ if association.updated?
+ association_id = record.send(reflection.options[:primary_key] || :id)
+ self[reflection.foreign_key] = association_id
+ association.loaded!
+ end
+
+ saved if autosave
+ end
+ end
+ end
+
+ def _ensure_no_duplicate_errors
+ errors.messages.each_key do |attribute|
+ errors[attribute].uniq!
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/base.rb b/activerecord/lib/active_record/base.rb
new file mode 100644
index 0000000000..541ff51fbe
--- /dev/null
+++ b/activerecord/lib/active_record/base.rb
@@ -0,0 +1,329 @@
+# frozen_string_literal: true
+
+require "yaml"
+require "active_support/benchmarkable"
+require "active_support/dependencies"
+require "active_support/descendants_tracker"
+require "active_support/time"
+require "active_support/core_ext/module/attribute_accessors"
+require "active_support/core_ext/array/extract_options"
+require "active_support/core_ext/hash/deep_merge"
+require "active_support/core_ext/hash/slice"
+require "active_support/core_ext/hash/transform_values"
+require "active_support/core_ext/string/behavior"
+require "active_support/core_ext/kernel/singleton_class"
+require "active_support/core_ext/module/introspection"
+require "active_support/core_ext/object/duplicable"
+require "active_support/core_ext/class/subclasses"
+require_relative "attribute_decorators"
+require_relative "define_callbacks"
+require_relative "errors"
+require_relative "log_subscriber"
+require_relative "explain_subscriber"
+require_relative "relation/delegation"
+require_relative "attributes"
+require_relative "type_caster"
+
+module ActiveRecord #:nodoc:
+ # = Active Record
+ #
+ # Active Record objects don't specify their attributes directly, but rather infer them from
+ # the table definition with which they're linked. Adding, removing, and changing attributes
+ # and their type is done directly in the database. Any change is instantly reflected in the
+ # Active Record objects. The mapping that binds a given Active Record class to a certain
+ # database table will happen automatically in most common cases, but can be overwritten for the uncommon ones.
+ #
+ # See the mapping rules in table_name and the full example in link:files/activerecord/README_rdoc.html for more insight.
+ #
+ # == Creation
+ #
+ # Active Records accept constructor parameters either in a hash or as a block. The hash
+ # method is especially useful when you're receiving the data from somewhere else, like an
+ # HTTP request. It works like this:
+ #
+ # user = User.new(name: "David", occupation: "Code Artist")
+ # user.name # => "David"
+ #
+ # You can also use block initialization:
+ #
+ # user = User.new do |u|
+ # u.name = "David"
+ # u.occupation = "Code Artist"
+ # end
+ #
+ # And of course you can just create a bare object and specify the attributes after the fact:
+ #
+ # user = User.new
+ # user.name = "David"
+ # user.occupation = "Code Artist"
+ #
+ # == Conditions
+ #
+ # Conditions can either be specified as a string, array, or hash representing the WHERE-part of an SQL statement.
+ # The array form is to be used when the condition input is tainted and requires sanitization. The string form can
+ # be used for statements that don't involve tainted data. The hash form works much like the array form, except
+ # only equality and range conditions are possible. Examples:
+ #
+ # class User < ActiveRecord::Base
+ # def self.authenticate_unsafely(user_name, password)
+ # where("user_name = '#{user_name}' AND password = '#{password}'").first
+ # end
+ #
+ # def self.authenticate_safely(user_name, password)
+ # where("user_name = ? AND password = ?", user_name, password).first
+ # end
+ #
+ # def self.authenticate_safely_simply(user_name, password)
+ # where(user_name: user_name, password: password).first
+ # end
+ # end
+ #
+ # The <tt>authenticate_unsafely</tt> method inserts the parameters directly into the query
+ # and is thus susceptible to SQL-injection attacks if the <tt>user_name</tt> and +password+
+ # parameters come directly from an HTTP request. The <tt>authenticate_safely</tt> and
+ # <tt>authenticate_safely_simply</tt> both will sanitize the <tt>user_name</tt> and +password+
+ # before inserting them in the query, which will ensure that an attacker can't escape the
+ # query and fake the login (or worse).
+ #
+ # When using multiple parameters in the conditions, it can easily become hard to read exactly
+ # what the fourth or fifth question mark is supposed to represent. In those cases, you can
+ # resort to named bind variables instead. That's done by replacing the question marks with
+ # symbols and supplying a hash with values for the matching symbol keys:
+ #
+ # Company.where(
+ # "id = :id AND name = :name AND division = :division AND created_at > :accounting_date",
+ # { id: 3, name: "37signals", division: "First", accounting_date: '2005-01-01' }
+ # ).first
+ #
+ # Similarly, a simple hash without a statement will generate conditions based on equality with the SQL AND
+ # operator. For instance:
+ #
+ # Student.where(first_name: "Harvey", status: 1)
+ # Student.where(params[:student])
+ #
+ # A range may be used in the hash to use the SQL BETWEEN operator:
+ #
+ # Student.where(grade: 9..12)
+ #
+ # An array may be used in the hash to use the SQL IN operator:
+ #
+ # Student.where(grade: [9,11,12])
+ #
+ # When joining tables, nested hashes or keys written in the form 'table_name.column_name'
+ # can be used to qualify the table name of a particular condition. For instance:
+ #
+ # Student.joins(:schools).where(schools: { category: 'public' })
+ # Student.joins(:schools).where('schools.category' => 'public')
+ #
+ # == Overwriting default accessors
+ #
+ # All column values are automatically available through basic accessors on the Active Record
+ # object, but sometimes you want to specialize this behavior. This can be done by overwriting
+ # the default accessors (using the same name as the attribute) and calling
+ # +super+ to actually change things.
+ #
+ # class Song < ActiveRecord::Base
+ # # Uses an integer of seconds to hold the length of the song
+ #
+ # def length=(minutes)
+ # super(minutes.to_i * 60)
+ # end
+ #
+ # def length
+ # super / 60
+ # end
+ # end
+ #
+ # == Attribute query methods
+ #
+ # In addition to the basic accessors, query methods are also automatically available on the Active Record object.
+ # Query methods allow you to test whether an attribute value is present.
+ # Additionally, when dealing with numeric values, a query method will return false if the value is zero.
+ #
+ # For example, an Active Record User with the <tt>name</tt> attribute has a <tt>name?</tt> method that you can call
+ # to determine whether the user has a name:
+ #
+ # user = User.new(name: "David")
+ # user.name? # => true
+ #
+ # anonymous = User.new(name: "")
+ # anonymous.name? # => false
+ #
+ # == Accessing attributes before they have been typecasted
+ #
+ # Sometimes you want to be able to read the raw attribute data without having the column-determined
+ # typecast run its course first. That can be done by using the <tt><attribute>_before_type_cast</tt>
+ # accessors that all attributes have. For example, if your Account model has a <tt>balance</tt> attribute,
+ # you can call <tt>account.balance_before_type_cast</tt> or <tt>account.id_before_type_cast</tt>.
+ #
+ # This is especially useful in validation situations where the user might supply a string for an
+ # integer field and you want to display the original string back in an error message. Accessing the
+ # attribute normally would typecast the string to 0, which isn't what you want.
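+ #
+ # A brief sketch, assuming +balance+ is an integer column on a hypothetical Account model:
+ #
+ # account = Account.new(balance: "not a number")
+ # account.balance # => 0
+ # account.balance_before_type_cast # => "not a number"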
+ #
+ # == Dynamic attribute-based finders
+ #
+ # Dynamic attribute-based finders are a mildly deprecated way of getting (and/or creating) objects
+ # by simple queries without turning to SQL. They work by appending the name of an attribute
+ # to <tt>find_by_</tt> like <tt>Person.find_by_user_name</tt>.
+ # Instead of writing <tt>Person.find_by(user_name: user_name)</tt>, you can use
+ # <tt>Person.find_by_user_name(user_name)</tt>.
+ #
+ # It's possible to add an exclamation point (!) on the end of the dynamic finders to get them to raise an
+ # ActiveRecord::RecordNotFound error if they do not return any records,
+ # like <tt>Person.find_by_last_name!</tt>.
+ #
+ # It's also possible to use multiple attributes in the same <tt>find_by_</tt> by separating them with
+ # "_and_".
+ #
+ # Person.find_by(user_name: user_name, password: password)
+ # Person.find_by_user_name_and_password(user_name, password) # with dynamic finder
+ #
+ # It's even possible to call these dynamic finder methods on relations and named scopes.
+ #
+ # Payment.order("created_on").find_by_amount(50)
+ #
+ # == Saving arrays, hashes, and other non-mappable objects in text columns
+ #
+ # Active Record can serialize any object in text columns using YAML. To do so, you must
+ # specify this with a call to the class method
+ # {serialize}[rdoc-ref:AttributeMethods::Serialization::ClassMethods#serialize].
+ # This makes it possible to store arrays, hashes, and other non-mappable objects without doing
+ # any additional work.
+ #
+ # class User < ActiveRecord::Base
+ # serialize :preferences
+ # end
+ #
+ # user = User.create(preferences: { "background" => "black", "display" => "large" })
+ # User.find(user.id).preferences # => { "background" => "black", "display" => "large" }
+ #
+ # You can also specify a class option as the second parameter that'll raise an exception
+ # if a serialized object is retrieved as a descendant of a class not in the hierarchy.
+ #
+ # class User < ActiveRecord::Base
+ # serialize :preferences, Hash
+ # end
+ #
+ # user = User.create(preferences: %w( one two three ))
+ # User.find(user.id).preferences # raises SerializationTypeMismatch
+ #
+ # When you specify a class option, the default value for that attribute will be a new
+ # instance of that class.
+ #
+ # class User < ActiveRecord::Base
+ # serialize :preferences, OpenStruct
+ # end
+ #
+ # user = User.new
+ # user.preferences.theme_color = "red"
+ #
+ #
+ # == Single table inheritance
+ #
+ # Active Record allows inheritance by storing the name of the class in a
+ # column that is named "type" by default. See ActiveRecord::Inheritance for
+ # more details.
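+ #
+ # For instance (a sketch using hypothetical models backed by a +companies+ table with a +type+ column):
+ #
+ # class Company < ActiveRecord::Base; end
+ # class Firm < Company; end
+ #
+ # firm = Firm.create!
+ # firm.type # => "Firm"
+ # Company.find(firm.id).class # => Firm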
+ #
+ # == Connection to multiple databases in different models
+ #
+ # Connections are usually created through
+ # {ActiveRecord::Base.establish_connection}[rdoc-ref:ConnectionHandling#establish_connection] and retrieved
+ # by ActiveRecord::Base.connection. All classes inheriting from ActiveRecord::Base will use this
+ # connection. But you can also set a class-specific connection. For example, if Course is an
+ # ActiveRecord::Base, but resides in a different database, you can just say <tt>Course.establish_connection</tt>
+ # and Course and all of its subclasses will use this connection instead.
+ #
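+ # A minimal sketch, assuming a +courses_database+ entry exists in
+ # config/database.yml:
+ #
+ # class Course < ActiveRecord::Base
+ # establish_connection :courses_database
+ # end
+ #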
+ # This feature is implemented by keeping a connection pool in ActiveRecord::Base that is
+ # a hash indexed by the class. If a connection is requested, the
+ # {ActiveRecord::Base.retrieve_connection}[rdoc-ref:ConnectionHandling#retrieve_connection] method
+ # will go up the class-hierarchy until a connection is found in the connection pool.
+ #
+ # == Exceptions
+ #
+ # * ActiveRecordError - Generic error class and superclass of all other errors raised by Active Record.
+ # * AdapterNotSpecified - The configuration hash used in
+ # {ActiveRecord::Base.establish_connection}[rdoc-ref:ConnectionHandling#establish_connection]
+ # didn't include an <tt>:adapter</tt> key.
+ # * AdapterNotFound - The <tt>:adapter</tt> key used in
+ # {ActiveRecord::Base.establish_connection}[rdoc-ref:ConnectionHandling#establish_connection]
+ # specified a non-existent adapter
+ # (or a bad spelling of an existing one).
+ # * AssociationTypeMismatch - The object assigned to the association wasn't of the type
+ # specified in the association definition.
+ # * AttributeAssignmentError - An error occurred while doing a mass assignment through the
+ # {ActiveRecord::Base#attributes=}[rdoc-ref:AttributeAssignment#attributes=] method.
+ # You can inspect the +attribute+ property of the exception object to determine which attribute
+ # triggered the error.
+ # * ConnectionNotEstablished - No connection has been established.
+ # Use {ActiveRecord::Base.establish_connection}[rdoc-ref:ConnectionHandling#establish_connection] before querying.
+ # * MultiparameterAssignmentErrors - Collection of errors that occurred during a mass assignment using the
+ # {ActiveRecord::Base#attributes=}[rdoc-ref:AttributeAssignment#attributes=] method.
+ # The +errors+ property of this exception contains an array of
+ # AttributeAssignmentError
+ # objects that should be inspected to determine which attributes triggered the errors.
+ # * RecordInvalid - raised by {ActiveRecord::Base#save!}[rdoc-ref:Persistence#save!] and
+ # {ActiveRecord::Base.create!}[rdoc-ref:Persistence::ClassMethods#create!]
+ # when the record is invalid.
+ # * RecordNotFound - No record responded to the {ActiveRecord::Base.find}[rdoc-ref:FinderMethods#find] method.
+ # Either the row with the given ID doesn't exist or the row didn't meet the additional restrictions.
+ # Some {ActiveRecord::Base.find}[rdoc-ref:FinderMethods#find] calls do not raise this exception to signal
+ # nothing was found, please check its documentation for further details.
+ # * SerializationTypeMismatch - The serialized object wasn't of the class specified as the second parameter.
+ # * StatementInvalid - The database server rejected the SQL statement. The precise error is added in the message.
+ #
+ # *Note*: The attributes listed are class-level attributes (accessible from both the class and instance level).
+ # So it's possible to assign a logger to the class through <tt>Base.logger=</tt> which will then be used by all
+ # instances in the current object space.
+ class Base
+ extend ActiveModel::Naming
+
+ extend ActiveSupport::Benchmarkable
+ extend ActiveSupport::DescendantsTracker
+
+ extend ConnectionHandling
+ extend QueryCache::ClassMethods
+ extend Querying
+ extend Translation
+ extend DynamicMatchers
+ extend Explain
+ extend Enum
+ extend Delegation::DelegateCache
+ extend CollectionCacheKey
+
+ include Core
+ include Persistence
+ include ReadonlyAttributes
+ include ModelSchema
+ include Inheritance
+ include Scoping
+ include Sanitization
+ include AttributeAssignment
+ include ActiveModel::Conversion
+ include Integration
+ include Validations
+ include CounterCache
+ include Attributes
+ include AttributeDecorators
+ include Locking::Optimistic
+ include Locking::Pessimistic
+ include DefineCallbacks
+ include AttributeMethods
+ include Callbacks
+ include Timestamp
+ include Associations
+ include ActiveModel::SecurePassword
+ include AutosaveAssociation
+ include NestedAttributes
+ include Aggregations
+ include Transactions
+ include TouchLater
+ include NoTouching
+ include Reflection
+ include Serialization
+ include Store
+ include SecureToken
+ include Suppressor
+ end
+
+ ActiveSupport.run_load_hooks(:active_record, Base)
+end
diff --git a/activerecord/lib/active_record/callbacks.rb b/activerecord/lib/active_record/callbacks.rb
new file mode 100644
index 0000000000..a2439e6ec7
--- /dev/null
+++ b/activerecord/lib/active_record/callbacks.rb
@@ -0,0 +1,349 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ # = Active Record \Callbacks
+ #
+ # \Callbacks are hooks into the life cycle of an Active Record object that allow you to trigger logic
+ # before or after an alteration of the object state. This can be used to make sure that associated and
+ # dependent objects are deleted when {ActiveRecord::Base#destroy}[rdoc-ref:Persistence#destroy] is called (by overwriting +before_destroy+) or
+ # to massage attributes before they're validated (by overwriting +before_validation+).
+ # As an example of the callbacks initiated, consider the {ActiveRecord::Base#save}[rdoc-ref:Persistence#save] call for a new record:
+ #
+ # * (-) <tt>save</tt>
+ # * (-) <tt>valid</tt>
+ # * (1) <tt>before_validation</tt>
+ # * (-) <tt>validate</tt>
+ # * (2) <tt>after_validation</tt>
+ # * (3) <tt>before_save</tt>
+ # * (4) <tt>before_create</tt>
+ # * (-) <tt>create</tt>
+ # * (5) <tt>after_create</tt>
+ # * (6) <tt>after_save</tt>
+ # * (7) <tt>after_commit</tt>
+ #
+ # Also, an <tt>after_rollback</tt> callback can be configured to be triggered whenever a rollback is issued.
+ # Check out ActiveRecord::Transactions for more details about <tt>after_commit</tt> and
+ # <tt>after_rollback</tt>.
+ #
+ # Additionally, an <tt>after_touch</tt> callback is triggered whenever an
+ # object is touched.
+ #
+ # Lastly an <tt>after_find</tt> and <tt>after_initialize</tt> callback is triggered for each object that
+ # is found and instantiated by a finder, with <tt>after_initialize</tt> being triggered after new objects
+ # are instantiated as well.
+ #
+ # There are nineteen callbacks in total, which give you immense power to react and prepare for each state in the
+ # Active Record life cycle. The sequence for calling {ActiveRecord::Base#save}[rdoc-ref:Persistence#save] for an existing record is similar,
+ # except that each <tt>_create</tt> callback is replaced by the corresponding <tt>_update</tt> callback.
+ #
+ # Examples:
+ # class CreditCard < ActiveRecord::Base
+ # # Strip everything but digits, so the user can specify "555 234 34" or
+ # # "5552-3434" and both will mean "55523434"
+ # before_validation(on: :create) do
+ # self.number = number.gsub(/[^0-9]/, "") if attribute_present?("number")
+ # end
+ # end
+ #
+ # class Subscription < ActiveRecord::Base
+ # before_create :record_signup
+ #
+ # private
+ # def record_signup
+ # self.signed_up_on = Date.today
+ # end
+ # end
+ #
+ # class Firm < ActiveRecord::Base
+ # # Disables access to the system for associated clients and people when the firm is destroyed
+ # before_destroy { |record| Person.where(firm_id: record.id).update_all(access: 'disabled') }
+ # before_destroy { |record| Client.where(client_of: record.id).update_all(access: 'disabled') }
+ # end
+ #
+ # == Inheritable callback queues
+ #
+ # Besides the overwritable callback methods, it's also possible to register callbacks through the
+ # use of the callback macros. Their main advantage is that the macros add behavior into a callback
+ # queue that is kept intact down through an inheritance hierarchy.
+ #
+ # class Topic < ActiveRecord::Base
+ # before_destroy :destroy_author
+ # end
+ #
+ # class Reply < Topic
+ # before_destroy :destroy_readers
+ # end
+ #
+ # Now, when <tt>Topic#destroy</tt> is run only +destroy_author+ is called. When <tt>Reply#destroy</tt> is
+ # run, both +destroy_author+ and +destroy_readers+ are called. Contrast this to the following situation
+ # where the +before_destroy+ method is overridden:
+ #
+ # class Topic < ActiveRecord::Base
+ # def before_destroy() destroy_author end
+ # end
+ #
+ # class Reply < Topic
+ # def before_destroy() destroy_readers end
+ # end
+ #
+ # In that case, <tt>Reply#destroy</tt> would only run +destroy_readers+ and _not_ +destroy_author+.
+ # So, use the callback macros when you want to ensure that a certain callback is called for the entire
+ # hierarchy, and use the regular overwritable methods when you want to leave it up to each descendant
+ # to decide whether they want to call +super+ and trigger the inherited callbacks.
+ #
+ # *IMPORTANT:* In order for inheritance to work for the callback queues, you must specify the
+ # callbacks before specifying the associations. Otherwise, you might trigger the loading of a
+ # child before the parent has registered the callbacks and they won't be inherited.
+ #
+ # == Types of callbacks
+ #
+ # There are four types of callbacks accepted by the callback macros: Method references (symbol), callback objects,
+ # inline methods (using a proc), and inline eval methods (using a string). Method references and callback objects
+ # are the recommended approaches, inline methods using a proc are sometimes appropriate (such as for
+ # creating mix-ins), and inline eval methods are deprecated.
+ #
+ # The method reference callbacks work by specifying a protected or private method available in the object, like this:
+ #
+ # class Topic < ActiveRecord::Base
+ # before_destroy :delete_parents
+ #
+ # private
+ # def delete_parents
+ # self.class.delete_all "parent_id = #{id}"
+ # end
+ # end
+ #
+ # The callback objects have methods named after the callback called with the record as the only parameter, such as:
+ #
+ # class BankAccount < ActiveRecord::Base
+ # before_save EncryptionWrapper.new
+ # after_save EncryptionWrapper.new
+ # after_initialize EncryptionWrapper.new
+ # end
+ #
+ # class EncryptionWrapper
+ # def before_save(record)
+ # record.credit_card_number = encrypt(record.credit_card_number)
+ # end
+ #
+ # def after_save(record)
+ # record.credit_card_number = decrypt(record.credit_card_number)
+ # end
+ #
+ # alias_method :after_initialize, :after_save
+ #
+ # private
+ # def encrypt(value)
+ # # Secrecy is committed
+ # end
+ #
+ # def decrypt(value)
+ # # Secrecy is unveiled
+ # end
+ # end
+ #
+ # So you specify the object you want messaged on a given callback. When that callback is triggered, the object has
+ # a method by the name of the callback messaged. You can make these callbacks more flexible by passing in other
+ # initialization data such as the name of the attribute to work with:
+ #
+ # class BankAccount < ActiveRecord::Base
+ # before_save EncryptionWrapper.new("credit_card_number")
+ # after_save EncryptionWrapper.new("credit_card_number")
+ # after_initialize EncryptionWrapper.new("credit_card_number")
+ # end
+ #
+ # class EncryptionWrapper
+ # def initialize(attribute)
+ # @attribute = attribute
+ # end
+ #
+ # def before_save(record)
+ # record.send("#{@attribute}=", encrypt(record.send("#{@attribute}")))
+ # end
+ #
+ # def after_save(record)
+ # record.send("#{@attribute}=", decrypt(record.send("#{@attribute}")))
+ # end
+ #
+ # alias_method :after_initialize, :after_save
+ #
+ # private
+ # def encrypt(value)
+ # # Secrecy is committed
+ # end
+ #
+ # def decrypt(value)
+ # # Secrecy is unveiled
+ # end
+ # end
+ #
+ # == <tt>before_validation*</tt> returning statements
+ #
+ # If the +before_validation+ callback throws +:abort+, the process will be
+ # aborted and {ActiveRecord::Base#save}[rdoc-ref:Persistence#save] will return +false+.
+ # If {ActiveRecord::Base#save!}[rdoc-ref:Persistence#save!] is called it will raise an ActiveRecord::RecordInvalid exception.
+ # Nothing will be appended to the errors object.
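+ #
+ # A short sketch (a hypothetical Comment model with a +body+ column):
+ #
+ # class Comment < ActiveRecord::Base
+ # before_validation { throw(:abort) if body.blank? }
+ # end
+ #
+ # Comment.new.save # => false
+ # Comment.new.save! # raises ActiveRecord::RecordInvalid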
+ #
+ # == Canceling callbacks
+ #
+ # If a <tt>before_*</tt> callback throws +:abort+, all the later callbacks and
+ # the associated action are cancelled.
+ # Callbacks are generally run in the order they are defined, with the exception of callbacks defined as
+ # methods on the model, which are called last.
+ #
+ # == Ordering callbacks
+ #
+ # Sometimes the code requires that the callbacks be executed in a specific order. For example, a +before_destroy+
+ # callback (+log_children+ in this case) should be executed before the children get destroyed by the
+ # <tt>dependent: :destroy</tt> option.
+ #
+ # Let's look at the code below:
+ #
+ # class Topic < ActiveRecord::Base
+ # has_many :children, dependent: :destroy
+ #
+ # before_destroy :log_children
+ #
+ # private
+ # def log_children
+ # # Child processing
+ # end
+ # end
+ #
+ # In this case, the problem is that when the +before_destroy+ callback is executed, the children are not available
+ # because the {ActiveRecord::Base#destroy}[rdoc-ref:Persistence#destroy] callback gets executed first.
+ # You can use the +prepend+ option on the +before_destroy+ callback to avoid this.
+ #
+ # class Topic < ActiveRecord::Base
+ # has_many :children, dependent: :destroy
+ #
+ # before_destroy :log_children, prepend: true
+ #
+ # private
+ # def log_children
+ # # Child processing
+ # end
+ # end
+ #
+ # This way, the +before_destroy+ gets executed before the <tt>dependent: :destroy</tt> is called, and the data is still available.
+ #
+ # Also, there are cases when you want several callbacks of the same type to
+ # be executed in order.
+ #
+ # For example:
+ #
+ # class Topic
+ # has_many :children
+ #
+ # after_save :log_children
+ # after_save :do_something_else
+ #
+ # private
+ #
+ # def log_children
+ # # Child processing
+ # end
+ #
+ # def do_something_else
+ # # Something else
+ # end
+ # end
+ #
+ # In this case the +log_children+ gets executed before +do_something_else+.
+ # The same applies to all non-transactional callbacks.
+ #
+ # In case there are multiple transactional callbacks as seen below, the order
+ # is reversed.
+ #
+ # For example:
+ #
+ # class Topic
+ # has_many :children
+ #
+ # after_commit :log_children
+ # after_commit :do_something_else
+ #
+ # private
+ #
+ # def log_children
+ # # Child processing
+ # end
+ #
+ # def do_something_else
+ # # Something else
+ # end
+ # end
+ #
+ # In this case the +do_something_else+ gets executed before +log_children+.
+ #
+ # == \Transactions
+ #
+ # The entire callback chain of a {#save}[rdoc-ref:Persistence#save], {#save!}[rdoc-ref:Persistence#save!],
+ # or {#destroy}[rdoc-ref:Persistence#destroy] call runs within a transaction. That includes <tt>after_*</tt> hooks.
+ # If everything goes fine a COMMIT is executed once the chain has been completed.
+ #
+ # If a <tt>before_*</tt> callback cancels the action, a ROLLBACK is issued. You
+ # can also trigger a ROLLBACK by raising an exception in any of the callbacks,
+ # including <tt>after_*</tt> hooks. Note, however, that in that case the client
+ # needs to be aware of it because an ordinary {#save}[rdoc-ref:Persistence#save] will raise such an exception
+ # instead of quietly returning +false+.
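+ #
+ # A brief sketch (a hypothetical Invoice model):
+ #
+ # class Invoice < ActiveRecord::Base
+ # after_save { raise "boom" }
+ # end
+ #
+ # Invoice.create # raises RuntimeError; the INSERT is rolled back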
+ #
+ # == Debugging callbacks
+ #
+ # The callback chain is accessible via the <tt>_*_callbacks</tt> method on an object. Active Model \Callbacks support
+ # <tt>:before</tt>, <tt>:after</tt> and <tt>:around</tt> as values for the <tt>kind</tt> property. The <tt>kind</tt> property
+ # defines what part of the chain the callback runs in.
+ #
+ # To find all callbacks in the before_save callback chain:
+ #
+ # Topic._save_callbacks.select { |cb| cb.kind.eql?(:before) }
+ #
+ # Returns an array of callback objects that form the before_save chain.
+ #
+ # To further check if the before_save chain contains a proc defined as <tt>rest_when_dead</tt>, use the <tt>filter</tt> property of the callback object:
+ #
+ # Topic._save_callbacks.select { |cb| cb.kind.eql?(:before) }.collect(&:filter).include?(:rest_when_dead)
+ #
+ # Returns true or false depending on whether the proc is contained in the before_save callback chain on a Topic model.
+ #
+ module Callbacks
+ extend ActiveSupport::Concern
+
+ CALLBACKS = [
+ :after_initialize, :after_find, :after_touch, :before_validation, :after_validation,
+ :before_save, :around_save, :after_save, :before_create, :around_create,
+ :after_create, :before_update, :around_update, :after_update,
+ :before_destroy, :around_destroy, :after_destroy, :after_commit, :after_rollback
+ ]
+
+ def destroy #:nodoc:
+ @_destroy_callback_already_called ||= false
+ return if @_destroy_callback_already_called
+ @_destroy_callback_already_called = true
+ _run_destroy_callbacks { super }
+ rescue RecordNotDestroyed => e
+ @_association_destroy_exception = e
+ false
+ ensure
+ @_destroy_callback_already_called = false
+ end
+
+ def touch(*) #:nodoc:
+ _run_touch_callbacks { super }
+ end
+
+ private
+
+ def create_or_update(*)
+ _run_save_callbacks { super }
+ end
+
+ def _create_record
+ _run_create_callbacks { super }
+ end
+
+ def _update_record(*)
+ _run_update_callbacks { super }
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/coders/json.rb b/activerecord/lib/active_record/coders/json.rb
new file mode 100644
index 0000000000..a69b38487e
--- /dev/null
+++ b/activerecord/lib/active_record/coders/json.rb
@@ -0,0 +1,15 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ module Coders # :nodoc:
+ class JSON # :nodoc:
+ def self.dump(obj)
+ ActiveSupport::JSON.encode(obj)
+ end
+
+ def self.load(json)
+ ActiveSupport::JSON.decode(json) unless json.blank?
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/coders/yaml_column.rb b/activerecord/lib/active_record/coders/yaml_column.rb
new file mode 100644
index 0000000000..11559141c7
--- /dev/null
+++ b/activerecord/lib/active_record/coders/yaml_column.rb
@@ -0,0 +1,50 @@
+# frozen_string_literal: true
+
+require "yaml"
+
+module ActiveRecord
+ module Coders # :nodoc:
+ class YAMLColumn # :nodoc:
+ attr_accessor :object_class
+
+ def initialize(attr_name, object_class = Object)
+ @attr_name = attr_name
+ @object_class = object_class
+ check_arity_of_constructor
+ end
+
+ def dump(obj)
+ return if obj.nil?
+
+ assert_valid_value(obj, action: "dump")
+ YAML.dump obj
+ end
+
+ def load(yaml)
+ return object_class.new if object_class != Object && yaml.nil?
+ return yaml unless yaml.is_a?(String) && /^---/.match?(yaml)
+ obj = YAML.load(yaml)
+
+ assert_valid_value(obj, action: "load")
+ obj ||= object_class.new if object_class != Object
+
+ obj
+ end
+
+ def assert_valid_value(obj, action:)
+ unless obj.nil? || obj.is_a?(object_class)
+ raise SerializationTypeMismatch,
+ "can't #{action} `#{@attr_name}`: was supposed to be a #{object_class}, but was a #{obj.class}. -- #{obj.inspect}"
+ end
+ end
+
+ private
+
+ def check_arity_of_constructor
+ load(nil)
+ rescue ArgumentError
+ raise ArgumentError, "Cannot serialize #{object_class}. Classes passed to `serialize` must have a 0 argument constructor."
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/collection_cache_key.rb b/activerecord/lib/active_record/collection_cache_key.rb
new file mode 100644
index 0000000000..b1937a3c68
--- /dev/null
+++ b/activerecord/lib/active_record/collection_cache_key.rb
@@ -0,0 +1,51 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ module CollectionCacheKey
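+ # Builds a cache key of the form
+ # "<model-cache-key>/query-<md5-of-sql>-<size>-<max-timestamp>" (the
+ # timestamp suffix is omitted when the collection is empty). Size and
+ # timestamp are read from the loaded collection, or fetched with a single
+ # COUNT/MAX query otherwise.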
+ def collection_cache_key(collection = all, timestamp_column = :updated_at) # :nodoc:
+ query_signature = Digest::MD5.hexdigest(collection.to_sql)
+ key = "#{collection.model_name.cache_key}/query-#{query_signature}"
+
+ if collection.loaded?
+ size = collection.size
+ if size > 0
+ timestamp = collection.max_by(&timestamp_column)._read_attribute(timestamp_column)
+ end
+ else
+ column_type = type_for_attribute(timestamp_column.to_s)
+ column = "#{connection.quote_table_name(collection.table_name)}.#{connection.quote_column_name(timestamp_column)}"
+ select_values = "COUNT(*) AS #{connection.quote_column_name("size")}, MAX(%s) AS timestamp"
+
+ if collection.has_limit_or_offset?
+ query = collection.spawn
+ query.select_values = [column]
+ subquery_alias = "subquery_for_cache_key"
+ subquery_column = "#{subquery_alias}.#{timestamp_column}"
+ subquery = query.arel.as(subquery_alias)
+ arel = Arel::SelectManager.new(subquery).project(select_values % subquery_column)
+ else
+ query = collection.unscope(:order)
+ query.select_values = [select_values % column]
+ arel = query.arel
+ end
+
+ result = connection.select_one(arel, nil)
+
+ if result.blank?
+ size = 0
+ timestamp = nil
+ else
+ size = result["size"]
+ timestamp = column_type.deserialize(result["timestamp"])
+ end
+
+ end
+
+ if timestamp
+ "#{key}-#{size}-#{timestamp.utc.to_s(cache_timestamp_format)}"
+ else
+ "#{key}-#{size}"
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/connection_adapters/abstract/connection_pool.rb b/activerecord/lib/active_record/connection_adapters/abstract/connection_pool.rb
new file mode 100644
index 0000000000..c11b7b012f
--- /dev/null
+++ b/activerecord/lib/active_record/connection_adapters/abstract/connection_pool.rb
@@ -0,0 +1,989 @@
+# frozen_string_literal: true
+
+require "thread"
+require "concurrent/map"
+require "monitor"
+
+module ActiveRecord
+ # Raised when a connection could not be obtained within the connection
+ # acquisition timeout period because all connections in the pool are in use.
+ class ConnectionTimeoutError < ConnectionNotEstablished
+ end
+
+ # Raised when a pool was unable to get ahold of all its connections
+ # to perform a "group" action such as
+ # {ActiveRecord::Base.connection_pool.disconnect!}[rdoc-ref:ConnectionAdapters::ConnectionPool#disconnect!]
+ # or {ActiveRecord::Base.clear_reloadable_connections!}[rdoc-ref:ConnectionAdapters::ConnectionHandler#clear_reloadable_connections!].
+ class ExclusiveConnectionTimeoutError < ConnectionTimeoutError
+ end
+
+ module ConnectionAdapters
+ # Connection pool base class for managing Active Record database
+ # connections.
+ #
+ # == Introduction
+ #
+ # A connection pool synchronizes thread access to a limited number of
+ # database connections. The basic idea is that each thread checks out a
+ # database connection from the pool, uses that connection, and checks the
+ # connection back in. ConnectionPool is completely thread-safe, and will
+ # ensure that a connection cannot be used by two threads at the same time,
+ # as long as ConnectionPool's contract is correctly followed. It will also
+ # handle cases in which there are more threads than connections: if all
+ # connections have been checked out, and a thread tries to checkout a
+ # connection anyway, then ConnectionPool will wait until some other thread
+ # has checked in a connection.
+ #
+ # == Obtaining (checking out) a connection
+ #
+ # Connections can be obtained and used from a connection pool in several
+ # ways:
+ #
+ # 1. Simply use {ActiveRecord::Base.connection}[rdoc-ref:ConnectionHandling.connection]
+ # as with Active Record 2.1 and
+ # earlier (pre-connection-pooling). Eventually, when you're done with
+ # the connection(s) and wish it to be returned to the pool, you call
+ # {ActiveRecord::Base.clear_active_connections!}[rdoc-ref:ConnectionAdapters::ConnectionHandler#clear_active_connections!].
+ # This will be the default behavior for Active Record when used in conjunction with
+ # Action Pack's request handling cycle.
+ # 2. Manually check out a connection from the pool with
+ # {ActiveRecord::Base.connection_pool.checkout}[rdoc-ref:#checkout]. You are responsible for
+ # returning this connection to the pool when finished by calling
+ # {ActiveRecord::Base.connection_pool.checkin(connection)}[rdoc-ref:#checkin].
+ # 3. Use {ActiveRecord::Base.connection_pool.with_connection(&block)}[rdoc-ref:#with_connection], which
+ # obtains a connection, yields it as the sole argument to the block,
+ # and returns it to the pool after the block completes.
+ #
+ # Connections in the pool are actually AbstractAdapter objects (or objects
+ # compatible with AbstractAdapter's interface).
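+ #
+ # For example (a sketch; the SQL and table are hypothetical):
+ #
+ # ActiveRecord::Base.connection_pool.with_connection do |conn|
+ # conn.select_value("SELECT COUNT(*) FROM orders")
+ # end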
+ #
+ # == Options
+ #
+ # There are several connection-pooling-related options that you can add to
+ # your database connection configuration:
+ #
+ # * +pool+: number indicating size of connection pool (default 5)
+ # * +checkout_timeout+: number of seconds to block and wait for a connection
+ # before giving up and raising a timeout error (default 5 seconds).
+ # * +reaping_frequency+: frequency in seconds to periodically run the
+ # Reaper, which attempts to find and recover connections from dead
+ # threads, which can occur if a programmer forgets to close a
+ # connection at the end of a thread or a thread dies unexpectedly.
+ # Regardless of this setting, the Reaper will be invoked before every
+ # blocking wait. (Default +nil+, which means don't schedule the Reaper).
+ #
+ #--
+ # Synchronization policy:
+ # * all public methods can be called outside +synchronize+
+ # * access to these instance variables needs to be in +synchronize+:
+ # * @connections
+ # * @now_connecting
+ # * private methods that require being called in a +synchronize+ block
+ # are now explicitly documented
+ class ConnectionPool
+ # Threadsafe, fair, FIFO queue. Meant to be used by ConnectionPool
+ # with which it shares a Monitor. But could be a generic Queue.
+ #
+ # The Queue in stdlib's 'thread' could replace this class except
+ # stdlib's doesn't support waiting with a timeout.
+ class Queue
+ def initialize(lock = Monitor.new)
+ @lock = lock
+ @cond = @lock.new_cond
+ @num_waiting = 0
+ @queue = []
+ end
+
+ # Test if any threads are currently waiting on the queue.
+ def any_waiting?
+ synchronize do
+ @num_waiting > 0
+ end
+ end
+
+ # Returns the number of threads currently waiting on this
+ # queue.
+ def num_waiting
+ synchronize do
+ @num_waiting
+ end
+ end
+
+ # Add +element+ to the queue. Never blocks.
+ def add(element)
+ synchronize do
+ @queue.push element
+ @cond.signal
+ end
+ end
+
+ # If +element+ is in the queue, remove and return it, or +nil+.
+ def delete(element)
+ synchronize do
+ @queue.delete(element)
+ end
+ end
+
+ # Remove all elements from the queue.
+ def clear
+ synchronize do
+ @queue.clear
+ end
+ end
+
+ # Remove the head of the queue.
+ #
+ # If +timeout+ is not given, remove and return the head of the
+ # queue if the number of available elements is strictly
+ # greater than the number of threads currently waiting (that
+ # is, don't jump ahead in line). Otherwise, return +nil+.
+ #
+ # If +timeout+ is given, block if there is no element
+ # available, waiting up to +timeout+ seconds for an element to
+ # become available.
+ #
+ # Raises:
+ # - ActiveRecord::ConnectionTimeoutError if +timeout+ is given and no element
+ # becomes available within +timeout+ seconds.
+ def poll(timeout = nil)
+ synchronize { internal_poll(timeout) }
+ end
+
+ private
+
+ def internal_poll(timeout)
+ no_wait_poll || (timeout && wait_poll(timeout))
+ end
+
+ def synchronize(&block)
+ @lock.synchronize(&block)
+ end
+
+ # Test if the queue currently contains any elements.
+ def any?
+ !@queue.empty?
+ end
+
+ # A thread can remove an element from the queue without
+ # waiting if and only if the number of currently available
+ # connections is strictly greater than the number of waiting
+ # threads.
+ def can_remove_no_wait?
+ @queue.size > @num_waiting
+ end
+
+ # Removes and returns the head of the queue if possible, or +nil+.
+ def remove
+ @queue.shift
+ end
+
+ # Remove and return the head of the queue if the number of
+ # available elements is strictly greater than the number of
+ # threads currently waiting. Otherwise, return +nil+.
+ def no_wait_poll
+ remove if can_remove_no_wait?
+ end
+
+ # Waits on the queue up to +timeout+ seconds, then removes and
+ # returns the head of the queue.
+ def wait_poll(timeout)
+ @num_waiting += 1
+
+ t0 = Time.now
+ elapsed = 0
+ loop do
+ @cond.wait(timeout - elapsed)
+
+ return remove if any?
+
+ elapsed = Time.now - t0
+ if elapsed >= timeout
+ msg = "could not obtain a connection from the pool within %0.3f seconds (waited %0.3f seconds); all pooled connections were in use" %
+ [timeout, elapsed]
+ raise ConnectionTimeoutError, msg
+ end
+ end
+ ensure
+ @num_waiting -= 1
+ end
+ end
+
+ # Adds the ability to turn a basic fair FIFO queue into one
+ # biased to some thread.
+ module BiasableQueue # :nodoc:
+ class BiasedConditionVariable # :nodoc:
+ # semantics of condition variables guarantee that +broadcast+, +broadcast_on_biased+,
+ # +signal+ and +wait+ methods are only called while holding a lock
+ def initialize(lock, other_cond, preferred_thread)
+ @real_cond = lock.new_cond
+ @other_cond = other_cond
+ @preferred_thread = preferred_thread
+ @num_waiting_on_real_cond = 0
+ end
+
+ def broadcast
+ broadcast_on_biased
+ @other_cond.broadcast
+ end
+
+ def broadcast_on_biased
+ @num_waiting_on_real_cond = 0
+ @real_cond.broadcast
+ end
+
+ def signal
+ if @num_waiting_on_real_cond > 0
+ @num_waiting_on_real_cond -= 1
+ @real_cond
+ else
+ @other_cond
+ end.signal
+ end
+
+ def wait(timeout)
+ if Thread.current == @preferred_thread
+ @num_waiting_on_real_cond += 1
+ @real_cond
+ else
+ @other_cond
+ end.wait(timeout)
+ end
+ end
+
+ def with_a_bias_for(thread)
+ previous_cond = nil
+ new_cond = nil
+ synchronize do
+ previous_cond = @cond
+ @cond = new_cond = BiasedConditionVariable.new(@lock, @cond, thread)
+ end
+ yield
+ ensure
+ synchronize do
+ @cond = previous_cond if previous_cond
+ new_cond.broadcast_on_biased if new_cond # wake up any remaining sleepers
+ end
+ end
+ end
+
+ # Connections must be leased while holding the main pool mutex. This is
+ # an internal subclass that also leases returned connections while
+ # still in the queue's critical section (the queue synchronizes on the same
+ # +@lock+ as the main pool) so that a returned connection is already
+ # leased and there is no need to re-enter a synchronized block.
+ class ConnectionLeasingQueue < Queue # :nodoc:
+ include BiasableQueue
+
+ private
+ def internal_poll(timeout)
+ conn = super
+ conn.lease if conn
+ conn
+ end
+ end
+
+ # Every +frequency+ seconds, the reaper will call +reap+ on +pool+.
+ # A reaper instantiated with a +nil+ frequency will never reap the
+ # connection pool.
+ #
+ # Configure the frequency by setting "reaping_frequency" in your
+ # database yaml file.
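+ #
+ # For example, in config/database.yml (the values are illustrative):
+ #
+ # production:
+ # adapter: postgresql
+ # pool: 5
+ # reaping_frequency: 10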
+ class Reaper
+ attr_reader :pool, :frequency
+
+ def initialize(pool, frequency)
+ @pool = pool
+ @frequency = frequency
+ end
+
+ def run
+ return unless frequency
+ Thread.new(frequency, pool) { |t, p|
+ loop do
+ sleep t
+ p.reap
+ end
+ }
+ end
+ end
+
+ include MonitorMixin
+ include QueryCache::ConnectionPoolConfiguration
+
+ attr_accessor :automatic_reconnect, :checkout_timeout, :schema_cache
+ attr_reader :spec, :connections, :size, :reaper
+
+ # Creates a new ConnectionPool object. +spec+ is a ConnectionSpecification
+ # object which describes database connection information (e.g. adapter,
+ # host name, username, password, etc), as well as the maximum size for
+ # this ConnectionPool.
+ #
+ # The default ConnectionPool maximum size is 5.
+ def initialize(spec)
+ super()
+
+ @spec = spec
+
+ @checkout_timeout = (spec.config[:checkout_timeout] && spec.config[:checkout_timeout].to_f) || 5
+ @reaper = Reaper.new(self, (spec.config[:reaping_frequency] && spec.config[:reaping_frequency].to_f))
+ @reaper.run
+
+ # default max pool size to 5
+ @size = (spec.config[:pool] && spec.config[:pool].to_i) || 5
+
+ # This variable tracks the cache of threads mapped to reserved connections, with the
+ # sole purpose of speeding up the +connection+ method. It is not the authoritative
+ # registry of which thread owns which connection. Connection ownership is tracked by
+ # the +connection.owner+ attr on each +connection+ instance.
+ # The invariant works like this: if there is a mapping of <tt>thread => conn</tt>,
+ # then that +thread+ does indeed own that +conn+. However, the absence of such a
+ # mapping does not mean that the +thread+ doesn't own that connection. In
+ # that case the +conn.owner+ attr should be consulted.
+ # Access and modification of +@thread_cached_conns+ does not require
+ # synchronization.
+ @thread_cached_conns = Concurrent::Map.new(initial_capacity: @size)
+
+ @connections = []
+ @automatic_reconnect = true
+
+ # Connection pool allows for concurrent (outside the main +synchronize+ section)
+ # establishment of new connections. This variable tracks the number of threads
+ # currently in the process of independently establishing connections to the DB.
+ @now_connecting = 0
+
+ @threads_blocking_new_connections = 0
+
+ @available = ConnectionLeasingQueue.new self
+
+ @lock_thread = false
+ end
+
+ def lock_thread=(lock_thread)
+ if lock_thread
+ @lock_thread = Thread.current
+ else
+ @lock_thread = nil
+ end
+ end
+
+ # Retrieve the connection associated with the current thread, or call
+ # #checkout to obtain one if necessary.
+ #
+ # #connection can be called any number of times; the connection is
+ # held in a cache keyed by a thread.
+ def connection
+ @thread_cached_conns[connection_cache_key(@lock_thread || Thread.current)] ||= checkout
+ end
+
+ # Returns true if there is an open connection being used for the current thread.
+ #
+ # This method only works for connections that have been obtained through
+ # #connection or #with_connection methods. Connections obtained through
+ # #checkout will not be detected by #active_connection?
+ def active_connection?
+ @thread_cached_conns[connection_cache_key(Thread.current)]
+ end
+
+ # Signal that the thread is finished with the current connection.
+ # #release_connection releases the connection-thread association
+ # and returns the connection to the pool.
+ #
+ # This method only works for connections that have been obtained through
+ # the #connection or #with_connection methods; connections obtained through
+ # #checkout will not be automatically released.
+ def release_connection(owner_thread = Thread.current)
+ if conn = @thread_cached_conns.delete(connection_cache_key(owner_thread))
+ checkin conn
+ end
+ end
+
+ # If a connection obtained through the #connection or #with_connection methods
+ # already exists, yield it to the block. If no such connection
+ # exists, checkout a connection, yield it to the block, and checkin the
+ # connection when finished.
+ def with_connection
+ unless conn = @thread_cached_conns[connection_cache_key(Thread.current)]
+ conn = connection
+ fresh_connection = true
+ end
+ yield conn
+ ensure
+ release_connection if fresh_connection
+ end
+
+ # Returns true if a connection has already been opened.
+ def connected?
+ synchronize { @connections.any? }
+ end
+
+ # Disconnects all connections in the pool, and clears the pool.
+ #
+ # Raises:
+ # - ActiveRecord::ExclusiveConnectionTimeoutError if unable to gain ownership of all
+ # connections in the pool within a timeout interval (default duration is
+ # <tt>spec.config[:checkout_timeout] * 2</tt> seconds).
+ def disconnect(raise_on_acquisition_timeout = true)
+ with_exclusively_acquired_all_connections(raise_on_acquisition_timeout) do
+ synchronize do
+ @connections.each do |conn|
+ if conn.in_use?
+ conn.steal!
+ checkin conn
+ end
+ conn.disconnect!
+ end
+ @connections = []
+ @available.clear
+ end
+ end
+ end
+
+ # Disconnects all connections in the pool, and clears the pool.
+ #
+ # The pool first tries to gain ownership of all connections. If unable to
+ # do so within a timeout interval (default duration is
+ # <tt>spec.config[:checkout_timeout] * 2</tt> seconds), then the pool is forcefully
+ # disconnected without any regard for other connection owning threads.
+ def disconnect!
+ disconnect(false)
+ end
+
+ # Clears the cache which maps classes and re-connects connections that
+ # require reloading.
+ #
+ # Raises:
+ # - ActiveRecord::ExclusiveConnectionTimeoutError if unable to gain ownership of all
+ # connections in the pool within a timeout interval (default duration is
+ # <tt>spec.config[:checkout_timeout] * 2</tt> seconds).
+ def clear_reloadable_connections(raise_on_acquisition_timeout = true)
+ with_exclusively_acquired_all_connections(raise_on_acquisition_timeout) do
+ synchronize do
+ @connections.each do |conn|
+ if conn.in_use?
+ conn.steal!
+ checkin conn
+ end
+ conn.disconnect! if conn.requires_reloading?
+ end
+ @connections.delete_if(&:requires_reloading?)
+ @available.clear
+ end
+ end
+ end
+
+ # Clears the cache which maps classes and re-connects connections that
+ # require reloading.
+ #
+ # The pool first tries to gain ownership of all connections. If unable to
+ # do so within a timeout interval (default duration is
+ # <tt>spec.config[:checkout_timeout] * 2</tt> seconds), then the pool forcefully
+ # clears the cache and reloads connections without any regard for other
+ # connection owning threads.
+ def clear_reloadable_connections!
+ clear_reloadable_connections(false)
+ end
+
+ # Check-out a database connection from the pool, indicating that you want
+ # to use it. You should call #checkin when you no longer need this.
+ #
+ # This is done either by returning and leasing an existing connection, or by
+ # creating a new connection and leasing it.
+ #
+ # If all connections are leased and the pool is at capacity (meaning the
+ # number of currently leased connections is greater than or equal to the
+ # size limit set), an ActiveRecord::ConnectionTimeoutError exception will be raised.
+ #
+ # Returns: an AbstractAdapter object.
+ #
+ # Raises:
+ # - ActiveRecord::ConnectionTimeoutError if no connection can be obtained from the pool.
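+ #
+ # A minimal sketch pairing #checkout with #checkin (the +pool+ variable is
+ # illustrative):
+ #
+ #   conn = pool.checkout
+ #   begin
+ #     conn.execute("SELECT 1")
+ #   ensure
+ #     pool.checkin(conn)
+ #   end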
+ def checkout(checkout_timeout = @checkout_timeout)
+ checkout_and_verify(acquire_connection(checkout_timeout))
+ end
+
+ # Check-in a database connection back into the pool, indicating that you
+ # no longer need this connection.
+ #
+ # +conn+: an AbstractAdapter object, which was obtained earlier by
+ # calling #checkout on this pool.
+ def checkin(conn)
+ conn.lock.synchronize do
+ synchronize do
+ remove_connection_from_thread_cache conn
+
+ conn._run_checkin_callbacks do
+ conn.expire
+ end
+
+ @available.add conn
+ end
+ end
+ end
+
+ # Remove a connection from the connection pool. The connection will
+ # remain open and active but will no longer be managed by this pool.
+ def remove(conn)
+ needs_new_connection = false
+
+ synchronize do
+ remove_connection_from_thread_cache conn
+
+ @connections.delete conn
+ @available.delete conn
+
+ # @available.any_waiting? => true means that prior to removing this
+ # conn, the pool was at its max size (@connections.size == @size).
+ # This would mean that any threads stuck waiting in the queue wouldn't
+ # know they could checkout_new_connection, so let's do it for them.
+ # Because the condition-wait loop is encapsulated in the Queue class
+ # (which in turn is oblivious to the ConnectionPool implementation), threads
+ # that are "stuck" there are helpless. They have no way of creating
+ # new connections and are completely reliant on us feeding available
+ # connections into the Queue.
+ needs_new_connection = @available.any_waiting?
+ end
+
+ # This is intentionally done outside of the synchronized section as we
+ # would like not to hold the main mutex while checking out new connections.
+ # Thus there is some chance that needs_new_connection information is now
+ # stale; we can live with that (bulk_make_new_connections will make
+ # sure not to exceed the pool's @size limit).
+ bulk_make_new_connections(1) if needs_new_connection
+ end
+
+ # Recover lost connections for the pool. A lost connection can occur if
+ # a programmer forgets to checkin a connection at the end of a thread
+ # or a thread dies unexpectedly.
+ def reap
+ stale_connections = synchronize do
+ @connections.select do |conn|
+ conn.in_use? && !conn.owner.alive?
+ end.each do |conn|
+ conn.steal!
+ end
+ end
+
+ stale_connections.each do |conn|
+ if conn.active?
+ conn.reset!
+ checkin conn
+ else
+ remove conn
+ end
+ end
+ end
+
+ def num_waiting_in_queue # :nodoc:
+ @available.num_waiting
+ end
+
+ # Returns the connection pool's usage statistics.
+ # Example:
+ #
+ # ActiveRecord::Base.connection_pool.stat # => { size: 15, connections: 1, busy: 1, dead: 0, idle: 0, waiting: 0, checkout_timeout: 5 }
+ def stat
+ synchronize do
+ {
+ size: size,
+ connections: @connections.size,
+ busy: @connections.count { |c| c.in_use? && c.owner.alive? },
+ dead: @connections.count { |c| c.in_use? && !c.owner.alive? },
+ idle: @connections.count { |c| !c.in_use? },
+ waiting: num_waiting_in_queue,
+ checkout_timeout: checkout_timeout
+ }
+ end
+ end
+
+ private
+ #--
+ # this is unfortunately not concurrent
+ def bulk_make_new_connections(num_new_conns_needed)
+ num_new_conns_needed.times do
+ # try_to_checkout_new_connection will not exceed pool's @size limit
+ if new_conn = try_to_checkout_new_connection
+ # make the new_conn available to the starving threads stuck @available Queue
+ checkin(new_conn)
+ end
+ end
+ end
+
+ #--
+ # From the discussion on GitHub:
+ # https://github.com/rails/rails/pull/14938#commitcomment-6601951
+ # This hook-in method allows for easier monkey-patching fixes needed by
+ # JRuby users that use Fibers.
+ def connection_cache_key(thread)
+ thread
+ end
+
+ # Take control of all existing connections so a "group" action such as
+ # reload/disconnect can be performed safely. It is no longer enough to
+ # wrap it in +synchronize+ because some of the pool's actions are allowed
+ # to be performed outside of the main +synchronize+ block.
+ def with_exclusively_acquired_all_connections(raise_on_acquisition_timeout = true)
+ with_new_connections_blocked do
+ attempt_to_checkout_all_existing_connections(raise_on_acquisition_timeout)
+ yield
+ end
+ end
+
+ def attempt_to_checkout_all_existing_connections(raise_on_acquisition_timeout = true)
+ collected_conns = synchronize do
+ # account for our own connections
+ @connections.select { |conn| conn.owner == Thread.current }
+ end
+
+ newly_checked_out = []
+ timeout_time = Time.now + (@checkout_timeout * 2)
+
+ @available.with_a_bias_for(Thread.current) do
+ loop do
+ synchronize do
+ return if collected_conns.size == @connections.size && @now_connecting == 0
+ remaining_timeout = timeout_time - Time.now
+ remaining_timeout = 0 if remaining_timeout < 0
+ conn = checkout_for_exclusive_access(remaining_timeout)
+ collected_conns << conn
+ newly_checked_out << conn
+ end
+ end
+ end
+ rescue ExclusiveConnectionTimeoutError
+ # <tt>raise_on_acquisition_timeout == false</tt> means we are directed to ignore any
+ # timeouts and are expected to just give up: we've obtained as many connections
+ # as possible. Note that in such a case we don't return any of the
+ # +newly_checked_out+ connections.
+
+ if raise_on_acquisition_timeout
+ release_newly_checked_out = true
+ raise
+ end
+ rescue Exception # if something else went wrong
+ # this can't be a "naked" rescue, because we have should return conns
+ # even for non-StandardErrors
+ release_newly_checked_out = true
+ raise
+ ensure
+ if release_newly_checked_out && newly_checked_out
+ # releasing only those conns that were checked out in this method, conns
+ # checked outside this method (before it was called) are not for us to release
+ newly_checked_out.each { |conn| checkin(conn) }
+ end
+ end
+
+ #--
+ # Must be called in a synchronize block.
+ def checkout_for_exclusive_access(checkout_timeout)
+ checkout(checkout_timeout)
+ rescue ConnectionTimeoutError
+ # this block can't be easily moved into attempt_to_checkout_all_existing_connections's
+ # rescue block, because doing so would put it outside of the synchronize section;
+ # without being in a critical section, thread_report might become inaccurate
+ msg = "could not obtain ownership of all database connections in #{checkout_timeout} seconds".dup
+
+ thread_report = []
+ @connections.each do |conn|
+ unless conn.owner == Thread.current
+ thread_report << "#{conn} is owned by #{conn.owner}"
+ end
+ end
+
+ msg << " (#{thread_report.join(', ')})" if thread_report.any?
+
+ raise ExclusiveConnectionTimeoutError, msg
+ end
+
+ def with_new_connections_blocked
+ synchronize do
+ @threads_blocking_new_connections += 1
+ end
+
+ yield
+ ensure
+ num_new_conns_required = 0
+
+ synchronize do
+ @threads_blocking_new_connections -= 1
+
+ if @threads_blocking_new_connections.zero?
+ @available.clear
+
+ num_new_conns_required = num_waiting_in_queue
+
+ @connections.each do |conn|
+ next if conn.in_use?
+
+ @available.add conn
+ num_new_conns_required -= 1
+ end
+ end
+ end
+
+ bulk_make_new_connections(num_new_conns_required) if num_new_conns_required > 0
+ end
+
+ # Acquire a connection by one of 1) immediately removing one
+ # from the queue of available connections, 2) creating a new
+ # connection if the pool is not at capacity, 3) waiting on the
+ # queue for a connection to become available.
+ #
+ # Raises:
+ # - ActiveRecord::ConnectionTimeoutError if a connection could not be acquired
+ #
+ #--
+ # Implementation detail: the connection returned by +acquire_connection+
+ # will already be "+connection.lease+ -ed" to the current thread.
+ def acquire_connection(checkout_timeout)
+ # NOTE: we rely on +@available.poll+ and +try_to_checkout_new_connection+ to
+ # +conn.lease+ the returned connection (and to do this in a +synchronized+
+ # section). This is not the cleanest implementation, as ideally we would
+ # <tt>synchronize { conn.lease }</tt> in this method, but by leaving it to +@available.poll+
+ # and +try_to_checkout_new_connection+ we can piggyback on +synchronize+ sections
+ # of the said methods and avoid an additional +synchronize+ overhead.
+ if conn = @available.poll || try_to_checkout_new_connection
+ conn
+ else
+ reap
+ @available.poll(checkout_timeout)
+ end
+ end
+
+ #--
+ # if the owner_thread param is omitted, this must be called in a synchronize block
+ def remove_connection_from_thread_cache(conn, owner_thread = conn.owner)
+ @thread_cached_conns.delete_pair(connection_cache_key(owner_thread), conn)
+ end
+ alias_method :release, :remove_connection_from_thread_cache
+
+ def new_connection
+ Base.send(spec.adapter_method, spec.config).tap do |conn|
+ conn.schema_cache = schema_cache.dup if schema_cache
+ end
+ end
+
+ # If the pool is not at its +@size+ limit, establish a new connection. Connecting
+ # to the DB is done outside the main synchronized section.
+ #--
+ # Implementation constraint: a newly established connection returned by this
+ # method must be in the +.leased+ state.
+ def try_to_checkout_new_connection
+ # first, in a synchronized section, check if establishing new conns is allowed
+ # and increment @now_connecting to prevent overstepping this pool's @size
+ # constraint
+ do_checkout = synchronize do
+ if @threads_blocking_new_connections.zero? && (@connections.size + @now_connecting) < @size
+ @now_connecting += 1
+ end
+ end
+ if do_checkout
+ begin
+ # if @now_connecting was successfully incremented, establish the new connection
+ # outside of the synchronized section
+ conn = checkout_new_connection
+ ensure
+ synchronize do
+ if conn
+ adopt_connection(conn)
+ # returned conn needs to be already leased
+ conn.lease
+ end
+ @now_connecting -= 1
+ end
+ end
+ end
+ end
+
+ def adopt_connection(conn)
+ conn.pool = self
+ @connections << conn
+ end
+
+ def checkout_new_connection
+ raise ConnectionNotEstablished unless @automatic_reconnect
+ new_connection
+ end
+
+ def checkout_and_verify(c)
+ c._run_checkout_callbacks do
+ c.verify!
+ end
+ c
+ rescue
+ remove c
+ c.disconnect!
+ raise
+ end
+ end
+
+ # ConnectionHandler is a collection of ConnectionPool objects. It is used
+ # for keeping separate connection pools that connect to different databases.
+ #
+ # For example, suppose that you have 5 models, with the following hierarchy:
+ #
+ # class Author < ActiveRecord::Base
+ # end
+ #
+ # class BankAccount < ActiveRecord::Base
+ # end
+ #
+ # class Book < ActiveRecord::Base
+ # establish_connection :library_db
+ # end
+ #
+ # class ScaryBook < Book
+ # end
+ #
+ # class GoodBook < Book
+ # end
+ #
+ # And a database.yml that looked like this:
+ #
+ # development:
+ # database: my_application
+ # host: localhost
+ #
+ # library_db:
+ # database: library
+ # host: some.library.org
+ #
+ # Your primary database in the development environment is "my_application"
+ # but the Book model connects to a separate database called "library_db"
+ # (this can even be a database on a different machine).
+ #
+ # Book, ScaryBook and GoodBook will all use the same connection pool to
+ # "library_db" while Author, BankAccount, and any other models you create
+ # will use the default connection pool to "my_application".
+ #
+ # The various connection pools are managed by a single instance of
+ # ConnectionHandler accessible via ActiveRecord::Base.connection_handler.
+ # All Active Record models use this handler to determine the connection pool that they
+ # should use.
+ #
+ # The ConnectionHandler class is not coupled with the Active Record models, as it has no knowledge
+ # about the model. The model needs to pass a specification name to the handler,
+ # in order to look up the correct connection pool.
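+ #
+ # A minimal lookup sketch (the "primary" spec name is illustrative and
+ # depends on your configuration):
+ #
+ #   handler = ActiveRecord::Base.connection_handler
+ #   pool    = handler.retrieve_connection_pool("primary")
+ #   pool.with_connection { |conn| conn.execute("SELECT 1") } if pool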
+ class ConnectionHandler
+ def initialize
+ # These caches are keyed by spec.name (ConnectionSpecification#name).
+ @owner_to_pool = Concurrent::Map.new(initial_capacity: 2) do |h, k|
+ h[k] = Concurrent::Map.new(initial_capacity: 2)
+ end
+ end
+
+ def connection_pool_list
+ owner_to_pool.values.compact
+ end
+ alias :connection_pools :connection_pool_list
+
+ def establish_connection(config)
+ resolver = ConnectionSpecification::Resolver.new(Base.configurations)
+ spec = resolver.spec(config)
+
+ remove_connection(spec.name)
+
+ message_bus = ActiveSupport::Notifications.instrumenter
+ payload = {
+ connection_id: object_id
+ }
+ if spec
+ payload[:spec_name] = spec.name
+ payload[:config] = spec.config
+ end
+
+ message_bus.instrument("!connection.active_record", payload) do
+ owner_to_pool[spec.name] = ConnectionAdapters::ConnectionPool.new(spec)
+ end
+
+ owner_to_pool[spec.name]
+ end
+
+ # Returns true if there are any active connections among the connection
+ # pools that the ConnectionHandler is managing.
+ def active_connections?
+ connection_pool_list.any?(&:active_connection?)
+ end
+
+ # Returns any connections in use by the current thread back to the pool,
+ # and also returns connections to the pool cached by threads that are no
+ # longer alive.
+ def clear_active_connections!
+ connection_pool_list.each(&:release_connection)
+ end
+
+ # Clears the cache which maps classes.
+ #
+ # See ConnectionPool#clear_reloadable_connections! for details.
+ def clear_reloadable_connections!
+ connection_pool_list.each(&:clear_reloadable_connections!)
+ end
+
+ def clear_all_connections!
+ connection_pool_list.each(&:disconnect!)
+ end
+
+ # Locate the connection of the nearest super class. This can be an
+ # active or defined connection: if it is the latter, it will be
+ # opened and set as the active connection for the class it was defined
+ # for (not necessarily the current class).
+ def retrieve_connection(spec_name) #:nodoc:
+ pool = retrieve_connection_pool(spec_name)
+ raise ConnectionNotEstablished, "No connection pool with '#{spec_name}' found." unless pool
+ conn = pool.connection
+ raise ConnectionNotEstablished, "No connection for '#{spec_name}' in connection pool" unless conn
+ conn
+ end
+
+ # Returns true if a connection that's accessible to this class has
+ # already been opened.
+ def connected?(spec_name)
+ conn = retrieve_connection_pool(spec_name)
+ conn && conn.connected?
+ end
+
+ # Remove the connection for this class. This will close the active
+ # connection and the defined connection (if they exist). The result
+ # can be used as an argument for #establish_connection, for easily
+ # re-establishing the connection.
+ def remove_connection(spec_name)
+ if pool = owner_to_pool.delete(spec_name)
+ pool.automatic_reconnect = false
+ pool.disconnect!
+ pool.spec.config
+ end
+ end
+
+ # Retrieving the connection pool happens a lot, so we cache it in @owner_to_pool.
+ # This makes retrieving the connection pool O(1) once the process is warm.
+ # When a connection is established or removed, we invalidate the cache.
+ def retrieve_connection_pool(spec_name)
+ owner_to_pool.fetch(spec_name) do
+ # Check if a connection was previously established in an ancestor process,
+ # which may have been forked.
+ if ancestor_pool = pool_from_any_process_for(spec_name)
+ # A connection was established in an ancestor process that must have
+ # subsequently forked. We can't reuse the connection, but we can copy
+ # the specification and establish a new connection with it.
+ establish_connection(ancestor_pool.spec.to_hash).tap do |pool|
+ pool.schema_cache = ancestor_pool.schema_cache if ancestor_pool.schema_cache
+ end
+ else
+ owner_to_pool[spec_name] = nil
+ end
+ end
+ end
+
+ private
+
+ def owner_to_pool
+ @owner_to_pool[Process.pid]
+ end
+
+ def pool_from_any_process_for(spec_name)
+ owner_to_pool = @owner_to_pool.values.reverse.find { |v| v[spec_name] }
+ owner_to_pool && owner_to_pool[spec_name]
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/connection_adapters/abstract/database_limits.rb b/activerecord/lib/active_record/connection_adapters/abstract/database_limits.rb
new file mode 100644
index 0000000000..7a9e7add24
--- /dev/null
+++ b/activerecord/lib/active_record/connection_adapters/abstract/database_limits.rb
@@ -0,0 +1,67 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ module ConnectionAdapters # :nodoc:
+ module DatabaseLimits
+ # Returns the maximum length of a table alias.
+ def table_alias_length
+ 255
+ end
+
+ # Returns the maximum length of a column name.
+ def column_name_length
+ 64
+ end
+
+ # Returns the maximum length of a table name.
+ def table_name_length
+ 64
+ end
+
+ # Returns the maximum allowed length for an index name. This
+ # limit is enforced by \Rails and is less than or equal to
+ # #index_name_length. The gap between the two allows internal \Rails
+ # operations to use prefixes in temporary operations.
+ def allowed_index_name_length
+ index_name_length
+ end
+
+ # Returns the maximum length of an index name.
+ def index_name_length
+ 64
+ end
+
+ # Returns the maximum number of columns per table.
+ def columns_per_table
+ 1024
+ end
+
+ # Returns the maximum number of indexes per table.
+ def indexes_per_table
+ 16
+ end
+
+ # Returns the maximum number of columns in a multicolumn index.
+ def columns_per_multicolumn_index
+ 16
+ end
+
+ # Returns the maximum number of elements in an IN (x,y,z) clause.
+ # +nil+ means no limit.
+ def in_clause_length
+ nil
+ end
+
+ # Returns the maximum length of an SQL query.
+ def sql_query_length
+ 1048575
+ end
+
+ # Returns maximum number of joins in a single query.
+ def joins_per_query
+ 256
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/connection_adapters/abstract/database_statements.rb b/activerecord/lib/active_record/connection_adapters/abstract/database_statements.rb
new file mode 100644
index 0000000000..314a35207b
--- /dev/null
+++ b/activerecord/lib/active_record/connection_adapters/abstract/database_statements.rb
@@ -0,0 +1,485 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ module ConnectionAdapters # :nodoc:
+ module DatabaseStatements
+ def initialize
+ super
+ reset_transaction
+ end
+
+ # Converts an arel AST to SQL
+ def to_sql(arel_or_sql_string, binds = [])
+ if arel_or_sql_string.respond_to?(:ast)
+ unless binds.empty?
+ raise "Passing bind parameters with an arel AST is forbidden. " \
+ "The values must be stored on the AST directly"
+ end
+ sql, binds = visitor.accept(arel_or_sql_string.ast, collector).value
+ [sql.freeze, binds || []]
+ else
+ [arel_or_sql_string.dup.freeze, binds]
+ end
+ end
+
+ # This is used in the StatementCache object. It returns an object that
+ # can be used to query the database repeatedly.
+ def cacheable_query(klass, arel) # :nodoc:
+ if prepared_statements
+ sql, binds = visitor.accept(arel.ast, collector).value
+ query = klass.query(sql)
+ else
+ collector = PartialQueryCollector.new
+ parts, binds = visitor.accept(arel.ast, collector).value
+ query = klass.partial_query(parts)
+ end
+ [query, binds]
+ end
+
+ # Returns an ActiveRecord::Result instance.
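+ #
+ # A minimal sketch (the table name and returned rows are illustrative):
+ #
+ #   result = select_all("SELECT id FROM posts LIMIT 2")
+ #   result.rows # => [[1], [2]]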
+ def select_all(arel, name = nil, binds = [], preparable: nil)
+ arel = arel_from_relation(arel)
+ sql, binds = to_sql(arel, binds)
+ if !prepared_statements || (arel.is_a?(String) && preparable.nil?)
+ preparable = false
+ else
+ preparable = visitor.preparable
+ end
+ if prepared_statements && preparable
+ select_prepared(sql, name, binds)
+ else
+ select(sql, name, binds)
+ end
+ end
+
+ # Returns a record hash with the column names as keys and column values
+ # as values.
+ def select_one(arel, name = nil, binds = [])
+ select_all(arel, name, binds).first
+ end
+
+ # Returns a single value from a record
+ def select_value(arel, name = nil, binds = [])
+ single_value_from_rows(select_rows(arel, name, binds))
+ end
+
+ # Returns an array of the values of the first column in a select:
+ # select_values("SELECT id FROM companies LIMIT 3") => [1,2,3]
+ def select_values(arel, name = nil, binds = [])
+ select_rows(arel, name, binds).map(&:first)
+ end
+
+ # Returns an array of arrays containing the field values.
+ # Order is the same as that returned by +columns+.
+ def select_rows(arel, name = nil, binds = [])
+ select_all(arel, name, binds).rows
+ end
+
+ def query_value(sql, name = nil) # :nodoc:
+ single_value_from_rows(query(sql, name))
+ end
+
+ def query_values(sql, name = nil) # :nodoc:
+ query(sql, name).map(&:first)
+ end
+
+ def query(sql, name = nil) # :nodoc:
+ exec_query(sql, name).rows
+ end
+
+ # Executes the SQL statement in the context of this connection and returns
+ # the raw result from the connection adapter.
+ # Note: depending on your database connector, the result returned by this
+ # method may be manually memory managed. Consider using the exec_query
+ # wrapper instead.
+ def execute(sql, name = nil)
+ raise NotImplementedError
+ end
+
+ # Executes +sql+ statement in the context of this connection using
+ # +binds+ as the bind substitutes. +name+ is logged along with
+ # the executed +sql+ statement.
+ def exec_query(sql, name = "SQL", binds = [], prepare: false)
+ raise NotImplementedError
+ end
+
+ # Executes insert +sql+ statement in the context of this connection using
+ # +binds+ as the bind substitutes. +name+ is logged along with
+ # the executed +sql+ statement.
+ def exec_insert(sql, name = nil, binds = [], pk = nil, sequence_name = nil)
+ sql, binds = sql_for_insert(sql, pk, nil, sequence_name, binds)
+ exec_query(sql, name, binds)
+ end
+
+ # Executes delete +sql+ statement in the context of this connection using
+ # +binds+ as the bind substitutes. +name+ is logged along with
+ # the executed +sql+ statement.
+ def exec_delete(sql, name = nil, binds = [])
+ exec_query(sql, name, binds)
+ end
+
+ # Executes the truncate statement.
+ def truncate(table_name, name = nil)
+ raise NotImplementedError
+ end
+
+ # Executes update +sql+ statement in the context of this connection using
+ # +binds+ as the bind substitutes. +name+ is logged along with
+ # the executed +sql+ statement.
+ def exec_update(sql, name = nil, binds = [])
+ exec_query(sql, name, binds)
+ end
+
+ # Executes an INSERT query and returns the new record's ID
+ #
+ # +id_value+ will be returned unless the value is +nil+, in
+ # which case the database will attempt to calculate the last inserted
+ # id and return that value.
+ #
+ # If the next id was calculated in advance (as in Oracle), it should be
+ # passed in as +id_value+.
+ def insert(arel, name = nil, pk = nil, id_value = nil, sequence_name = nil)
+ sql, binds = to_sql(arel)
+ value = exec_insert(sql, name, binds, pk, sequence_name)
+ id_value || last_inserted_id(value)
+ end
+ alias create insert
+
+ # Executes the update statement and returns the number of rows affected.
+ def update(arel, name = nil)
+ sql, binds = to_sql(arel)
+ exec_update(sql, name, binds)
+ end
+
+ # Executes the delete statement and returns the number of rows affected.
+ def delete(arel, name = nil)
+ sql, binds = to_sql(arel)
+ exec_delete(sql, name, binds)
+ end
+
+ # Returns +true+ when the connection adapter supports prepared statement
+ # caching, otherwise returns +false+
+ def supports_statement_cache? # :nodoc:
+ true
+ end
+ deprecate :supports_statement_cache?
+
+ # Runs the given block in a database transaction, and returns the result
+ # of the block.
+ #
+ # == Nested transactions support
+ #
+ # Most databases don't support true nested transactions. At the time of
+ # writing, the only database that supports true nested transactions that
+ # we're aware of is MS-SQL.
+ #
+ # In order to get around this problem, #transaction will emulate the effect
+ # of nested transactions, by using savepoints:
+ # http://dev.mysql.com/doc/refman/5.7/en/savepoint.html
+ # Savepoints are supported by MySQL and PostgreSQL. SQLite3 version >= '3.6.8'
+ # supports savepoints.
+ #
+ # It is safe to call this method if a database transaction is already open,
+ # i.e. if #transaction is called within another #transaction block. In case
+ # of a nested call, #transaction will behave as follows:
+ #
+ # - The block will be run without doing anything. All database statements
+ # that happen within the block are effectively appended to the already
+ # open database transaction.
+ # - However, if +:requires_new+ is set, the block will be wrapped in a
+ # database savepoint acting as a sub-transaction.
+ #
+ # === Caveats
+ #
+ # MySQL doesn't support DDL transactions. If you perform a DDL operation,
+ # then any created savepoints will be automatically released. For example,
+ # if you've created a savepoint, then you execute a CREATE TABLE statement,
+ # then the savepoint that was created will be automatically released.
+ #
+ # This means that, on MySQL, you shouldn't execute DDL operations inside
+ # a #transaction call that you know might create a savepoint. Otherwise,
+ # #transaction will raise exceptions when it tries to release the
+ # already-automatically-released savepoints:
+ #
+ # Model.connection.transaction do # BEGIN
+ # Model.connection.transaction(requires_new: true) do # CREATE SAVEPOINT active_record_1
+ # Model.connection.create_table(...)
+ # # active_record_1 now automatically released
+ # end # RELEASE SAVEPOINT active_record_1 <--- BOOM! database error!
+ # end
+ #
+ # == Transaction isolation
+ #
+ # If your database supports setting the isolation level for a transaction, you can set
+ # it like so:
+ #
+ # Post.transaction(isolation: :serializable) do
+ # # ...
+ # end
+ #
+ # Valid isolation levels are:
+ #
+ # * <tt>:read_uncommitted</tt>
+ # * <tt>:read_committed</tt>
+ # * <tt>:repeatable_read</tt>
+ # * <tt>:serializable</tt>
+ #
+ # You should consult the documentation for your database to understand the
+ # semantics of these different levels:
+ #
+ # * https://www.postgresql.org/docs/current/static/transaction-iso.html
+ # * https://dev.mysql.com/doc/refman/5.7/en/set-transaction.html
+ #
+ # An ActiveRecord::TransactionIsolationError will be raised if:
+ #
+ # * The adapter does not support setting the isolation level
+ # * You are joining an existing open transaction
+ # * You are creating a nested (savepoint) transaction
+ #
+ # The mysql2 and postgresql adapters support setting the transaction
+ # isolation level.
+ def transaction(requires_new: nil, isolation: nil, joinable: true)
+ if !requires_new && current_transaction.joinable?
+ if isolation
+ raise ActiveRecord::TransactionIsolationError, "cannot set isolation when joining a transaction"
+ end
+ yield
+ else
+ transaction_manager.within_new_transaction(isolation: isolation, joinable: joinable) { yield }
+ end
+ rescue ActiveRecord::Rollback
+ # rollbacks are silently swallowed
+ end
+
+ attr_reader :transaction_manager #:nodoc:
+
+ delegate :within_new_transaction, :open_transactions, :current_transaction, :begin_transaction, :commit_transaction, :rollback_transaction, to: :transaction_manager
+
+ def transaction_open?
+ current_transaction.open?
+ end
+
+ def reset_transaction #:nodoc:
+ @transaction_manager = ConnectionAdapters::TransactionManager.new(self)
+ end
+
+ # Register a record with the current transaction so that its after_commit and after_rollback callbacks
+ # can be called.
+ def add_transaction_record(record)
+ current_transaction.add_record(record)
+ end
+
+ def transaction_state
+ current_transaction.state
+ end
+
+ # Begins the transaction (and turns off auto-committing).
+ def begin_db_transaction() end
+
+ def transaction_isolation_levels
+ {
+ read_uncommitted: "READ UNCOMMITTED",
+ read_committed: "READ COMMITTED",
+ repeatable_read: "REPEATABLE READ",
+ serializable: "SERIALIZABLE"
+ }
+ end
+
+ # Begins the transaction with the isolation level set. Raises an error by
+ # default; adapters that support setting the isolation level should implement
+ # this method.
+ def begin_isolated_db_transaction(isolation)
+ raise ActiveRecord::TransactionIsolationError, "adapter does not support setting transaction isolation"
+ end
+
+ # Commits the transaction (and turns on auto-committing).
+ def commit_db_transaction() end
+
+ # Rolls back the transaction (and turns on auto-committing). Must be
+ # done if the transaction block raises an exception or returns false.
+ def rollback_db_transaction
+ exec_rollback_db_transaction
+ end
+
+ def exec_rollback_db_transaction() end #:nodoc:
+
+ def rollback_to_savepoint(name = nil)
+ exec_rollback_to_savepoint(name)
+ end
+
+ def default_sequence_name(table, column)
+ nil
+ end
+
+ # Set the sequence to the max value of the table's column.
+ def reset_sequence!(table, column, sequence = nil)
+ # Do nothing by default. Implement for PostgreSQL, Oracle, ...
+ end
+
+ # Inserts the given fixture into the table. Overridden in adapters that require
+ # something beyond a simple insert (e.g. Oracle).
+ # Most adapters should implement `insert_fixtures`, which leverages a bulk SQL insert.
+ # We keep this method to provide a fallback
+ # for databases like SQLite that do not support bulk inserts.
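+ #
+ # A minimal sketch (the table and column names are illustrative):
+ #
+ #   insert_fixture({ "title" => "First post" }, "posts")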
+ def insert_fixture(fixture, table_name)
+ fixture = fixture.stringify_keys
+
+ columns = schema_cache.columns_hash(table_name)
+ binds = fixture.map do |name, value|
+ if column = columns[name]
+ type = lookup_cast_type_from_column(column)
+ Relation::QueryAttribute.new(name, value, type)
+ else
+ raise Fixture::FixtureError, %(table "#{table_name}" has no column named #{name.inspect}.)
+ end
+ end
+
+ table = Arel::Table.new(table_name)
+
+ values = binds.map do |bind|
+ value = with_yaml_fallback(bind.value_for_database)
+ [table[bind.name], value]
+ end
+
+ manager = Arel::InsertManager.new
+ manager.into(table)
+ manager.insert(values)
+ execute manager.to_sql, "Fixture Insert"
+ end
+
+ # Inserts a set of fixtures into the table. Overridden in adapters that require
+ # something beyond a simple insert (eg. Oracle).
+ def insert_fixtures(fixtures, table_name)
+ return if fixtures.empty?
+
+ columns = schema_cache.columns_hash(table_name)
+
+ values = fixtures.map do |fixture|
+ fixture = fixture.stringify_keys
+
+ unknown_columns = fixture.keys - columns.keys
+ if unknown_columns.any?
+ raise Fixture::FixtureError, %(table "#{table_name}" has no columns named #{unknown_columns.map(&:inspect).join(', ')}.)
+ end
+
+ columns.map do |name, column|
+ if fixture.key?(name)
+ type = lookup_cast_type_from_column(column)
+ bind = Relation::QueryAttribute.new(name, fixture[name], type)
+ with_yaml_fallback(bind.value_for_database)
+ else
+ Arel.sql("DEFAULT")
+ end
+ end
+ end
+
+ table = Arel::Table.new(table_name)
+ manager = Arel::InsertManager.new
+ manager.into(table)
+ columns.each_key { |column| manager.columns << table[column] }
+ manager.values = manager.create_values_list(values)
+ execute manager.to_sql, "Fixtures Insert"
+ end
+
+ def empty_insert_statement_value
+ "DEFAULT VALUES"
+ end
+
+ # Sanitizes the given LIMIT parameter in order to prevent SQL injection.
+ #
+ # The +limit+ may be anything that can evaluate to a string via #to_s. It
+ # should look like an integer, or an Arel SQL literal.
+ #
+ # Returns Integer and Arel::Nodes::SqlLiteral limits as is.
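+ #
+ # A few illustrative calls:
+ #
+ #   sanitize_limit(10)                   # => 10
+ #   sanitize_limit("10")                 # => 10
+ #   sanitize_limit("10; DROP TABLE x")   # raises ArgumentError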
+ def sanitize_limit(limit)
+ if limit.is_a?(Integer) || limit.is_a?(Arel::Nodes::SqlLiteral)
+ limit
+ else
+ Integer(limit)
+ end
+ end
+
+ # The default strategy for an UPDATE with joins is to use a subquery. This doesn't work
+ # on MySQL (even when aliasing the tables), but MySQL allows using JOIN directly in
+ # an UPDATE statement, so in the MySQL adapters we redefine this to do that.
+ def join_to_update(update, select, key) # :nodoc:
+ subselect = subquery_for(key, select)
+
+ update.where key.in(subselect)
+ end
+ alias join_to_delete join_to_update
+
+ private
+
+ # Returns a subquery for the given key using the join information.
+ def subquery_for(key, select)
+ subselect = select.clone
+ subselect.projections = [key]
+ subselect
+ end
+
+ # Returns an ActiveRecord::Result instance.
+ def select(sql, name = nil, binds = [])
+ exec_query(sql, name, binds, prepare: false)
+ end
+
+ def select_prepared(sql, name = nil, binds = [])
+ exec_query(sql, name, binds, prepare: true)
+ end
+
+ def sql_for_insert(sql, pk, id_value, sequence_name, binds)
+ [sql, binds]
+ end
+
+ def last_inserted_id(result)
+ single_value_from_rows(result.rows)
+ end
+
+ def single_value_from_rows(rows)
+ row = rows.first
+ row && row.first
+ end
+
+ def arel_from_relation(relation)
+ if relation.is_a?(Relation)
+ relation.arel
+ else
+ relation
+ end
+ end
+
+ # Fixture values are quoted by Arel; however, non-scalar values such as
+ # Hash and Array are not quotable. In this case we want to convert
+ # the column value to YAML.
+ def with_yaml_fallback(value)
+ if value.is_a?(Hash) || value.is_a?(Array)
+ YAML.dump(value)
+ else
+ value
+ end
+ end
+
+ class PartialQueryCollector
+ def initialize
+ @parts = []
+ @binds = []
+ end
+
+ def << str
+ @parts << str
+ self
+ end
+
+ def add_bind obj
+ @binds << obj
+ @parts << Arel::Nodes::BindParam.new(1)
+ self
+ end
+
+ def value
+ [@parts, @binds]
+ end
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/connection_adapters/abstract/query_cache.rb b/activerecord/lib/active_record/connection_adapters/abstract/query_cache.rb
new file mode 100644
index 0000000000..ecf5201d12
--- /dev/null
+++ b/activerecord/lib/active_record/connection_adapters/abstract/query_cache.rb
@@ -0,0 +1,138 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ module ConnectionAdapters # :nodoc:
+ module QueryCache
+ class << self
+ def included(base) #:nodoc:
+ dirties_query_cache base, :insert, :update, :delete, :rollback_to_savepoint, :rollback_db_transaction
+
+ base.set_callback :checkout, :after, :configure_query_cache!
+ base.set_callback :checkin, :after, :disable_query_cache!
+ end
+
+ def dirties_query_cache(base, *method_names)
+ method_names.each do |method_name|
+ base.class_eval <<-end_code, __FILE__, __LINE__ + 1
+ def #{method_name}(*)
+ clear_query_cache if @query_cache_enabled
+ super
+ end
+ end_code
+ end
+ end
+ end
+
+ module ConnectionPoolConfiguration
+ def initialize(*)
+ super
+ @query_cache_enabled = Concurrent::Map.new { false }
+ end
+
+ def enable_query_cache!
+ @query_cache_enabled[connection_cache_key(Thread.current)] = true
+ connection.enable_query_cache! if active_connection?
+ end
+
+ def disable_query_cache!
+ @query_cache_enabled.delete connection_cache_key(Thread.current)
+ connection.disable_query_cache! if active_connection?
+ end
+
+ def query_cache_enabled
+ @query_cache_enabled[connection_cache_key(Thread.current)]
+ end
+ end
+
+ attr_reader :query_cache, :query_cache_enabled
+
+ def initialize(*)
+ super
+ @query_cache = Hash.new { |h, sql| h[sql] = {} }
+ @query_cache_enabled = false
+ end
+
+ # Enable the query cache within the block.
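+ #
+ # A minimal sketch (the +Post+ model is illustrative):
+ #
+ #   ActiveRecord::Base.connection.cache do
+ #     Post.first # hits the database
+ #     Post.first # served from the query cache
+ #   end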
+ def cache
+ old, @query_cache_enabled = @query_cache_enabled, true
+ yield
+ ensure
+ @query_cache_enabled = old
+ clear_query_cache unless @query_cache_enabled
+ end
+
+ def enable_query_cache!
+ @query_cache_enabled = true
+ end
+
+ def disable_query_cache!
+ @query_cache_enabled = false
+ clear_query_cache
+ end
+
+ # Disable the query cache within the block.
+ def uncached
+ old, @query_cache_enabled = @query_cache_enabled, false
+ yield
+ ensure
+ @query_cache_enabled = old
+ end
+
+ # Clears the query cache.
+ #
+ # One reason you may wish to call this method explicitly is between queries
+ # that ask the database to randomize results. Otherwise the cache would see
+ # the same SQL query and repeatedly return the same result each time, silently
+ # undermining the randomness you were expecting.
+ def clear_query_cache
+ @lock.synchronize do
+ @query_cache.clear
+ end
+ end
+
+ def select_all(arel, name = nil, binds = [], preparable: nil)
+ if @query_cache_enabled && !locked?(arel)
+ arel = arel_from_relation(arel)
+ sql, binds = to_sql(arel, binds)
+ cache_sql(sql, name, binds) { super(sql, name, binds, preparable: preparable) }
+ else
+ super
+ end
+ end
+
+ private
+
+ def cache_sql(sql, name, binds)
+ @lock.synchronize do
+ result =
+ if @query_cache[sql].key?(binds)
+ ActiveSupport::Notifications.instrument(
+ "sql.active_record",
+ sql: sql,
+ binds: binds,
+ type_casted_binds: -> { type_casted_binds(binds) },
+ name: name,
+ connection_id: object_id,
+ cached: true,
+ )
+ @query_cache[sql][binds]
+ else
+ @query_cache[sql][binds] = yield
+ end
+ result.dup
+ end
+ end
+
+ # If arel is locked, this is a SELECT ... FOR UPDATE or similar. Such
+ # queries should not be cached.
+ def locked?(arel)
+ arel = arel.arel if arel.is_a?(Relation)
+ arel.respond_to?(:locked) && arel.locked
+ end
+
+ def configure_query_cache!
+ enable_query_cache! if pool.query_cache_enabled
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/connection_adapters/abstract/quoting.rb b/activerecord/lib/active_record/connection_adapters/abstract/quoting.rb
new file mode 100644
index 0000000000..7b83bc319c
--- /dev/null
+++ b/activerecord/lib/active_record/connection_adapters/abstract/quoting.rb
@@ -0,0 +1,216 @@
+# frozen_string_literal: true
+
+require "active_support/core_ext/big_decimal/conversions"
+require "active_support/multibyte/chars"
+
+module ActiveRecord
+ module ConnectionAdapters # :nodoc:
+ module Quoting
+ # Quotes the column value to help prevent
+ # {SQL injection attacks}[http://en.wikipedia.org/wiki/SQL_injection].
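+ #
+ # For example, with the default quoting rules (adapters may override them):
+ #
+ #   quote("O'Reilly") # => "'O''Reilly'"
+ #   quote(nil)        # => "NULL"
+ #   quote(1.5)        # => "1.5"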
+ def quote(value)
+ value = id_value_for_database(value) if value.is_a?(Base)
+
+ if value.respond_to?(:quoted_id)
+ at = value.method(:quoted_id).source_location
+ at &&= " at %s:%d" % at
+
+ owner = value.method(:quoted_id).owner.to_s
+ klass = value.class.to_s
+ klass += "(#{owner})" unless owner == klass
+
+ ActiveSupport::Deprecation.warn \
+ "Defining #quoted_id is deprecated and will be ignored in Rails 5.2. (defined on #{klass}#{at})"
+ return value.quoted_id
+ end
+
+ if value.respond_to?(:value_for_database)
+ value = value.value_for_database
+ end
+
+ _quote(value)
+ end
+
+ # Cast a +value+ to a type that the database understands. For example,
+ # SQLite does not understand dates, so this method will convert a Date
+ # to a String.
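+ #
+ # For example, with the default rules (adapters may override them):
+ #
+ #   type_cast(true)                 # => true
+ #   type_cast(Date.new(2017, 1, 1)) # => "2017-01-01"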
+ def type_cast(value, column = nil)
+ value = id_value_for_database(value) if value.is_a?(Base)
+
+ if value.respond_to?(:quoted_id) && value.respond_to?(:id)
+ return value.id
+ end
+
+ if column
+ value = type_cast_from_column(column, value)
+ end
+
+ _type_cast(value)
+ rescue TypeError
+ to_type = column ? " to #{column.type}" : ""
+ raise TypeError, "can't cast #{value.class}#{to_type}"
+ end
+
+ # If you are having to call this function, you are likely doing something
+ # wrong. The column does not have sufficient type information if the user
+ # provided a custom type on the class level either explicitly (via
+ # Attributes::ClassMethods#attribute) or implicitly (via
+ # AttributeMethods::Serialization::ClassMethods#serialize, +time_zone_aware_attributes+).
+ # In almost all cases, the SQL type should only be used to change quoting behavior
+ # when the primitive used to represent the type doesn't sufficiently reflect the
+ # differences (varchar vs binary, for example). The type used to get this primitive
+ # should have been provided before reaching the connection adapter.
+ def type_cast_from_column(column, value) # :nodoc:
+ if column
+ type = lookup_cast_type_from_column(column)
+ type.serialize(value)
+ else
+ value
+ end
+ end
+
+ # See docs for #type_cast_from_column
+ def lookup_cast_type_from_column(column) # :nodoc:
+ lookup_cast_type(column.sql_type)
+ end
+
+ # Quotes a string, escaping any ' (single quote) and \ (backslash)
+ # characters.
+ def quote_string(s)
+ s.gsub('\\'.freeze, '\&\&'.freeze).gsub("'".freeze, "''".freeze) # ' (for ruby-mode)
+ end
+
+ # Quotes the column name. Defaults to no quoting.
+ def quote_column_name(column_name)
+ column_name.to_s
+ end
+
+ # Quotes the table name. Defaults to column name quoting.
+ def quote_table_name(table_name)
+ quote_column_name(table_name)
+ end
+
+ # Override to return the quoted table name for assignment. Defaults to
+ # table quoting.
+ #
+ # This works for mysql2 where table.column can be used to
+ # resolve ambiguity.
+ #
+ # We override this in the sqlite3 and postgresql adapters to use only
+ # the column name (as per syntax requirements).
+ def quote_table_name_for_assignment(table, attr)
+ quote_table_name("#{table}.#{attr}")
+ end
+
+ def quote_default_expression(value, column) # :nodoc:
+ if value.is_a?(Proc)
+ value.call
+ else
+ value = lookup_cast_type(column.sql_type).serialize(value)
+ quote(value)
+ end
+ end
+
+ def quoted_true
+ "TRUE".freeze
+ end
+
+ def unquoted_true
+ true
+ end
+
+ def quoted_false
+ "FALSE".freeze
+ end
+
+ def unquoted_false
+ false
+ end
+
+ # Quote date/time values for use in SQL input. Includes microseconds
+ # if the value is a Time responding to usec.
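+ #
+ # A minimal sketch (the exact output depends on ActiveRecord::Base.default_timezone):
+ #
+ #   quoted_date(Time.utc(2017, 2, 14, 12, 0, 0)) # => "2017-02-14 12:00:00"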
+ def quoted_date(value)
+ if value.acts_like?(:time)
+ zone_conversion_method = ActiveRecord::Base.default_timezone == :utc ? :getutc : :getlocal
+
+ if value.respond_to?(zone_conversion_method)
+ value = value.send(zone_conversion_method)
+ end
+ end
+
+ result = value.to_s(:db)
+ if value.respond_to?(:usec) && value.usec > 0
+ "#{result}.#{sprintf("%06d", value.usec)}"
+ else
+ result
+ end
+ end
+
+ def quoted_time(value) # :nodoc:
+ quoted_date(value).sub(/\A2000-01-01 /, "")
+ end
+
+ def quoted_binary(value) # :nodoc:
+ "'#{quote_string(value.to_s)}'"
+ end
+
+ def type_casted_binds(binds) # :nodoc:
+ if binds.first.is_a?(Array)
+ binds.map { |column, value| type_cast(value, column) }
+ else
+ binds.map { |attr| type_cast(attr.value_for_database) }
+ end
+ end
+
+ private
+ def lookup_cast_type(sql_type)
+ type_map.lookup(sql_type)
+ end
+
+ def id_value_for_database(value)
+ if primary_key = value.class.primary_key
+ value.instance_variable_get(:@attributes)[primary_key].value_for_database
+ end
+ end
+
+ def types_which_need_no_typecasting
+ [nil, Numeric, String]
+ end
+
+ def _quote(value)
+ case value
+ when String, ActiveSupport::Multibyte::Chars
+ "'#{quote_string(value.to_s)}'"
+ when true then quoted_true
+ when false then quoted_false
+ when nil then "NULL"
+ # BigDecimals need to be put in a non-normalized form and quoted.
+ when BigDecimal then value.to_s("F")
+ when Numeric, ActiveSupport::Duration then value.to_s
+ when Type::Binary::Data then quoted_binary(value)
+ when Type::Time::Value then "'#{quoted_time(value)}'"
+ when Date, Time then "'#{quoted_date(value)}'"
+ when Symbol then "'#{quote_string(value.to_s)}'"
+ when Class then "'#{value}'"
+ else raise TypeError, "can't quote #{value.class.name}"
+ end
+ end
+
+ def _type_cast(value)
+ case value
+ when Symbol, ActiveSupport::Multibyte::Chars, Type::Binary::Data
+ value.to_s
+ when true then unquoted_true
+ when false then unquoted_false
+ # BigDecimals need to be put in a non-normalized form and quoted.
+ when BigDecimal then value.to_s("F")
+ when Type::Time::Value then quoted_time(value)
+ when Date, Time then quoted_date(value)
+ when *types_which_need_no_typecasting
+ value
+ else raise TypeError
+ end
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/connection_adapters/abstract/savepoints.rb b/activerecord/lib/active_record/connection_adapters/abstract/savepoints.rb
new file mode 100644
index 0000000000..52a796b926
--- /dev/null
+++ b/activerecord/lib/active_record/connection_adapters/abstract/savepoints.rb
@@ -0,0 +1,23 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ module ConnectionAdapters
+ module Savepoints
+ def current_savepoint_name
+ current_transaction.savepoint_name
+ end
+
+ def create_savepoint(name = current_savepoint_name)
+ execute("SAVEPOINT #{name}")
+ end
+
+ def exec_rollback_to_savepoint(name = current_savepoint_name)
+ execute("ROLLBACK TO SAVEPOINT #{name}")
+ end
+
+ def release_savepoint(name = current_savepoint_name)
+ execute("RELEASE SAVEPOINT #{name}")
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/connection_adapters/abstract/schema_creation.rb b/activerecord/lib/active_record/connection_adapters/abstract/schema_creation.rb
new file mode 100644
index 0000000000..8bf3879a4c
--- /dev/null
+++ b/activerecord/lib/active_record/connection_adapters/abstract/schema_creation.rb
@@ -0,0 +1,145 @@
+# frozen_string_literal: true
+
+require "active_support/core_ext/string/strip"
+
+module ActiveRecord
+ module ConnectionAdapters
+ class AbstractAdapter
+ class SchemaCreation # :nodoc:
+ def initialize(conn)
+ @conn = conn
+ @cache = {}
+ end
+
+ def accept(o)
+ m = @cache[o.class] ||= "visit_#{o.class.name.split('::').last}"
+ send m, o
+ end
+
+ delegate :quote_column_name, :quote_table_name, :quote_default_expression, :type_to_sql,
+ :options_include_default?, :supports_indexes_in_create?, :supports_foreign_keys_in_create?, :foreign_key_options, to: :@conn
+ private :quote_column_name, :quote_table_name, :quote_default_expression, :type_to_sql,
+ :options_include_default?, :supports_indexes_in_create?, :supports_foreign_keys_in_create?, :foreign_key_options
+
+ private
+
+ def visit_AlterTable(o)
+ sql = "ALTER TABLE #{quote_table_name(o.name)} ".dup
+ sql << o.adds.map { |col| accept col }.join(" ")
+ sql << o.foreign_key_adds.map { |fk| visit_AddForeignKey fk }.join(" ")
+ sql << o.foreign_key_drops.map { |fk| visit_DropForeignKey fk }.join(" ")
+ end
+
+ def visit_ColumnDefinition(o)
+ o.sql_type = type_to_sql(o.type, o.options)
+ column_sql = "#{quote_column_name(o.name)} #{o.sql_type}".dup
+ add_column_options!(column_sql, column_options(o)) unless o.type == :primary_key
+ column_sql
+ end
+
+ def visit_AddColumnDefinition(o)
+ "ADD #{accept(o.column)}".dup
+ end
+
+ def visit_TableDefinition(o)
+ create_sql = "CREATE#{' TEMPORARY' if o.temporary} TABLE #{quote_table_name(o.name)} ".dup
+
+ statements = o.columns.map { |c| accept c }
+ statements << accept(o.primary_keys) if o.primary_keys
+
+ if supports_indexes_in_create?
+ statements.concat(o.indexes.map { |column_name, options| index_in_create(o.name, column_name, options) })
+ end
+
+ if supports_foreign_keys_in_create?
+ statements.concat(o.foreign_keys.map { |to_table, options| foreign_key_in_create(o.name, to_table, options) })
+ end
+
+ create_sql << "(#{statements.join(', ')})" if statements.present?
+ add_table_options!(create_sql, table_options(o))
+ create_sql << " AS #{to_sql(o.as)}" if o.as
+ create_sql
+ end
+
+ def visit_PrimaryKeyDefinition(o)
+ "PRIMARY KEY (#{o.name.join(', ')})"
+ end
+
+ def visit_ForeignKeyDefinition(o)
+ sql = <<-SQL.strip_heredoc
+ CONSTRAINT #{quote_column_name(o.name)}
+ FOREIGN KEY (#{quote_column_name(o.column)})
+ REFERENCES #{quote_table_name(o.to_table)} (#{quote_column_name(o.primary_key)})
+ SQL
+ sql << " #{action_sql('DELETE', o.on_delete)}" if o.on_delete
+ sql << " #{action_sql('UPDATE', o.on_update)}" if o.on_update
+ sql
+ end
+
+ def visit_AddForeignKey(o)
+ "ADD #{accept(o)}"
+ end
+
+ def visit_DropForeignKey(name)
+ "DROP CONSTRAINT #{quote_column_name(name)}"
+ end
+
+ def table_options(o)
+ table_options = {}
+ table_options[:comment] = o.comment
+ table_options[:options] = o.options
+ table_options
+ end
+
+ def add_table_options!(create_sql, options)
+ if options_sql = options[:options]
+ create_sql << " #{options_sql}"
+ end
+ end
+
+ def column_options(o)
+ o.options.merge(column: o)
+ end
+
+ def add_column_options!(sql, options)
+ sql << " DEFAULT #{quote_default_expression(options[:default], options[:column])}" if options_include_default?(options)
+ # must explicitly check for :null to allow change_column to work on migrations
+ if options[:null] == false
+ sql << " NOT NULL"
+ end
+ if options[:auto_increment] == true
+ sql << " AUTO_INCREMENT"
+ end
+ if options[:primary_key] == true
+ sql << " PRIMARY KEY"
+ end
+ sql
+ end
+
+ def to_sql(sql)
+ sql = sql.to_sql if sql.respond_to?(:to_sql)
+ sql
+ end
+
+ def foreign_key_in_create(from_table, to_table, options)
+ options = foreign_key_options(from_table, to_table, options)
+ accept ForeignKeyDefinition.new(from_table, to_table, options)
+ end
+
+ def action_sql(action, dependency)
+ case dependency
+ when :nullify then "ON #{action} SET NULL"
+ when :cascade then "ON #{action} CASCADE"
+ when :restrict then "ON #{action} RESTRICT"
+ else
+ raise ArgumentError, <<-MSG.strip_heredoc
+ '#{dependency}' is not supported for :on_update or :on_delete.
+ Supported values are: :nullify, :cascade, :restrict
+ MSG
+ end
+ end
+ end
+ end
+ SchemaCreation = AbstractAdapter::SchemaCreation # :nodoc:
+ end
+end
diff --git a/activerecord/lib/active_record/connection_adapters/abstract/schema_definitions.rb b/activerecord/lib/active_record/connection_adapters/abstract/schema_definitions.rb
new file mode 100644
index 0000000000..3b2c51ef94
--- /dev/null
+++ b/activerecord/lib/active_record/connection_adapters/abstract/schema_definitions.rb
@@ -0,0 +1,654 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ module ConnectionAdapters #:nodoc:
+ # Abstract representation of an index definition on a table. Instances of
+ # this type are typically created and returned by methods in database
+ # adapters. e.g. ActiveRecord::ConnectionAdapters::MySQL::SchemaStatements#indexes
+ class IndexDefinition # :nodoc:
+ attr_reader :table, :name, :unique, :columns, :lengths, :orders, :where, :type, :using, :comment
+
+ def initialize(
+ table, name,
+ unique = false,
+ columns = [],
+ lengths: {},
+ orders: {},
+ where: nil,
+ type: nil,
+ using: nil,
+ comment: nil
+ )
+ @table = table
+ @name = name
+ @unique = unique
+ @columns = columns
+ @lengths = lengths
+ @orders = orders
+ @where = where
+ @type = type
+ @using = using
+ @comment = comment
+ end
+ end
+
+ # Abstract representation of a column definition. Instances of this type
+ # are typically created by methods in TableDefinition, and added to the
+ # +columns+ attribute of said TableDefinition object, in order to be used
+ # for generating a number of table creation or table changing SQL statements.
+ ColumnDefinition = Struct.new(:name, :type, :options, :sql_type) do # :nodoc:
+ def primary_key?
+ options[:primary_key]
+ end
+
+ [:limit, :precision, :scale, :default, :null, :collation, :comment].each do |option_name|
+ module_eval <<-CODE, __FILE__, __LINE__ + 1
+ def #{option_name}
+ options[:#{option_name}]
+ end
+
+ def #{option_name}=(value)
+ options[:#{option_name}] = value
+ end
+ CODE
+ end
+ end
+
+ AddColumnDefinition = Struct.new(:column) # :nodoc:
+
+ ChangeColumnDefinition = Struct.new(:column, :name) #:nodoc:
+
+ PrimaryKeyDefinition = Struct.new(:name) # :nodoc:
+
+ ForeignKeyDefinition = Struct.new(:from_table, :to_table, :options) do #:nodoc:
+ def name
+ options[:name]
+ end
+
+ def column
+ options[:column]
+ end
+
+ def primary_key
+ options[:primary_key] || default_primary_key
+ end
+
+ def on_delete
+ options[:on_delete]
+ end
+
+ def on_update
+ options[:on_update]
+ end
+
+ def custom_primary_key?
+ options[:primary_key] != default_primary_key
+ end
+
+ def defined_for?(to_table_ord = nil, to_table: nil, **options)
+ if to_table_ord
+ self.to_table == to_table_ord.to_s
+ else
+ (to_table.nil? || to_table.to_s == self.to_table) &&
+ options.all? { |k, v| self.options[k].to_s == v.to_s }
+ end
+ end
+
+ private
+ def default_primary_key
+ "id"
+ end
+ end
+
+ class ReferenceDefinition # :nodoc:
+ def initialize(
+ name,
+ polymorphic: false,
+ index: true,
+ foreign_key: false,
+ type: :bigint,
+ **options
+ )
+ @name = name
+ @polymorphic = polymorphic
+ @index = index
+ @foreign_key = foreign_key
+ @type = type
+ @options = options
+
+ if polymorphic && foreign_key
+ raise ArgumentError, "Cannot add a foreign key to a polymorphic relation"
+ end
+ end
+
+ def add_to(table)
+ columns.each do |column_options|
+ table.column(*column_options)
+ end
+
+ if index
+ table.index(column_names, index_options)
+ end
+
+ if foreign_key
+ table.foreign_key(foreign_table_name, foreign_key_options)
+ end
+ end
+
+ # TODO Change this to private once we've dropped Ruby 2.2 support.
+ # Workaround for Ruby 2.2 "private attribute?" warning.
+ protected
+
+ attr_reader :name, :polymorphic, :index, :foreign_key, :type, :options
+
+ private
+
+ def as_options(value)
+ value.is_a?(Hash) ? value : {}
+ end
+
+ def polymorphic_options
+ as_options(polymorphic).merge(null: options[:null])
+ end
+
+ def index_options
+ as_options(index)
+ end
+
+ def foreign_key_options
+ as_options(foreign_key).merge(column: column_name)
+ end
+
+ def columns
+ result = [[column_name, type, options]]
+ if polymorphic
+ result.unshift(["#{name}_type", :string, polymorphic_options])
+ end
+ result
+ end
+
+ def column_name
+ "#{name}_id"
+ end
+
+ def column_names
+ columns.map(&:first)
+ end
+
+ def foreign_table_name
+ foreign_key_options.fetch(:to_table) do
+ Base.pluralize_table_names ? name.to_s.pluralize : name
+ end
+ end
+ end
+
+ module ColumnMethods
+ # Appends a primary key definition to the table definition.
+ # Can be called multiple times, but this is probably not a good idea.
+ def primary_key(name, type = :primary_key, **options)
+ column(name, type, options.merge(primary_key: true))
+ end
+
+ # Appends a column or columns of a specified type.
+ #
+ # t.string(:goat)
+ # t.string(:goat, :sheep)
+ #
+ # See TableDefinition#column
+ [
+ :bigint,
+ :binary,
+ :boolean,
+ :date,
+ :datetime,
+ :decimal,
+ :float,
+ :integer,
+ :string,
+ :text,
+ :time,
+ :timestamp,
+ :virtual,
+ ].each do |column_type|
+ module_eval <<-CODE, __FILE__, __LINE__ + 1
+ def #{column_type}(*args, **options)
+ args.each { |name| column(name, :#{column_type}, options) }
+ end
+ CODE
+ end
+ alias_method :numeric, :decimal
+ end
+
+ # Represents the schema of an SQL table in an abstract way. This class
+ # provides methods for manipulating the schema representation.
+ #
+ # Inside migration files, the +t+ object in {create_table}[rdoc-ref:SchemaStatements#create_table]
+ # is actually of this type:
+ #
+ # class SomeMigration < ActiveRecord::Migration[5.0]
+ # def up
+ # create_table :foo do |t|
+ # puts t.class # => "ActiveRecord::ConnectionAdapters::TableDefinition"
+ # end
+ # end
+ #
+ # def down
+ # ...
+ # end
+ # end
+ #
+ class TableDefinition
+ include ColumnMethods
+
+ attr_accessor :indexes
+ attr_reader :name, :temporary, :options, :as, :foreign_keys, :comment
+
+ def initialize(name, temporary = false, options = nil, as = nil, comment: nil)
+ @columns_hash = {}
+ @indexes = []
+ @foreign_keys = []
+ @primary_keys = nil
+ @temporary = temporary
+ @options = options
+ @as = as
+ @name = name
+ @comment = comment
+ end
+
+ def primary_keys(name = nil) # :nodoc:
+ @primary_keys = PrimaryKeyDefinition.new(name) if name
+ @primary_keys
+ end
+
+ # Returns an array of ColumnDefinition objects for the columns of the table.
+ def columns; @columns_hash.values; end
+
+ # Returns a ColumnDefinition for the column with name +name+.
+ def [](name)
+ @columns_hash[name.to_s]
+ end
+
+ # Instantiates a new column for the table.
+ # See {connection.add_column}[rdoc-ref:ConnectionAdapters::SchemaStatements#add_column]
+ # for available options.
+ #
+ # Additional options are:
+ # * <tt>:index</tt> -
+ # Create an index for the column. Can be either <tt>true</tt> or an options hash.
+ #
+ # This method returns <tt>self</tt>.
+ #
+ # == Examples
+ #
+ # # Assuming +td+ is an instance of TableDefinition
+ # td.column(:granted, :boolean, index: true)
+ #
+ # == Short-hand examples
+ #
+ # Instead of calling #column directly, you can also work with the short-hand definitions for the default types.
+ # They use the type as the method name instead of as a parameter and allow for multiple columns to be defined
+ # in a single statement.
+ #
+ # What can be written like this with the regular calls to column:
+ #
+ # create_table :products do |t|
+ # t.column :shop_id, :integer
+ # t.column :creator_id, :integer
+ # t.column :item_number, :string
+ # t.column :name, :string, default: "Untitled"
+ # t.column :value, :string, default: "Untitled"
+ # t.column :created_at, :datetime
+ # t.column :updated_at, :datetime
+ # end
+ # add_index :products, :item_number
+ #
+ # can also be written as follows using the short-hand:
+ #
+ # create_table :products do |t|
+ # t.integer :shop_id, :creator_id
+ # t.string :item_number, index: true
+ # t.string :name, :value, default: "Untitled"
+ # t.timestamps null: false
+ # end
+ #
+ # There's a short-hand method for each of the type values declared at the top. And then there's
+ # TableDefinition#timestamps that'll add +created_at+ and +updated_at+ as datetimes.
+ #
+ # TableDefinition#references will add an appropriately-named _id column, plus a corresponding _type
+ # column if the <tt>:polymorphic</tt> option is supplied. If <tt>:polymorphic</tt> is a hash of
+ # options, these will be used when creating the <tt>_type</tt> column. The <tt>:index</tt> option
+ # will also create an index, similar to calling {add_index}[rdoc-ref:ConnectionAdapters::SchemaStatements#add_index].
+ # So what can be written like this:
+ #
+ # create_table :taggings do |t|
+ # t.integer :tag_id, :tagger_id, :taggable_id
+ # t.string :tagger_type
+ # t.string :taggable_type, default: 'Photo'
+ # end
+ # add_index :taggings, :tag_id, name: 'index_taggings_on_tag_id'
+ # add_index :taggings, [:tagger_id, :tagger_type]
+ #
+ # Can also be written as follows using references:
+ #
+ # create_table :taggings do |t|
+ # t.references :tag, index: { name: 'index_taggings_on_tag_id' }
+ # t.references :tagger, polymorphic: true, index: true
+ # t.references :taggable, polymorphic: { default: 'Photo' }
+ # end
+ def column(name, type, options = {})
+ name = name.to_s
+ type = type.to_sym if type
+ options = options.dup
+
+ if @columns_hash[name] && @columns_hash[name].primary_key?
+ raise ArgumentError, "you can't redefine the primary key column '#{name}'. To define a custom primary key, pass { id: false } to create_table."
+ end
+
+ index_options = options.delete(:index)
+ index(name, index_options.is_a?(Hash) ? index_options : {}) if index_options
+ @columns_hash[name] = new_column_definition(name, type, options)
+ self
+ end
+
+      # Removes the column +name+ from the table.
+      #
+      #   remove_column(:account_id)
+ def remove_column(name)
+ @columns_hash.delete name.to_s
+ end
+
+      # Adds index options to the indexes hash, keyed by column name.
+      # This is primarily used to track indexes that need to be created after the table is created.
+ #
+ # index(:account_id, name: 'index_projects_on_account_id')
+ def index(column_name, options = {})
+ indexes << [column_name, options]
+ end
+
+ def foreign_key(table_name, options = {}) # :nodoc:
+ table_name_prefix = ActiveRecord::Base.table_name_prefix
+ table_name_suffix = ActiveRecord::Base.table_name_suffix
+ table_name = "#{table_name_prefix}#{table_name}#{table_name_suffix}"
+ foreign_keys.push([table_name, options])
+ end
+
+ # Appends <tt>:datetime</tt> columns <tt>:created_at</tt> and
+ # <tt>:updated_at</tt> to the table. See {connection.add_timestamps}[rdoc-ref:SchemaStatements#add_timestamps]
+ #
+ # t.timestamps null: false
+ def timestamps(**options)
+ options[:null] = false if options[:null].nil?
+
+ column(:created_at, :datetime, options)
+ column(:updated_at, :datetime, options)
+ end
+
+ # Adds a reference.
+ #
+ # t.references(:user)
+ # t.belongs_to(:supplier, foreign_key: true)
+ #
+ # See {connection.add_reference}[rdoc-ref:SchemaStatements#add_reference] for details of the options you can use.
+ def references(*args, **options)
+ args.each do |ref_name|
+ ReferenceDefinition.new(ref_name, options).add_to(self)
+ end
+ end
+ alias :belongs_to :references
+
+ def new_column_definition(name, type, **options) # :nodoc:
+ type = aliased_types(type.to_s, type)
+ options[:primary_key] ||= type == :primary_key
+ options[:null] = false if options[:primary_key]
+ create_column_definition(name, type, options)
+ end
+
+ private
+ def create_column_definition(name, type, options)
+ ColumnDefinition.new(name, type, options)
+ end
+
+ def aliased_types(name, fallback)
+ "timestamp" == name ? :datetime : fallback
+ end
+ end
+
+ class AlterTable # :nodoc:
+ attr_reader :adds
+ attr_reader :foreign_key_adds
+ attr_reader :foreign_key_drops
+
+ def initialize(td)
+ @td = td
+ @adds = []
+ @foreign_key_adds = []
+ @foreign_key_drops = []
+ end
+
+ def name; @td.name; end
+
+ def add_foreign_key(to_table, options)
+ @foreign_key_adds << ForeignKeyDefinition.new(name, to_table, options)
+ end
+
+ def drop_foreign_key(name)
+ @foreign_key_drops << name
+ end
+
+ def add_column(name, type, options)
+ name = name.to_s
+ type = type.to_sym
+ @adds << AddColumnDefinition.new(@td.new_column_definition(name, type, options))
+ end
+ end
+
+    # Represents an SQL table in an abstract way, used for updating an existing table.
+    # Also see TableDefinition and {connection.create_table}[rdoc-ref:SchemaStatements#create_table].
+ #
+ # Available transformations are:
+ #
+ # change_table :table do |t|
+ # t.primary_key
+ # t.column
+ # t.index
+ # t.rename_index
+ # t.timestamps
+ # t.change
+ # t.change_default
+ # t.rename
+ # t.references
+ # t.belongs_to
+ # t.string
+ # t.text
+ # t.integer
+ # t.bigint
+ # t.float
+ # t.decimal
+ # t.numeric
+ # t.datetime
+ # t.timestamp
+ # t.time
+ # t.date
+ # t.binary
+ # t.boolean
+ # t.remove
+ # t.remove_references
+ # t.remove_belongs_to
+ # t.remove_index
+ # t.remove_timestamps
+ # end
+ #
+ class Table
+ include ColumnMethods
+
+ attr_reader :name
+
+ def initialize(table_name, base)
+ @name = table_name
+ @base = base
+ end
+
+ # Adds a new column to the named table.
+ #
+ # t.column(:name, :string)
+ #
+ # See TableDefinition#column for details of the options you can use.
+ def column(column_name, type, options = {})
+ @base.add_column(name, column_name, type, options)
+ end
+
+ # Checks to see if a column exists.
+ #
+ # t.string(:name) unless t.column_exists?(:name, :string)
+ #
+ # See {connection.column_exists?}[rdoc-ref:SchemaStatements#column_exists?]
+ def column_exists?(column_name, type = nil, options = {})
+ @base.column_exists?(name, column_name, type, options)
+ end
+
+ # Adds a new index to the table. +column_name+ can be a single Symbol, or
+ # an Array of Symbols.
+ #
+ # t.index(:name)
+ # t.index([:branch_id, :party_id], unique: true)
+ # t.index([:branch_id, :party_id], unique: true, name: 'by_branch_party')
+ #
+ # See {connection.add_index}[rdoc-ref:SchemaStatements#add_index] for details of the options you can use.
+ def index(column_name, options = {})
+ @base.add_index(name, column_name, options)
+ end
+
+ # Checks to see if an index exists.
+ #
+ # unless t.index_exists?(:branch_id)
+ # t.index(:branch_id)
+ # end
+ #
+ # See {connection.index_exists?}[rdoc-ref:SchemaStatements#index_exists?]
+ def index_exists?(column_name, options = {})
+ @base.index_exists?(name, column_name, options)
+ end
+
+ # Renames the given index on the table.
+ #
+ # t.rename_index(:user_id, :account_id)
+ #
+ # See {connection.rename_index}[rdoc-ref:SchemaStatements#rename_index]
+ def rename_index(index_name, new_index_name)
+ @base.rename_index(name, index_name, new_index_name)
+ end
+
+ # Adds timestamps (+created_at+ and +updated_at+) columns to the table.
+ #
+ # t.timestamps(null: false)
+ #
+ # See {connection.add_timestamps}[rdoc-ref:SchemaStatements#add_timestamps]
+ def timestamps(options = {})
+ @base.add_timestamps(name, options)
+ end
+
+ # Changes the column's definition according to the new options.
+ #
+ # t.change(:name, :string, limit: 80)
+ # t.change(:description, :text)
+ #
+ # See TableDefinition#column for details of the options you can use.
+ def change(column_name, type, options = {})
+ @base.change_column(name, column_name, type, options)
+ end
+
+ # Sets a new default value for a column.
+ #
+ # t.change_default(:qualification, 'new')
+ # t.change_default(:authorized, 1)
+ # t.change_default(:status, from: nil, to: "draft")
+ #
+ # See {connection.change_column_default}[rdoc-ref:SchemaStatements#change_column_default]
+ def change_default(column_name, default_or_changes)
+ @base.change_column_default(name, column_name, default_or_changes)
+ end
+
+ # Removes the column(s) from the table definition.
+ #
+ # t.remove(:qualification)
+ # t.remove(:qualification, :experience)
+ #
+ # See {connection.remove_columns}[rdoc-ref:SchemaStatements#remove_columns]
+ def remove(*column_names)
+ @base.remove_columns(name, *column_names)
+ end
+
+ # Removes the given index from the table.
+ #
+ # t.remove_index(:branch_id)
+ # t.remove_index(column: [:branch_id, :party_id])
+ # t.remove_index(name: :by_branch_party)
+ #
+ # See {connection.remove_index}[rdoc-ref:SchemaStatements#remove_index]
+ def remove_index(options = {})
+ @base.remove_index(name, options)
+ end
+
+ # Removes the timestamp columns (+created_at+ and +updated_at+) from the table.
+ #
+ # t.remove_timestamps
+ #
+ # See {connection.remove_timestamps}[rdoc-ref:SchemaStatements#remove_timestamps]
+ def remove_timestamps(options = {})
+ @base.remove_timestamps(name, options)
+ end
+
+ # Renames a column.
+ #
+ # t.rename(:description, :name)
+ #
+ # See {connection.rename_column}[rdoc-ref:SchemaStatements#rename_column]
+ def rename(column_name, new_column_name)
+ @base.rename_column(name, column_name, new_column_name)
+ end
+
+ # Adds a reference.
+ #
+ # t.references(:user)
+ # t.belongs_to(:supplier, foreign_key: true)
+ #
+ # See {connection.add_reference}[rdoc-ref:SchemaStatements#add_reference] for details of the options you can use.
+ def references(*args, **options)
+ args.each do |ref_name|
+ @base.add_reference(name, ref_name, options)
+ end
+ end
+ alias :belongs_to :references
+
+ # Removes a reference. Optionally removes a +type+ column.
+ #
+ # t.remove_references(:user)
+ # t.remove_belongs_to(:supplier, polymorphic: true)
+ #
+ # See {connection.remove_reference}[rdoc-ref:SchemaStatements#remove_reference]
+ def remove_references(*args, **options)
+ args.each do |ref_name|
+ @base.remove_reference(name, ref_name, options)
+ end
+ end
+ alias :remove_belongs_to :remove_references
+
+ # Adds a foreign key.
+ #
+ # t.foreign_key(:authors)
+ #
+ # See {connection.add_foreign_key}[rdoc-ref:SchemaStatements#add_foreign_key]
+ def foreign_key(*args) # :nodoc:
+ @base.add_foreign_key(name, *args)
+ end
+
+ # Checks to see if a foreign key exists.
+ #
+ # t.foreign_key(:authors) unless t.foreign_key_exists?(:authors)
+ #
+ # See {connection.foreign_key_exists?}[rdoc-ref:SchemaStatements#foreign_key_exists?]
+ def foreign_key_exists?(*args) # :nodoc:
+ @base.foreign_key_exists?(name, *args)
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/connection_adapters/abstract/schema_dumper.rb b/activerecord/lib/active_record/connection_adapters/abstract/schema_dumper.rb
new file mode 100644
index 0000000000..9be26254b2
--- /dev/null
+++ b/activerecord/lib/active_record/connection_adapters/abstract/schema_dumper.rb
@@ -0,0 +1,119 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ module ConnectionAdapters # :nodoc:
+    # The goal of this module is to move adapter-specific column
+    # definitions to the adapter instead of keeping them in the schema
+    # dumper itself. This code represents the normal case; individual
+    # adapters can then redefine how certain data types are handled in the
+    # schema dumper by overriding these methods in their database-specific code.
+ module ColumnDumper
+ def column_spec(column)
+ [schema_type_with_virtual(column), prepare_column_options(column)]
+ end
+
+ def column_spec_for_primary_key(column)
+ return {} if default_primary_key?(column)
+ spec = { id: schema_type(column).inspect }
+ spec.merge!(prepare_column_options(column).except!(:null))
+ spec[:default] ||= "nil" if explicit_primary_key_default?(column)
+ spec
+ end
+
+      # This can be overridden at the adapter level to support other
+      # extended data types (for example, adding an array option in the
+      # PostgreSQL::ColumnDumper).
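+      #
+      # As a rough illustration (the exact keys depend on the adapter and the
+      # column), a varchar(40) NOT NULL column whose limit differs from the
+      # adapter's default might produce:
+      #
+      #   prepare_column_options(column) # => { limit: "40", null: "false" }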
+ def prepare_column_options(column)
+ spec = {}
+
+ if limit = schema_limit(column)
+ spec[:limit] = limit
+ end
+
+ if precision = schema_precision(column)
+ spec[:precision] = precision
+ end
+
+ if scale = schema_scale(column)
+ spec[:scale] = scale
+ end
+
+ default = schema_default(column) if column.has_default?
+ spec[:default] = default unless default.nil?
+
+ spec[:null] = "false" unless column.null
+
+ if collation = schema_collation(column)
+ spec[:collation] = collation
+ end
+
+ spec[:comment] = column.comment.inspect if column.comment.present?
+
+ spec
+ end
+
+ # Lists the valid migration options
+ def migration_keys # :nodoc:
+ column_options_keys
+ end
+ deprecate :migration_keys
+
+ private
+
+ def default_primary_key?(column)
+ schema_type(column) == :bigint
+ end
+
+ def explicit_primary_key_default?(column)
+ false
+ end
+
+ def schema_type_with_virtual(column)
+ if supports_virtual_columns? && column.virtual?
+ :virtual
+ else
+ schema_type(column)
+ end
+ end
+
+ def schema_type(column)
+ if column.bigint?
+ :bigint
+ else
+ column.type
+ end
+ end
+
+ def schema_limit(column)
+ limit = column.limit unless column.bigint?
+ limit.inspect if limit && limit != native_database_types[column.type][:limit]
+ end
+
+ def schema_precision(column)
+ column.precision.inspect if column.precision
+ end
+
+ def schema_scale(column)
+ column.scale.inspect if column.scale
+ end
+
+ def schema_default(column)
+ type = lookup_cast_type_from_column(column)
+ default = type.deserialize(column.default)
+ if default.nil?
+ schema_expression(column)
+ else
+ type.type_cast_for_schema(default)
+ end
+ end
+
+ def schema_expression(column)
+ "-> { #{column.default_function.inspect} }" if column.default_function
+ end
+
+ def schema_collation(column)
+ column.collation.inspect if column.collation
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/connection_adapters/abstract/schema_statements.rb b/activerecord/lib/active_record/connection_adapters/abstract/schema_statements.rb
new file mode 100644
index 0000000000..e21f93856e
--- /dev/null
+++ b/activerecord/lib/active_record/connection_adapters/abstract/schema_statements.rb
@@ -0,0 +1,1372 @@
+# frozen_string_literal: true
+
+require_relative "../../migration/join_table"
+require "active_support/core_ext/string/access"
+require "digest"
+
+module ActiveRecord
+ module ConnectionAdapters # :nodoc:
+ module SchemaStatements
+ include ActiveRecord::Migration::JoinTable
+
+ # Returns a hash of mappings from the abstract data types to the native
+ # database types. See TableDefinition#column for details on the recognized
+ # abstract data types.
+ def native_database_types
+ {}
+ end
+
+ def table_options(table_name)
+ nil
+ end
+
+ # Returns the table comment that's stored in database metadata.
+ def table_comment(table_name)
+ nil
+ end
+
+ # Truncates a table alias according to the limits of the current adapter.
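+      # For example, assuming the result fits within the adapter's alias length:
+      #
+      #   table_alias_for("camelized.table_name") # => "camelized_table_name"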
+ def table_alias_for(table_name)
+ table_name[0...table_alias_length].tr(".", "_")
+ end
+
+      # Returns the relation names usable to back Active Record models.
+ # For most adapters this means all #tables and #views.
+ def data_sources
+ query_values(data_source_sql, "SCHEMA")
+ rescue NotImplementedError
+ tables | views
+ end
+
+ # Checks to see if the data source +name+ exists on the database.
+ #
+ # data_source_exists?(:ebooks)
+ #
+ def data_source_exists?(name)
+ query_values(data_source_sql(name), "SCHEMA").any? if name.present?
+ rescue NotImplementedError
+ data_sources.include?(name.to_s)
+ end
+
+ # Returns an array of table names defined in the database.
+ def tables
+ query_values(data_source_sql(type: "BASE TABLE"), "SCHEMA")
+ end
+
+ # Checks to see if the table +table_name+ exists on the database.
+ #
+ # table_exists?(:developers)
+ #
+ def table_exists?(table_name)
+ query_values(data_source_sql(table_name, type: "BASE TABLE"), "SCHEMA").any? if table_name.present?
+ rescue NotImplementedError
+ tables.include?(table_name.to_s)
+ end
+
+ # Returns an array of view names defined in the database.
+ def views
+ query_values(data_source_sql(type: "VIEW"), "SCHEMA")
+ end
+
+ # Checks to see if the view +view_name+ exists on the database.
+ #
+ # view_exists?(:ebooks)
+ #
+ def view_exists?(view_name)
+ query_values(data_source_sql(view_name, type: "VIEW"), "SCHEMA").any? if view_name.present?
+ rescue NotImplementedError
+ views.include?(view_name.to_s)
+ end
+
+ # Returns an array of indexes for the given table.
+ def indexes(table_name, name = nil)
+ raise NotImplementedError, "#indexes is not implemented"
+ end
+
+ # Checks to see if an index exists on a table for a given index definition.
+ #
+ # # Check an index exists
+ # index_exists?(:suppliers, :company_id)
+ #
+ # # Check an index on multiple columns exists
+ # index_exists?(:suppliers, [:company_id, :company_type])
+ #
+ # # Check a unique index exists
+ # index_exists?(:suppliers, :company_id, unique: true)
+ #
+ # # Check an index with a custom name exists
+ # index_exists?(:suppliers, :company_id, name: "idx_company_id")
+ #
+ def index_exists?(table_name, column_name, options = {})
+ column_names = Array(column_name).map(&:to_s)
+ checks = []
+ checks << lambda { |i| i.columns == column_names }
+ checks << lambda { |i| i.unique } if options[:unique]
+ checks << lambda { |i| i.name == options[:name].to_s } if options[:name]
+
+ indexes(table_name).any? { |i| checks.all? { |check| check[i] } }
+ end
+
+ # Returns an array of +Column+ objects for the table specified by +table_name+.
+ def columns(table_name)
+ table_name = table_name.to_s
+ column_definitions(table_name).map do |field|
+ new_column_from_field(table_name, field)
+ end
+ end
+
+ # Checks to see if a column exists in a given table.
+ #
+ # # Check a column exists
+ # column_exists?(:suppliers, :name)
+ #
+ # # Check a column exists of a particular type
+ # column_exists?(:suppliers, :name, :string)
+ #
+ # # Check a column exists with a specific definition
+ # column_exists?(:suppliers, :name, :string, limit: 100)
+ # column_exists?(:suppliers, :name, :string, default: 'default')
+ # column_exists?(:suppliers, :name, :string, null: false)
+ # column_exists?(:suppliers, :tax, :decimal, precision: 8, scale: 2)
+ #
+ def column_exists?(table_name, column_name, type = nil, options = {})
+ column_name = column_name.to_s
+ checks = []
+ checks << lambda { |c| c.name == column_name }
+ checks << lambda { |c| c.type == type } if type
+ column_options_keys.each do |attr|
+ checks << lambda { |c| c.send(attr) == options[attr] } if options.key?(attr)
+ end
+
+ columns(table_name).any? { |c| checks.all? { |check| check[c] } }
+ end
+
+      # Returns a table's primary key, or an array of column names if the
+      # table uses a composite primary key.
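+      #
+      # For instance, with a conventional +id+ column and a hypothetical
+      # composite-key table:
+      #
+      #   primary_key(:developers) # => "id"
+      #   primary_key(:orders)     # => ["product_id", "client_id"]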
+ def primary_key(table_name)
+ pk = primary_keys(table_name)
+ pk = pk.first unless pk.size > 1
+ pk
+ end
+
+ # Creates a new table with the name +table_name+. +table_name+ may either
+ # be a String or a Symbol.
+ #
+ # There are two ways to work with #create_table. You can use the block
+ # form or the regular form, like this:
+ #
+ # === Block form
+ #
+ # # create_table() passes a TableDefinition object to the block.
+ # # This form will not only create the table, but also columns for the
+ # # table.
+ #
+ # create_table(:suppliers) do |t|
+ # t.column :name, :string, limit: 60
+ # # Other fields here
+ # end
+ #
+ # === Block form, with shorthand
+ #
+ # # You can also use the column types as method calls, rather than calling the column method.
+ # create_table(:suppliers) do |t|
+ # t.string :name, limit: 60
+ # # Other fields here
+ # end
+ #
+ # === Regular form
+ #
+ # # Creates a table called 'suppliers' with no columns.
+ # create_table(:suppliers)
+ # # Add a column to 'suppliers'.
+ # add_column(:suppliers, :name, :string, {limit: 60})
+ #
+ # The +options+ hash can include the following keys:
+ # [<tt>:id</tt>]
+ # Whether to automatically add a primary key column. Defaults to true.
+ # Join tables for {ActiveRecord::Base.has_and_belongs_to_many}[rdoc-ref:Associations::ClassMethods#has_and_belongs_to_many] should set it to false.
+ #
+ # A Symbol can be used to specify the type of the generated primary key column.
+ # [<tt>:primary_key</tt>]
+ # The name of the primary key, if one is to be added automatically.
+ # Defaults to +id+. If <tt>:id</tt> is false, then this option is ignored.
+ #
+ # If an array is passed, a composite primary key will be created.
+ #
+ # Note that Active Record models will automatically detect their
+ # primary key. This can be avoided by using
+ # {self.primary_key=}[rdoc-ref:AttributeMethods::PrimaryKey::ClassMethods#primary_key=] on the model
+ # to define the key explicitly.
+ #
+ # [<tt>:options</tt>]
+ # Any extra options you want appended to the table definition.
+ # [<tt>:temporary</tt>]
+ # Make a temporary table.
+ # [<tt>:force</tt>]
+ # Set to true to drop the table before creating it.
+ # Set to +:cascade+ to drop dependent objects as well.
+ # Defaults to false.
+ # [<tt>:as</tt>]
+ # SQL to use to generate the table. When this option is used, the block is
+ # ignored, as are the <tt>:id</tt> and <tt>:primary_key</tt> options.
+ #
+ # ====== Add a backend specific option to the generated SQL (MySQL)
+ #
+ # create_table(:suppliers, options: 'ENGINE=InnoDB DEFAULT CHARSET=utf8')
+ #
+ # generates:
+ #
+ # CREATE TABLE suppliers (
+ # id int auto_increment PRIMARY KEY
+ # ) ENGINE=InnoDB DEFAULT CHARSET=utf8
+ #
+ # ====== Rename the primary key column
+ #
+ # create_table(:objects, primary_key: 'guid') do |t|
+ # t.column :name, :string, limit: 80
+ # end
+ #
+ # generates:
+ #
+ # CREATE TABLE objects (
+ # guid int auto_increment PRIMARY KEY,
+ # name varchar(80)
+ # )
+ #
+ # ====== Change the primary key column type
+ #
+ # create_table(:tags, id: :string) do |t|
+ # t.column :label, :string
+ # end
+ #
+ # generates:
+ #
+ # CREATE TABLE tags (
+ # id varchar PRIMARY KEY,
+ # label varchar
+ # )
+ #
+ # ====== Create a composite primary key
+ #
+ # create_table(:orders, primary_key: [:product_id, :client_id]) do |t|
+ # t.belongs_to :product
+ # t.belongs_to :client
+ # end
+ #
+ # generates:
+ #
+ # CREATE TABLE order (
+ # product_id integer NOT NULL,
+ # client_id integer NOT NULL
+ # );
+ #
+ # ALTER TABLE ONLY "orders"
+ # ADD CONSTRAINT orders_pkey PRIMARY KEY (product_id, client_id);
+ #
+ # ====== Do not add a primary key column
+ #
+ # create_table(:categories_suppliers, id: false) do |t|
+ # t.column :category_id, :integer
+ # t.column :supplier_id, :integer
+ # end
+ #
+ # generates:
+ #
+ # CREATE TABLE categories_suppliers (
+ # category_id int,
+ # supplier_id int
+ # )
+ #
+ # ====== Create a temporary table based on a query
+ #
+ # create_table(:long_query, temporary: true,
+ # as: "SELECT * FROM orders INNER JOIN line_items ON order_id=orders.id")
+ #
+ # generates:
+ #
+ # CREATE TEMPORARY TABLE long_query AS
+ # SELECT * FROM orders INNER JOIN line_items ON order_id=orders.id
+ #
+ # See also TableDefinition#column for details on how to create columns.
+ def create_table(table_name, comment: nil, **options)
+ td = create_table_definition table_name, options[:temporary], options[:options], options[:as], comment: comment
+
+ if options[:id] != false && !options[:as]
+ pk = options.fetch(:primary_key) do
+ Base.get_primary_key table_name.to_s.singularize
+ end
+
+ if pk.is_a?(Array)
+ td.primary_keys pk
+ else
+ td.primary_key pk, options.fetch(:id, :primary_key), options
+ end
+ end
+
+ yield td if block_given?
+
+ if options[:force]
+ drop_table(table_name, **options, if_exists: true)
+ end
+
+ result = execute schema_creation.accept td
+
+ unless supports_indexes_in_create?
+ td.indexes.each do |column_name, index_options|
+ add_index(table_name, column_name, index_options)
+ end
+ end
+
+ if supports_comments? && !supports_comments_in_create?
+ change_table_comment(table_name, comment) if comment.present?
+
+ td.columns.each do |column|
+ change_column_comment(table_name, column.name, column.comment) if column.comment.present?
+ end
+ end
+
+ result
+ end
+
+ # Creates a new join table with the name created using the lexical order of the first two
+ # arguments. These arguments can be a String or a Symbol.
+ #
+ # # Creates a table called 'assemblies_parts' with no id.
+ # create_join_table(:assemblies, :parts)
+ #
+ # You can pass an +options+ hash which can include the following keys:
+ # [<tt>:table_name</tt>]
+ # Sets the table name, overriding the default.
+ # [<tt>:column_options</tt>]
+ # Any extra options you want appended to the columns definition.
+ # [<tt>:options</tt>]
+ # Any extra options you want appended to the table definition.
+ # [<tt>:temporary</tt>]
+ # Make a temporary table.
+ # [<tt>:force</tt>]
+ # Set to true to drop the table before creating it.
+ # Defaults to false.
+ #
+ # Note that #create_join_table does not create any indices by default; you can use
+ # its block form to do so yourself:
+ #
+ # create_join_table :products, :categories do |t|
+ # t.index :product_id
+ # t.index :category_id
+ # end
+ #
+ # ====== Add a backend specific option to the generated SQL (MySQL)
+ #
+ # create_join_table(:assemblies, :parts, options: 'ENGINE=InnoDB DEFAULT CHARSET=utf8')
+ #
+ # generates:
+ #
+ # CREATE TABLE assemblies_parts (
+ # assembly_id int NOT NULL,
+ # part_id int NOT NULL,
+ # ) ENGINE=InnoDB DEFAULT CHARSET=utf8
+ #
+ def create_join_table(table_1, table_2, column_options: {}, **options)
+ join_table_name = find_join_table_name(table_1, table_2, options)
+
+ column_options.reverse_merge!(null: false, index: false)
+
+ t1_ref, t2_ref = [table_1, table_2].map { |t| t.to_s.singularize }
+
+ create_table(join_table_name, options.merge!(id: false)) do |td|
+ td.references t1_ref, column_options
+ td.references t2_ref, column_options
+ yield td if block_given?
+ end
+ end
+
+ # Drops the join table specified by the given arguments.
+ # See #create_join_table for details.
+ #
+ # Although this command ignores the block if one is given, it can be helpful
+ # to provide one in a migration's +change+ method so it can be reverted.
+ # In that case, the block will be used by #create_join_table.
+ def drop_join_table(table_1, table_2, options = {})
+ join_table_name = find_join_table_name(table_1, table_2, options)
+ drop_table(join_table_name)
+ end
+
+ # A block for changing columns in +table+.
+ #
+ # # change_table() yields a Table instance
+ # change_table(:suppliers) do |t|
+ # t.column :name, :string, limit: 60
+ # # Other column alterations here
+ # end
+ #
+ # The +options+ hash can include the following keys:
+ # [<tt>:bulk</tt>]
+ # Set this to true to make this a bulk alter query, such as
+ #
+ # ALTER TABLE `users` ADD COLUMN age INT, ADD COLUMN birthdate DATETIME ...
+ #
+ # Defaults to false.
+ #
+ # ====== Add a column
+ #
+ # change_table(:suppliers) do |t|
+ # t.column :name, :string, limit: 60
+ # end
+ #
+ # ====== Add 2 integer columns
+ #
+ # change_table(:suppliers) do |t|
+ # t.integer :width, :height, null: false, default: 0
+ # end
+ #
+ # ====== Add created_at/updated_at columns
+ #
+ # change_table(:suppliers) do |t|
+ # t.timestamps
+ # end
+ #
+ # ====== Add a foreign key column
+ #
+ # change_table(:suppliers) do |t|
+ # t.references :company
+ # end
+ #
+ # Creates a <tt>company_id(integer)</tt> column.
+ #
+ # ====== Add a polymorphic foreign key column
+ #
+ # change_table(:suppliers) do |t|
+ # t.belongs_to :company, polymorphic: true
+ # end
+ #
+ # Creates <tt>company_type(varchar)</tt> and <tt>company_id(integer)</tt> columns.
+ #
+ # ====== Remove a column
+ #
+ # change_table(:suppliers) do |t|
+ # t.remove :company
+ # end
+ #
+ # ====== Remove several columns
+ #
+ # change_table(:suppliers) do |t|
+ # t.remove :company_id
+ # t.remove :width, :height
+ # end
+ #
+ # ====== Remove an index
+ #
+ # change_table(:suppliers) do |t|
+ # t.remove_index :company_id
+ # end
+ #
+ # See also Table for details on all of the various column transformations.
+ def change_table(table_name, options = {})
+ if supports_bulk_alter? && options[:bulk]
+ recorder = ActiveRecord::Migration::CommandRecorder.new(self)
+ yield update_table_definition(table_name, recorder)
+ bulk_change_table(table_name, recorder.commands)
+ else
+ yield update_table_definition(table_name, self)
+ end
+ end
+
+ # Renames a table.
+ #
+ # rename_table('octopuses', 'octopi')
+ #
+ def rename_table(table_name, new_name)
+ raise NotImplementedError, "rename_table is not implemented"
+ end
+
+ # Drops a table from the database.
+ #
+ # [<tt>:force</tt>]
+ # Set to +:cascade+ to drop dependent objects as well.
+ # Defaults to false.
+ # [<tt>:if_exists</tt>]
+ # Set to +true+ to only drop the table if it exists.
+ # Defaults to false.
+ #
+ # Although this command ignores most +options+ and the block if one is given,
+ # it can be helpful to provide these in a migration's +change+ method so it can be reverted.
+ # In that case, +options+ and the block will be used by #create_table.
+ def drop_table(table_name, options = {})
+ execute "DROP TABLE#{' IF EXISTS' if options[:if_exists]} #{quote_table_name(table_name)}"
+ end
+
+ # Add a new +type+ column named +column_name+ to +table_name+.
+ #
+ # The +type+ parameter is normally one of the migrations native types,
+ # which is one of the following:
+ # <tt>:primary_key</tt>, <tt>:string</tt>, <tt>:text</tt>,
+ # <tt>:integer</tt>, <tt>:bigint</tt>, <tt>:float</tt>, <tt>:decimal</tt>, <tt>:numeric</tt>,
+ # <tt>:datetime</tt>, <tt>:time</tt>, <tt>:date</tt>,
+ # <tt>:binary</tt>, <tt>:boolean</tt>.
+ #
+ # You may use a type not in this list as long as it is supported by your
+ # database (for example, "polygon" in MySQL), but this will not be database
+ # agnostic and should usually be avoided.
+ #
+ # Available options are (none of these exists by default):
+ # * <tt>:limit</tt> -
+ # Requests a maximum column length. This is the number of characters for a <tt>:string</tt> column
+ # and number of bytes for <tt>:text</tt>, <tt>:binary</tt> and <tt>:integer</tt> columns.
+ # This option is ignored by some backends.
+ # * <tt>:default</tt> -
+ # The column's default value. Use +nil+ for +NULL+.
+ # * <tt>:null</tt> -
+ # Allows or disallows +NULL+ values in the column.
+ # * <tt>:precision</tt> -
+ # Specifies the precision for the <tt>:decimal</tt> and <tt>:numeric</tt> columns.
+ # * <tt>:scale</tt> -
+ # Specifies the scale for the <tt>:decimal</tt> and <tt>:numeric</tt> columns.
+ #
+ # Note: The precision is the total number of significant digits,
+ # and the scale is the number of digits that can be stored following
+ # the decimal point. For example, the number 123.45 has a precision of 5
+ # and a scale of 2. A decimal with a precision of 5 and a scale of 2 can
+ # range from -999.99 to 999.99.
+ #
+      # Please be aware of different RDBMS implementations' behavior with
+ # <tt>:decimal</tt> columns:
+ # * The SQL standard says the default scale should be 0, <tt>:scale</tt> <=
+ # <tt>:precision</tt>, and makes no comments about the requirements of
+ # <tt>:precision</tt>.
+ # * MySQL: <tt>:precision</tt> [1..63], <tt>:scale</tt> [0..30].
+ # Default is (10,0).
+ # * PostgreSQL: <tt>:precision</tt> [1..infinity],
+ # <tt>:scale</tt> [0..infinity]. No default.
+ # * SQLite3: No restrictions on <tt>:precision</tt> and <tt>:scale</tt>,
+ # but the maximum supported <tt>:precision</tt> is 16. No default.
+ # * Oracle: <tt>:precision</tt> [1..38], <tt>:scale</tt> [-84..127].
+ # Default is (38,0).
+ # * DB2: <tt>:precision</tt> [1..63], <tt>:scale</tt> [0..62].
+ # Default unknown.
+ # * SqlServer: <tt>:precision</tt> [1..38], <tt>:scale</tt> [0..38].
+ # Default (38,0).
+ #
+ # == Examples
+ #
+ # add_column(:users, :picture, :binary, limit: 2.megabytes)
+ # # ALTER TABLE "users" ADD "picture" blob(2097152)
+ #
+ # add_column(:articles, :status, :string, limit: 20, default: 'draft', null: false)
+ # # ALTER TABLE "articles" ADD "status" varchar(20) DEFAULT 'draft' NOT NULL
+ #
+ # add_column(:answers, :bill_gates_money, :decimal, precision: 15, scale: 2)
+ # # ALTER TABLE "answers" ADD "bill_gates_money" decimal(15,2)
+ #
+ # add_column(:measurements, :sensor_reading, :decimal, precision: 30, scale: 20)
+ # # ALTER TABLE "measurements" ADD "sensor_reading" decimal(30,20)
+ #
+ # # While :scale defaults to zero on most databases, it
+ # # probably wouldn't hurt to include it.
+ # add_column(:measurements, :huge_integer, :decimal, precision: 30)
+ # # ALTER TABLE "measurements" ADD "huge_integer" decimal(30)
+ #
+ # # Defines a column that stores an array of a type.
+ # add_column(:users, :skills, :text, array: true)
+ # # ALTER TABLE "users" ADD "skills" text[]
+ #
+ # # Defines a column with a database-specific type.
+ # add_column(:shapes, :triangle, 'polygon')
+ # # ALTER TABLE "shapes" ADD "triangle" polygon
+ def add_column(table_name, column_name, type, options = {})
+ at = create_alter_table table_name
+ at.add_column(column_name, type, options)
+ execute schema_creation.accept at
+ end
+
+ # Removes the given columns from the table definition.
+ #
+ # remove_columns(:suppliers, :qualification, :experience)
+ #
+ def remove_columns(table_name, *column_names)
+ raise ArgumentError.new("You must specify at least one column name. Example: remove_columns(:people, :first_name)") if column_names.empty?
+ column_names.each do |column_name|
+ remove_column(table_name, column_name)
+ end
+ end
+
+ # Removes the column from the table definition.
+ #
+ # remove_column(:suppliers, :qualification)
+ #
+ # The +type+ and +options+ parameters will be ignored if present. It can be helpful
+ # to provide these in a migration's +change+ method so it can be reverted.
+ # In that case, +type+ and +options+ will be used by #add_column.
+ def remove_column(table_name, column_name, type = nil, options = {})
+ execute "ALTER TABLE #{quote_table_name(table_name)} DROP #{quote_column_name(column_name)}"
+ end
+
+ # Changes the column's definition according to the new options.
+ # See TableDefinition#column for details of the options you can use.
+ #
+ # change_column(:suppliers, :name, :string, limit: 80)
+ # change_column(:accounts, :description, :text)
+ #
+ def change_column(table_name, column_name, type, options = {})
+ raise NotImplementedError, "change_column is not implemented"
+ end
+
+ # Sets a new default value for a column:
+ #
+ # change_column_default(:suppliers, :qualification, 'new')
+ # change_column_default(:accounts, :authorized, 1)
+ #
+ # Setting the default to +nil+ effectively drops the default:
+ #
+ # change_column_default(:users, :email, nil)
+ #
+ # Passing a hash containing +:from+ and +:to+ will make this change
+ # reversible in migration:
+ #
+ # change_column_default(:posts, :state, from: nil, to: "draft")
+ #
+ def change_column_default(table_name, column_name, default_or_changes)
+ raise NotImplementedError, "change_column_default is not implemented"
+ end
+
+ # Sets or removes a <tt>NOT NULL</tt> constraint on a column. The +null+ flag
+ # indicates whether the value can be +NULL+. For example
+ #
+ # change_column_null(:users, :nickname, false)
+ #
+ # says nicknames cannot be +NULL+ (adds the constraint), whereas
+ #
+ # change_column_null(:users, :nickname, true)
+ #
+ # allows them to be +NULL+ (drops the constraint).
+ #
+ # The method accepts an optional fourth argument to replace existing
+ # <tt>NULL</tt>s with some other value. Use that one when enabling the
+ # constraint if needed, since otherwise those rows would not be valid.
+ #
+ # Please note the fourth argument does not set a column's default.
+ def change_column_null(table_name, column_name, null, default = nil)
+ raise NotImplementedError, "change_column_null is not implemented"
+ end
+
+ # Renames a column.
+ #
+ # rename_column(:suppliers, :description, :name)
+ #
+ def rename_column(table_name, column_name, new_column_name)
+ raise NotImplementedError, "rename_column is not implemented"
+ end
+
+ # Adds a new index to the table. +column_name+ can be a single Symbol, or
+ # an Array of Symbols.
+ #
+ # The index will be named after the table and the column name(s), unless
+ # you pass <tt>:name</tt> as an option.
+ #
+ # ====== Creating a simple index
+ #
+ # add_index(:suppliers, :name)
+ #
+ # generates:
+ #
+ # CREATE INDEX suppliers_name_index ON suppliers(name)
+ #
+ # ====== Creating a unique index
+ #
+ # add_index(:accounts, [:branch_id, :party_id], unique: true)
+ #
+ # generates:
+ #
+ # CREATE UNIQUE INDEX accounts_branch_id_party_id_index ON accounts(branch_id, party_id)
+ #
+ # ====== Creating a named index
+ #
+ # add_index(:accounts, [:branch_id, :party_id], unique: true, name: 'by_branch_party')
+ #
+ # generates:
+ #
+ # CREATE UNIQUE INDEX by_branch_party ON accounts(branch_id, party_id)
+ #
+ # ====== Creating an index with specific key length
+ #
+ # add_index(:accounts, :name, name: 'by_name', length: 10)
+ #
+ # generates:
+ #
+ # CREATE INDEX by_name ON accounts(name(10))
+ #
+ # ====== Creating an index with specific key lengths for multiple keys
+ #
+ # add_index(:accounts, [:name, :surname], name: 'by_name_surname', length: {name: 10, surname: 15})
+ #
+ # generates:
+ #
+ # CREATE INDEX by_name_surname ON accounts(name(10), surname(15))
+ #
+ # Note: SQLite doesn't support index length.
+ #
+ # ====== Creating an index with a sort order (desc or asc, asc is the default)
+ #
+ # add_index(:accounts, [:branch_id, :party_id, :surname], order: {branch_id: :desc, party_id: :asc})
+ #
+ # generates:
+ #
+ # CREATE INDEX by_branch_desc_party ON accounts(branch_id DESC, party_id ASC, surname)
+ #
+ # Note: MySQL doesn't yet support index order (it accepts the syntax but ignores it).
+ #
+ # ====== Creating a partial index
+ #
+ # add_index(:accounts, [:branch_id, :party_id], unique: true, where: "active")
+ #
+ # generates:
+ #
+ # CREATE UNIQUE INDEX index_accounts_on_branch_id_and_party_id ON accounts(branch_id, party_id) WHERE active
+ #
+ # Note: Partial indexes are only supported for PostgreSQL and SQLite 3.8.0+.
+ #
+ # ====== Creating an index with a specific method
+ #
+ # add_index(:developers, :name, using: 'btree')
+ #
+ # generates:
+ #
+ # CREATE INDEX index_developers_on_name ON developers USING btree (name) -- PostgreSQL
+ # CREATE INDEX index_developers_on_name USING btree ON developers (name) -- MySQL
+ #
+ # Note: only supported by PostgreSQL and MySQL
+ #
+ # ====== Creating an index with a specific type
+ #
+ # add_index(:developers, :name, type: :fulltext)
+ #
+ # generates:
+ #
+ # CREATE FULLTEXT INDEX index_developers_on_name ON developers (name) -- MySQL
+ #
+ # Note: only supported by MySQL.
+ def add_index(table_name, column_name, options = {})
+ index_name, index_type, index_columns, index_options = add_index_options(table_name, column_name, options)
+ execute "CREATE #{index_type} INDEX #{quote_column_name(index_name)} ON #{quote_table_name(table_name)} (#{index_columns})#{index_options}"
+ end
+
+ # Removes the given index from the table.
+ #
+ # Removes the index on +branch_id+ in the +accounts+ table if exactly one such index exists.
+ #
+ # remove_index :accounts, :branch_id
+ #
+ # Removes the index on +branch_id+ in the +accounts+ table if exactly one such index exists.
+ #
+ # remove_index :accounts, column: :branch_id
+ #
+ # Removes the index on +branch_id+ and +party_id+ in the +accounts+ table if exactly one such index exists.
+ #
+ # remove_index :accounts, column: [:branch_id, :party_id]
+ #
+ # Removes the index named +by_branch_party+ in the +accounts+ table.
+ #
+ # remove_index :accounts, name: :by_branch_party
+ #
+ def remove_index(table_name, options = {})
+ index_name = index_name_for_remove(table_name, options)
+ execute "DROP INDEX #{quote_column_name(index_name)} ON #{quote_table_name(table_name)}"
+ end
+
+ # Renames an index.
+ #
+ # Rename the +index_people_on_last_name+ index to +index_users_on_last_name+:
+ #
+ # rename_index :people, 'index_people_on_last_name', 'index_users_on_last_name'
+ #
+ def rename_index(table_name, old_name, new_name)
+ validate_index_length!(table_name, new_name)
+
+ # this is a naive implementation; some DBs may support this more efficiently (Postgres, for instance)
+ old_index_def = indexes(table_name).detect { |i| i.name == old_name }
+ return unless old_index_def
+ add_index(table_name, old_index_def.columns, name: new_name, unique: old_index_def.unique)
+ remove_index(table_name, name: old_name)
+ end
+
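+      # For example:
+      #
+      #   index_name(:accounts, column: [:branch_id, :party_id])
+      #   # => "index_accounts_on_branch_id_and_party_id"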
+ def index_name(table_name, options) #:nodoc:
+ if Hash === options
+ if options[:column]
+ "index_#{table_name}_on_#{Array(options[:column]) * '_and_'}"
+ elsif options[:name]
+ options[:name]
+ else
+ raise ArgumentError, "You must specify the index name"
+ end
+ else
+ index_name(table_name, index_name_options(options))
+ end
+ end
+
+ # Verifies the existence of an index with a given name.
+ def index_name_exists?(table_name, index_name, default = nil)
+ unless default.nil?
+ ActiveSupport::Deprecation.warn(<<-MSG.squish)
+ Passing default to #index_name_exists? is deprecated without replacement.
+ MSG
+ end
+ index_name = index_name.to_s
+ indexes(table_name).detect { |i| i.name == index_name }
+ end
+
+      # Adds a reference. The reference column is an integer by default;
+      # the <tt>:type</tt> option can be used to specify a different type.
+      # Optionally adds a +_type+ column if the <tt>:polymorphic</tt> option is provided.
+      # #add_reference and #add_belongs_to are acceptable.
+ #
+ # The +options+ hash can include the following keys:
+ # [<tt>:type</tt>]
+ # The reference column type. Defaults to +:integer+.
+ # [<tt>:index</tt>]
+ # Add an appropriate index. Defaults to true.
+ # See #add_index for usage of this option.
+ # [<tt>:foreign_key</tt>]
+ # Add an appropriate foreign key constraint. Defaults to false.
+ # [<tt>:polymorphic</tt>]
+ # Whether an additional +_type+ column should be added. Defaults to false.
+ # [<tt>:null</tt>]
+ # Whether the column allows nulls. Defaults to true.
+ #
+ # ====== Create a user_id integer column
+ #
+ # add_reference(:products, :user)
+ #
+ # ====== Create a user_id string column
+ #
+ # add_reference(:products, :user, type: :string)
+ #
+ # ====== Create supplier_id, supplier_type columns and appropriate index
+ #
+ # add_reference(:products, :supplier, polymorphic: true, index: true)
+ #
+ # ====== Create a supplier_id column with a unique index
+ #
+ # add_reference(:products, :supplier, index: { unique: true })
+ #
+ # ====== Create a supplier_id column with a named index
+ #
+ # add_reference(:products, :supplier, index: { name: "my_supplier_index" })
+ #
+ # ====== Create a supplier_id column and appropriate foreign key
+ #
+ # add_reference(:products, :supplier, foreign_key: true)
+ #
+ # ====== Create a supplier_id column and a foreign key to the firms table
+ #
+ # add_reference(:products, :supplier, foreign_key: {to_table: :firms})
+ #
+ def add_reference(table_name, ref_name, **options)
+ ReferenceDefinition.new(ref_name, options).add_to(update_table_definition(table_name, self))
+ end
+ alias :add_belongs_to :add_reference
+
+ # Removes the reference(s). Also removes a +type+ column if one exists.
+ # #remove_reference and #remove_belongs_to are acceptable.
+ #
+ # ====== Remove the reference
+ #
+ # remove_reference(:products, :user, index: true)
+ #
+ # ====== Remove polymorphic reference
+ #
+ # remove_reference(:products, :supplier, polymorphic: true)
+ #
+ # ====== Remove the reference with a foreign key
+ #
+ # remove_reference(:products, :user, index: true, foreign_key: true)
+ #
+ def remove_reference(table_name, ref_name, foreign_key: false, polymorphic: false, **options)
+ if foreign_key
+ reference_name = Base.pluralize_table_names ? ref_name.to_s.pluralize : ref_name
+ if foreign_key.is_a?(Hash)
+ foreign_key_options = foreign_key
+ else
+ foreign_key_options = { to_table: reference_name }
+ end
+ foreign_key_options[:column] ||= "#{ref_name}_id"
+ remove_foreign_key(table_name, **foreign_key_options)
+ end
+
+ remove_column(table_name, "#{ref_name}_id")
+ remove_column(table_name, "#{ref_name}_type") if polymorphic
+ end
+ alias :remove_belongs_to :remove_reference
+
+ # Returns an array of foreign keys for the given table.
+ # The foreign keys are represented as ForeignKeyDefinition objects.
+ def foreign_keys(table_name)
+ raise NotImplementedError, "foreign_keys is not implemented"
+ end
+
+ # Adds a new foreign key. +from_table+ is the table with the key column,
+ # +to_table+ contains the referenced primary key.
+ #
+ # The foreign key will be named after the following pattern: <tt>fk_rails_<identifier></tt>.
+      # +identifier+ is a 10-character string that is deterministically generated from the
+ # +from_table+ and +column+. A custom name can be specified with the <tt>:name</tt> option.
+ #
+ # ====== Creating a simple foreign key
+ #
+ # add_foreign_key :articles, :authors
+ #
+ # generates:
+ #
+ # ALTER TABLE "articles" ADD CONSTRAINT fk_rails_e74ce85cbc FOREIGN KEY ("author_id") REFERENCES "authors" ("id")
+ #
+ # ====== Creating a foreign key on a specific column
+ #
+ # add_foreign_key :articles, :users, column: :author_id, primary_key: "lng_id"
+ #
+ # generates:
+ #
+ # ALTER TABLE "articles" ADD CONSTRAINT fk_rails_58ca3d3a82 FOREIGN KEY ("author_id") REFERENCES "users" ("lng_id")
+ #
+ # ====== Creating a cascading foreign key
+ #
+ # add_foreign_key :articles, :authors, on_delete: :cascade
+ #
+ # generates:
+ #
+ # ALTER TABLE "articles" ADD CONSTRAINT fk_rails_e74ce85cbc FOREIGN KEY ("author_id") REFERENCES "authors" ("id") ON DELETE CASCADE
+ #
+ # The +options+ hash can include the following keys:
+ # [<tt>:column</tt>]
+ # The foreign key column name on +from_table+. Defaults to <tt>to_table.singularize + "_id"</tt>
+ # [<tt>:primary_key</tt>]
+ # The primary key column name on +to_table+. Defaults to +id+.
+ # [<tt>:name</tt>]
+ # The constraint name. Defaults to <tt>fk_rails_<identifier></tt>.
+ # [<tt>:on_delete</tt>]
+ # Action that happens <tt>ON DELETE</tt>. Valid values are +:nullify+, +:cascade+ and +:restrict+
+ # [<tt>:on_update</tt>]
+ # Action that happens <tt>ON UPDATE</tt>. Valid values are +:nullify+, +:cascade+ and +:restrict+
+ def add_foreign_key(from_table, to_table, options = {})
+ return unless supports_foreign_keys?
+
+ options = foreign_key_options(from_table, to_table, options)
+ at = create_alter_table from_table
+ at.add_foreign_key to_table, options
+
+ execute schema_creation.accept(at)
+ end
+
+ # Removes the given foreign key from the table. Any option parameters provided
+ # will be used to re-add the foreign key in case of a migration rollback.
+ # It is recommended that you provide any options used when creating the foreign
+ # key so that the migration can be reverted properly.
+ #
+ # Removes the foreign key on +accounts.branch_id+.
+ #
+ # remove_foreign_key :accounts, :branches
+ #
+ # Removes the foreign key on +accounts.owner_id+.
+ #
+ # remove_foreign_key :accounts, column: :owner_id
+ #
+ # Removes the foreign key named +special_fk_name+ on the +accounts+ table.
+ #
+ # remove_foreign_key :accounts, name: :special_fk_name
+ #
+ # The +options+ hash accepts the same keys as SchemaStatements#add_foreign_key.
+ def remove_foreign_key(from_table, options_or_to_table = {})
+ return unless supports_foreign_keys?
+
+ fk_name_to_delete = foreign_key_for!(from_table, options_or_to_table).name
+
+ at = create_alter_table from_table
+ at.drop_foreign_key fk_name_to_delete
+
+ execute schema_creation.accept(at)
+ end
+
+ # Checks to see if a foreign key exists on a table for a given foreign key definition.
+ #
+ # # Checks to see if a foreign key exists.
+ # foreign_key_exists?(:accounts, :branches)
+ #
+ # # Checks to see if a foreign key on a specified column exists.
+ # foreign_key_exists?(:accounts, column: :owner_id)
+ #
+ # # Checks to see if a foreign key with a custom name exists.
+ # foreign_key_exists?(:accounts, name: "special_fk_name")
+ #
+ def foreign_key_exists?(from_table, options_or_to_table = {})
+ foreign_key_for(from_table, options_or_to_table).present?
+ end
+
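+      # For example, assuming no table name prefix or suffix is configured:
+      #
+      #   foreign_key_column_for(:comments) # => "comment_id"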
+ def foreign_key_column_for(table_name) # :nodoc:
+ prefix = Base.table_name_prefix
+ suffix = Base.table_name_suffix
+ name = table_name.to_s =~ /#{prefix}(.+)#{suffix}/ ? $1 : table_name.to_s
+ "#{name.singularize}_id"
+ end
+
+ def foreign_key_options(from_table, to_table, options) # :nodoc:
+ options = options.dup
+ options[:column] ||= foreign_key_column_for(to_table)
+ options[:name] ||= foreign_key_name(from_table, options)
+ options
+ end
+
+ def dump_schema_information #:nodoc:
+ versions = ActiveRecord::SchemaMigration.all_versions
+ insert_versions_sql(versions) if versions.any?
+ end
+
+ def initialize_schema_migrations_table # :nodoc:
+ ActiveRecord::SchemaMigration.create_table
+ end
+ deprecate :initialize_schema_migrations_table
+
+ def initialize_internal_metadata_table # :nodoc:
+ ActiveRecord::InternalMetadata.create_table
+ end
+ deprecate :initialize_internal_metadata_table
+
+ def internal_string_options_for_primary_key # :nodoc:
+ { primary_key: true }
+ end
+
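+      # Records all migrations up to and including +version+ as already run by
+      # inserting any missing rows into the schema_migrations table. A usage
+      # sketch (the version and path below are illustrative):
+      #
+      #   assume_migrated_upto_version(20170902120000, "db/migrate")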
+ def assume_migrated_upto_version(version, migrations_paths)
+ migrations_paths = Array(migrations_paths)
+ version = version.to_i
+ sm_table = quote_table_name(ActiveRecord::SchemaMigration.table_name)
+
+ migrated = ActiveRecord::SchemaMigration.all_versions.map(&:to_i)
+ versions = ActiveRecord::Migrator.migration_files(migrations_paths).map do |file|
+ ActiveRecord::Migrator.parse_migration_filename(file).first.to_i
+ end
+
+ unless migrated.include?(version)
+ execute "INSERT INTO #{sm_table} (version) VALUES (#{quote(version)})"
+ end
+
+ inserting = (versions - migrated).select { |v| v < version }
+ if inserting.any?
+ if (duplicate = inserting.detect { |v| inserting.count(v) > 1 })
+ raise "Duplicate migration #{duplicate}. Please renumber your migrations to resolve the conflict."
+ end
+ if supports_multi_insert?
+ execute insert_versions_sql(inserting)
+ else
+ inserting.each do |v|
+ execute insert_versions_sql(v)
+ end
+ end
+ end
+ end
+
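+      # A rough illustration of the SQL type mapping performed here; the exact
+      # output depends on the adapter's native_database_types (the values below
+      # are what a typical adapter would return):
+      #
+      #   type_to_sql(:string,  limit: 255)             # => "varchar(255)"
+      #   type_to_sql(:decimal, precision: 8, scale: 2) # => "decimal(8,2)"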
+ def type_to_sql(type, limit: nil, precision: nil, scale: nil, **) # :nodoc:
+ type = type.to_sym if type
+ if native = native_database_types[type]
+ column_type_sql = (native.is_a?(Hash) ? native[:name] : native).dup
+
+ if type == :decimal # ignore limit, use precision and scale
+ scale ||= native[:scale]
+
+ if precision ||= native[:precision]
+ if scale
+ column_type_sql << "(#{precision},#{scale})"
+ else
+ column_type_sql << "(#{precision})"
+ end
+ elsif scale
+ raise ArgumentError, "Error adding decimal column: precision cannot be empty if scale is specified"
+ end
+
+ elsif [:datetime, :timestamp, :time, :interval].include?(type) && precision ||= native[:precision]
+ if (0..6) === precision
+ column_type_sql << "(#{precision})"
+ else
+ raise(ActiveRecordError, "No #{native[:name]} type has precision of #{precision}. The allowed range of precision is from 0 to 6")
+ end
+ elsif (type != :primary_key) && (limit ||= native.is_a?(Hash) && native[:limit])
+ column_type_sql << "(#{limit})"
+ end
+
+ column_type_sql
+ else
+ type.to_s
+ end
+ end
+
+ # Given a set of columns and an ORDER BY clause, returns the columns for a SELECT DISTINCT.
+      # PostgreSQL, MySQL, and Oracle override this for custom DISTINCT syntax; they
+      # require the order columns to appear in the SELECT.
+ #
+ # columns_for_distinct("posts.id", ["posts.created_at desc"])
+ #
+ def columns_for_distinct(columns, orders) # :nodoc:
+ columns
+ end
+
+ # Adds timestamps (+created_at+ and +updated_at+) columns to +table_name+.
+ # Additional options (like +:null+) are forwarded to #add_column.
+ #
+ # add_timestamps(:suppliers, null: true)
+ #
+ def add_timestamps(table_name, options = {})
+ options[:null] = false if options[:null].nil?
+
+ add_column table_name, :created_at, :datetime, options
+ add_column table_name, :updated_at, :datetime, options
+ end
+
+ # Removes the timestamp columns (+created_at+ and +updated_at+) from the table definition.
+ #
+ # remove_timestamps(:suppliers)
+ #
+ def remove_timestamps(table_name, options = {})
+ remove_column table_name, :updated_at
+ remove_column table_name, :created_at
+ end
+
+ def update_table_definition(table_name, base) #:nodoc:
+ Table.new(table_name, base)
+ end
+
+ def add_index_options(table_name, column_name, comment: nil, **options) # :nodoc:
+ column_names = index_column_names(column_name)
+
+ options.assert_valid_keys(:unique, :order, :name, :where, :length, :internal, :using, :algorithm, :type)
+
+ index_type = options[:type].to_s if options.key?(:type)
+ index_type ||= options[:unique] ? "UNIQUE" : ""
+ index_name = options[:name].to_s if options.key?(:name)
+ index_name ||= index_name(table_name, column_names)
+
+ if options.key?(:algorithm)
+ algorithm = index_algorithms.fetch(options[:algorithm]) {
+ raise ArgumentError.new("Algorithm must be one of the following: #{index_algorithms.keys.map(&:inspect).join(', ')}")
+ }
+ end
+
+ using = "USING #{options[:using]}" if options[:using].present?
+
+ if supports_partial_index?
+ index_options = options[:where] ? " WHERE #{options[:where]}" : ""
+ end
+
+ validate_index_length!(table_name, index_name, options.fetch(:internal, false))
+
+ if data_source_exists?(table_name) && index_name_exists?(table_name, index_name)
+ raise ArgumentError, "Index name '#{index_name}' on table '#{table_name}' already exists"
+ end
+ index_columns = quoted_columns_for_index(column_names, options).join(", ")
+
+ [index_name, index_type, index_columns, index_options, algorithm, using, comment]
+ end
+
+ def options_include_default?(options)
+ options.include?(:default) && !(options[:null] == false && options[:default].nil?)
+ end
+
+ # Changes the comment for a table or removes it if +nil+.
+ def change_table_comment(table_name, comment)
+ raise NotImplementedError, "#{self.class} does not support changing table comments"
+ end
+
+ # Changes the comment for a column or removes it if +nil+.
+ def change_column_comment(table_name, column_name, comment) #:nodoc:
+ raise NotImplementedError, "#{self.class} does not support changing column comments"
+ end
+
+ private
+ def column_options_keys
+ [:limit, :precision, :scale, :default, :null, :collation, :comment]
+ end
+
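+        # As an illustration, with order: { branch_id: :desc } a quoted column
+        # entry such as [:branch_id, '"branch_id"'] becomes
+        # [:branch_id, '"branch_id" DESC'] before the columns are joined.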
+ def add_index_sort_order(quoted_columns, **options)
+ if order = options[:order]
+ case order
+ when Hash
+ order = order.symbolize_keys
+ quoted_columns.each { |name, column| column << " #{order[name].upcase}" if order[name].present? }
+ when String
+ quoted_columns.each { |name, column| column << " #{order.upcase}" if order.present? }
+ end
+ end
+
+ quoted_columns
+ end
+
+ # Overridden by the MySQL adapter for supporting index lengths
+ def add_options_for_index_columns(quoted_columns, **options)
+ if supports_index_sort_order?
+ quoted_columns = add_index_sort_order(quoted_columns, options)
+ end
+
+ quoted_columns
+ end
+
+ def quoted_columns_for_index(column_names, **options)
+ return [column_names] if column_names.is_a?(String)
+
+ quoted_columns = Hash[column_names.map { |name| [name.to_sym, quote_column_name(name).dup] }]
+ add_options_for_index_columns(quoted_columns, options).values
+ end
+
+ def index_name_for_remove(table_name, options = {})
+ return options[:name] if can_remove_index_by_name?(options)
+
+ checks = []
+
+ if options.is_a?(Hash)
+ checks << lambda { |i| i.name == options[:name].to_s } if options.key?(:name)
+ column_names = index_column_names(options[:column])
+ else
+ column_names = index_column_names(options)
+ end
+
+ if column_names.present?
+ checks << lambda { |i| index_name(table_name, i.columns) == index_name(table_name, column_names) }
+ end
+
+ raise ArgumentError, "No name or columns specified" if checks.none?
+
+ matching_indexes = indexes(table_name).select { |i| checks.all? { |check| check[i] } }
+
+ if matching_indexes.count > 1
+ raise ArgumentError, "Multiple indexes found on #{table_name} columns #{column_names}. " \
+ "Specify an index name from #{matching_indexes.map(&:name).join(', ')}"
+ elsif matching_indexes.none?
+ raise ArgumentError, "No indexes found on #{table_name} with the options provided."
+ else
+ matching_indexes.first.name
+ end
+ end
+
+ def rename_table_indexes(table_name, new_name)
+ indexes(new_name).each do |index|
+ generated_index_name = index_name(table_name, column: index.columns)
+ if generated_index_name == index.name
+ rename_index new_name, generated_index_name, index_name(new_name, column: index.columns)
+ end
+ end
+ end
+
+ def rename_column_indexes(table_name, column_name, new_column_name)
+ column_name, new_column_name = column_name.to_s, new_column_name.to_s
+ indexes(table_name).each do |index|
+ next unless index.columns.include?(new_column_name)
+ old_columns = index.columns.dup
+ old_columns[old_columns.index(new_column_name)] = column_name
+ generated_index_name = index_name(table_name, column: old_columns)
+ if generated_index_name == index.name
+ rename_index table_name, generated_index_name, index_name(table_name, column: index.columns)
+ end
+ end
+ end
+
+ def schema_creation
+ SchemaCreation.new(self)
+ end
+
+ def create_table_definition(*args)
+ TableDefinition.new(*args)
+ end
+
+ def create_alter_table(name)
+ AlterTable.new create_table_definition(name)
+ end
+
+ def fetch_type_metadata(sql_type)
+ cast_type = lookup_cast_type(sql_type)
+ SqlTypeMetadata.new(
+ sql_type: sql_type,
+ type: cast_type.type,
+ limit: cast_type.limit,
+ precision: cast_type.precision,
+ scale: cast_type.scale,
+ )
+ end
+
+ def index_column_names(column_names)
+ if column_names.is_a?(String) && /\W/.match?(column_names)
+ column_names
+ else
+ Array(column_names)
+ end
+ end
+
+ def index_name_options(column_names)
+ if column_names.is_a?(String) && /\W/.match?(column_names)
+ column_names = column_names.scan(/\w+/).join("_")
+ end
+
+ { column: column_names }
+ end
+
+ def foreign_key_name(table_name, options)
+ options.fetch(:name) do
+ identifier = "#{table_name}_#{options.fetch(:column)}_fk"
+ hashed_identifier = Digest::SHA256.hexdigest(identifier).first(10)
+
+ "fk_rails_#{hashed_identifier}"
+ end
+ end
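+
+ # Illustrative results (the hashed suffix below is made up, not a real digest):
+ #
+ #   foreign_key_name(:articles, column: :author_id)
+ #   # => "fk_rails_0123456789"  -- "fk_rails_" plus the first 10 hex chars of
+ #   #    SHA256("articles_author_id_fk")
+ #
+ #   foreign_key_name(:articles, name: "fk_custom")
+ #   # => "fk_custom"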
+
+ def foreign_key_for(from_table, options_or_to_table = {})
+ return unless supports_foreign_keys?
+ foreign_keys(from_table).detect { |fk| fk.defined_for? options_or_to_table }
+ end
+
+ def foreign_key_for!(from_table, options_or_to_table = {})
+ foreign_key_for(from_table, options_or_to_table) || \
+ raise(ArgumentError, "Table '#{from_table}' has no foreign key for #{options_or_to_table}")
+ end
+
+ def extract_foreign_key_action(specifier)
+ case specifier
+ when "CASCADE"; :cascade
+ when "SET NULL"; :nullify
+ when "RESTRICT"; :restrict
+ end
+ end
+
+ def validate_index_length!(table_name, new_name, internal = false)
+ max_index_length = internal ? index_name_length : allowed_index_name_length
+
+ if new_name.length > max_index_length
+ raise ArgumentError, "Index name '#{new_name}' on table '#{table_name}' is too long; the limit is #{allowed_index_name_length} characters"
+ end
+ end
+
+ def extract_new_default_value(default_or_changes)
+ if default_or_changes.is_a?(Hash) && default_or_changes.has_key?(:from) && default_or_changes.has_key?(:to)
+ default_or_changes[:to]
+ else
+ default_or_changes
+ end
+ end
+
+ def can_remove_index_by_name?(options)
+ options.is_a?(Hash) && options.key?(:name) && options.except(:name, :algorithm).empty?
+ end
+
+ def insert_versions_sql(versions)
+ sm_table = quote_table_name(ActiveRecord::SchemaMigration.table_name)
+
+ if versions.is_a?(Array)
+ sql = "INSERT INTO #{sm_table} (version) VALUES\n".dup
+ sql << versions.map { |v| "(#{quote(v)})" }.join(",\n")
+ sql << ";\n\n"
+ sql
+ else
+ "INSERT INTO #{sm_table} (version) VALUES (#{quote(versions)});"
+ end
+ end
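+
+ # Illustrative output (version numbers are made up; table quoting is
+ # adapter-specific):
+ #
+ #   insert_versions_sql(["20170101000000", "20170102000000"])
+ #   # => INSERT INTO "schema_migrations" (version) VALUES
+ #   #    ('20170101000000'),
+ #   #    ('20170102000000');
+ #
+ #   insert_versions_sql("20170101000000")
+ #   # => INSERT INTO "schema_migrations" (version) VALUES ('20170101000000');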
+
+ def data_source_sql(name = nil, type: nil)
+ raise NotImplementedError
+ end
+
+ def quoted_scope(name = nil, type: nil)
+ raise NotImplementedError
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/connection_adapters/abstract/transaction.rb b/activerecord/lib/active_record/connection_adapters/abstract/transaction.rb
new file mode 100644
index 0000000000..147e16e9fa
--- /dev/null
+++ b/activerecord/lib/active_record/connection_adapters/abstract/transaction.rb
@@ -0,0 +1,274 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ module ConnectionAdapters
+ class TransactionState
+ def initialize(state = nil)
+ @state = state
+ @children = []
+ end
+
+ def add_child(state)
+ @children << state
+ end
+
+ def finalized?
+ @state
+ end
+
+ def committed?
+ @state == :committed
+ end
+
+ def rolledback?
+ @state == :rolledback
+ end
+
+ def fully_completed?
+ completed?
+ end
+
+ def completed?
+ committed? || rolledback?
+ end
+
+ def set_state(state)
+ ActiveSupport::Deprecation.warn(<<-MSG.squish)
+ The set_state method is deprecated and will be removed in
+ Rails 6.0. Please use rollback! or commit! to set transaction
+ state directly.
+ MSG
+ case state
+ when :rolledback
+ rollback!
+ when :committed
+ commit!
+ when nil
+ nullify!
+ else
+ raise ArgumentError, "Invalid transaction state: #{state}"
+ end
+ end
+
+ def rollback!
+ @children.each { |c| c.rollback! }
+ @state = :rolledback
+ end
+
+ def commit!
+ @state = :committed
+ end
+
+ def nullify!
+ @state = nil
+ end
+ end
+
+ class NullTransaction #:nodoc:
+ def initialize; end
+ def state; end
+ def closed?; true; end
+ def open?; false; end
+ def joinable?; false; end
+ def add_record(record); end
+ end
+
+ class Transaction #:nodoc:
+ attr_reader :connection, :state, :records, :savepoint_name
+ attr_writer :joinable
+
+ def initialize(connection, options, run_commit_callbacks: false)
+ @connection = connection
+ @state = TransactionState.new
+ @records = []
+ @joinable = options.fetch(:joinable, true)
+ @run_commit_callbacks = run_commit_callbacks
+ end
+
+ def add_record(record)
+ records << record
+ end
+
+ def rollback
+ @state.rollback!
+ end
+
+ def rollback_records
+ ite = records.uniq
+ while record = ite.shift
+ record.rolledback!(force_restore_state: full_rollback?)
+ end
+ ensure
+ ite.each do |i|
+ i.rolledback!(force_restore_state: full_rollback?, should_run_callbacks: false)
+ end
+ end
+
+ def commit
+ @state.commit!
+ end
+
+ def before_commit_records
+ records.uniq.each(&:before_committed!) if @run_commit_callbacks
+ end
+
+ def commit_records
+ ite = records.uniq
+ while record = ite.shift
+ if @run_commit_callbacks
+ record.committed!
+ else
+ # if not running callbacks, only adds the record to the parent transaction
+ record.add_to_transaction
+ end
+ end
+ ensure
+ ite.each { |i| i.committed!(should_run_callbacks: false) }
+ end
+
+ def full_rollback?; true; end
+ def joinable?; @joinable; end
+ def closed?; false; end
+ def open?; !closed?; end
+ end
+
+ class SavepointTransaction < Transaction
+ def initialize(connection, savepoint_name, parent_transaction, options, *args)
+ super(connection, options, *args)
+
+ parent_transaction.state.add_child(@state)
+
+ if options[:isolation]
+ raise ActiveRecord::TransactionIsolationError, "cannot set transaction isolation in a nested transaction"
+ end
+ connection.create_savepoint(@savepoint_name = savepoint_name)
+ end
+
+ def rollback
+ connection.rollback_to_savepoint(savepoint_name)
+ super
+ end
+
+ def commit
+ connection.release_savepoint(savepoint_name)
+ super
+ end
+
+ def full_rollback?; false; end
+ end
+
+ class RealTransaction < Transaction
+ def initialize(connection, options, *args)
+ super
+ if options[:isolation]
+ connection.begin_isolated_db_transaction(options[:isolation])
+ else
+ connection.begin_db_transaction
+ end
+ end
+
+ def rollback
+ connection.rollback_db_transaction
+ super
+ end
+
+ def commit
+ connection.commit_db_transaction
+ super
+ end
+ end
+
+ class TransactionManager #:nodoc:
+ def initialize(connection)
+ @stack = []
+ @connection = connection
+ end
+
+ def begin_transaction(options = {})
+ @connection.lock.synchronize do
+ run_commit_callbacks = !current_transaction.joinable?
+ transaction =
+ if @stack.empty?
+ RealTransaction.new(@connection, options, run_commit_callbacks: run_commit_callbacks)
+ else
+ SavepointTransaction.new(@connection, "active_record_#{@stack.size}", @stack.last, options,
+ run_commit_callbacks: run_commit_callbacks)
+ end
+
+ @stack.push(transaction)
+ transaction
+ end
+ end
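+
+ # Sketch of the stack this builds for nested calls; savepoint names follow
+ # the "active_record_#{@stack.size}" scheme above:
+ #
+ #   begin_transaction   # stack: [RealTransaction]
+ #   begin_transaction   # stack: [RealTransaction, SavepointTransaction("active_record_1")]
+ #   begin_transaction   # stack: [..., SavepointTransaction("active_record_2")]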
+
+ def commit_transaction
+ @connection.lock.synchronize do
+ transaction = @stack.last
+
+ begin
+ transaction.before_commit_records
+ ensure
+ @stack.pop
+ end
+
+ transaction.commit
+ transaction.commit_records
+ end
+ end
+
+ def rollback_transaction(transaction = nil)
+ @connection.lock.synchronize do
+ transaction ||= @stack.pop
+ transaction.rollback
+ transaction.rollback_records
+ end
+ end
+
+ def within_new_transaction(options = {})
+ @connection.lock.synchronize do
+ begin
+ transaction = begin_transaction options
+ yield
+ rescue Exception => error
+ if transaction
+ rollback_transaction
+ after_failure_actions(transaction, error)
+ end
+ raise
+ ensure
+ unless error
+ if Thread.current.status == "aborting"
+ rollback_transaction if transaction
+ else
+ begin
+ commit_transaction
+ rescue Exception
+ rollback_transaction(transaction) unless transaction.state.completed?
+ raise
+ end
+ end
+ end
+ end
+ end
+ end
+
+ def open_transactions
+ @stack.size
+ end
+
+ def current_transaction
+ @stack.last || NULL_TRANSACTION
+ end
+
+ private
+
+ NULL_TRANSACTION = NullTransaction.new
+
+ # Deallocate invalidated prepared statements outside of the transaction
+ def after_failure_actions(transaction, error)
+ return unless transaction.is_a?(RealTransaction)
+ return unless error.is_a?(ActiveRecord::PreparedStatementCacheExpired)
+ @connection.clear_cache!
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/connection_adapters/abstract_adapter.rb b/activerecord/lib/active_record/connection_adapters/abstract_adapter.rb
new file mode 100644
index 0000000000..7645cf7825
--- /dev/null
+++ b/activerecord/lib/active_record/connection_adapters/abstract_adapter.rb
@@ -0,0 +1,600 @@
+# frozen_string_literal: true
+
+require_relative "../type"
+require_relative "determine_if_preparable_visitor"
+require_relative "schema_cache"
+require_relative "sql_type_metadata"
+require_relative "abstract/schema_dumper"
+require_relative "abstract/schema_creation"
+require "arel/collectors/bind"
+require "arel/collectors/composite"
+require "arel/collectors/sql_string"
+require "arel/collectors/substitute_binds"
+
+module ActiveRecord
+ module ConnectionAdapters # :nodoc:
+ extend ActiveSupport::Autoload
+
+ autoload :Column
+ autoload :ConnectionSpecification
+
+ autoload_at "active_record/connection_adapters/abstract/schema_definitions" do
+ autoload :IndexDefinition
+ autoload :ColumnDefinition
+ autoload :ChangeColumnDefinition
+ autoload :ForeignKeyDefinition
+ autoload :TableDefinition
+ autoload :Table
+ autoload :AlterTable
+ autoload :ReferenceDefinition
+ end
+
+ autoload_at "active_record/connection_adapters/abstract/connection_pool" do
+ autoload :ConnectionHandler
+ end
+
+ autoload_under "abstract" do
+ autoload :SchemaStatements
+ autoload :DatabaseStatements
+ autoload :DatabaseLimits
+ autoload :Quoting
+ autoload :ConnectionPool
+ autoload :QueryCache
+ autoload :Savepoints
+ end
+
+ autoload_at "active_record/connection_adapters/abstract/transaction" do
+ autoload :TransactionManager
+ autoload :NullTransaction
+ autoload :RealTransaction
+ autoload :SavepointTransaction
+ autoload :TransactionState
+ end
+
+ # Active Record supports multiple database systems. AbstractAdapter and
+ # related classes form the abstraction layer which makes this possible.
+ # An AbstractAdapter represents a connection to a database, and provides an
+ # abstract interface for database-specific functionality such as establishing
+ # a connection, escaping values, building the right SQL fragments for +:offset+
+ # and +:limit+ options, etc.
+ #
+ # All the concrete database adapters follow the interface laid down in this class.
+ # {ActiveRecord::Base.connection}[rdoc-ref:ConnectionHandling#connection] returns an AbstractAdapter object, which
+ # you can use.
+ #
+ # Most of the methods in the adapter are useful during migrations. Most
+ # notably, the instance methods provided by SchemaStatements are very useful.
+ class AbstractAdapter
+ ADAPTER_NAME = "Abstract".freeze
+ include ActiveSupport::Callbacks
+ define_callbacks :checkout, :checkin
+
+ include Quoting, DatabaseStatements, SchemaStatements
+ include DatabaseLimits
+ include QueryCache
+ include ColumnDumper
+ include Savepoints
+
+ SIMPLE_INT = /\A\d+\z/
+
+ attr_accessor :visitor, :pool
+ attr_reader :schema_cache, :owner, :logger, :prepared_statements, :lock
+ alias :in_use? :owner
+
+ def self.type_cast_config_to_integer(config)
+ if config =~ SIMPLE_INT
+ config.to_i
+ else
+ config
+ end
+ end
+
+ def self.type_cast_config_to_boolean(config)
+ if config == "false"
+ false
+ else
+ config
+ end
+ end
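+
+ # Illustrative behaviour of the two config helpers above:
+ #
+ #   type_cast_config_to_integer("5")       # => 5
+ #   type_cast_config_to_integer("5000ms")  # => "5000ms" (not SIMPLE_INT, left as is)
+ #   type_cast_config_to_boolean("false")   # => false
+ #   type_cast_config_to_boolean(true)      # => true (anything but "false" passes through)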
+
+ def initialize(connection, logger = nil, config = {}) # :nodoc:
+ super()
+
+ @connection = connection
+ @owner = nil
+ @instrumenter = ActiveSupport::Notifications.instrumenter
+ @logger = logger
+ @config = config
+ @pool = nil
+ @schema_cache = SchemaCache.new self
+ @quoted_column_names, @quoted_table_names = {}, {}
+ @visitor = arel_visitor
+ @lock = Monitor.new
+
+ if self.class.type_cast_config_to_boolean(config.fetch(:prepared_statements) { true })
+ @prepared_statements = true
+ @visitor.extend(DetermineIfPreparableVisitor)
+ else
+ @prepared_statements = false
+ end
+ end
+
+ class Version
+ include Comparable
+
+ def initialize(version_string)
+ @version = version_string.split(".").map(&:to_i)
+ end
+
+ def <=>(version_string)
+ @version <=> version_string.split(".").map(&:to_i)
+ end
+ end
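+
+ # The wrapper above makes version strings comparable segment by segment
+ # (values below are illustrative):
+ #
+ #   Version.new("5.7.9")  >= "5.7.6"   # => true
+ #   Version.new("10.1.8") <  "9.9.9"   # => false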
+
+ def valid_type?(type) # :nodoc:
+ !native_database_types[type].nil?
+ end
+
+ # this method must only be called while holding the connection pool's mutex
+ def lease
+ if in_use?
+ msg = "Cannot lease connection, ".dup
+ if @owner == Thread.current
+ msg << "it is already leased by the current thread."
+ else
+ msg << "it is already in use by a different thread: #{@owner}. " \
+ "Current thread: #{Thread.current}."
+ end
+ raise ActiveRecordError, msg
+ end
+
+ @owner = Thread.current
+ end
+
+ def schema_cache=(cache)
+ cache.connection = self
+ @schema_cache = cache
+ end
+
+ # this method must only be called while holding the connection pool's mutex
+ def expire
+ if in_use?
+ if @owner != Thread.current
+ raise ActiveRecordError, "Cannot expire connection, " \
+ "it is owned by a different thread: #{@owner}. " \
+ "Current thread: #{Thread.current}."
+ end
+
+ @owner = nil
+ else
+ raise ActiveRecordError, "Cannot expire connection, it is not currently leased."
+ end
+ end
+
+ # this method must only be called while holding the connection pool's mutex (and a desire for segfaults)
+ def steal! # :nodoc:
+ if in_use?
+ if @owner != Thread.current
+ pool.send :remove_connection_from_thread_cache, self, @owner
+
+ @owner = Thread.current
+ end
+ else
+ raise ActiveRecordError, "Cannot steal connection, it is not currently leased."
+ end
+ end
+
+ def unprepared_statement
+ old_prepared_statements, @prepared_statements = @prepared_statements, false
+ yield
+ ensure
+ @prepared_statements = old_prepared_statements
+ end
+
+ # Returns the human-readable name of the adapter. Use mixed case - one
+ # can always use downcase if needed.
+ def adapter_name
+ self.class::ADAPTER_NAME
+ end
+
+ def supports_migrations? # :nodoc:
+ true
+ end
+ deprecate :supports_migrations?
+
+ def supports_primary_key? # :nodoc:
+ true
+ end
+ deprecate :supports_primary_key?
+
+ # Does this adapter support DDL rollbacks in transactions? That is, would
+ # CREATE TABLE or ALTER TABLE get rolled back by a transaction?
+ def supports_ddl_transactions?
+ false
+ end
+
+ def supports_bulk_alter?
+ false
+ end
+
+ # Does this adapter support savepoints?
+ def supports_savepoints?
+ false
+ end
+
+ # Does this adapter support application-enforced advisory locking?
+ def supports_advisory_locks?
+ false
+ end
+
+ # Should primary key values be selected from their corresponding
+ # sequence before the insert statement? If true, next_sequence_value
+ # is called before each insert to set the record's primary key.
+ def prefetch_primary_key?(table_name = nil)
+ false
+ end
+
+ # Does this adapter support index sort order?
+ def supports_index_sort_order?
+ false
+ end
+
+ # Does this adapter support partial indices?
+ def supports_partial_index?
+ false
+ end
+
+ # Does this adapter support expression indices?
+ def supports_expression_index?
+ false
+ end
+
+ # Does this adapter support explain?
+ def supports_explain?
+ false
+ end
+
+ # Does this adapter support setting the isolation level for a transaction?
+ def supports_transaction_isolation?
+ false
+ end
+
+ # Does this adapter support database extensions?
+ def supports_extensions?
+ false
+ end
+
+ # Does this adapter support creating indexes in the same statement as
+ # creating the table?
+ def supports_indexes_in_create?
+ false
+ end
+
+ # Does this adapter support creating foreign key constraints?
+ def supports_foreign_keys?
+ false
+ end
+
+ # Does this adapter support creating foreign key constraints
+ # in the same statement as creating the table?
+ def supports_foreign_keys_in_create?
+ supports_foreign_keys?
+ end
+
+ # Does this adapter support views?
+ def supports_views?
+ false
+ end
+
+ # Does this adapter support datetime with precision?
+ def supports_datetime_with_precision?
+ false
+ end
+
+ # Does this adapter support json data type?
+ def supports_json?
+ false
+ end
+
+ # Does this adapter support metadata comments on database objects (tables, columns, indexes)?
+ def supports_comments?
+ false
+ end
+
+ # Can comments for tables, columns, and indexes be specified in create/alter table statements?
+ def supports_comments_in_create?
+ false
+ end
+
+ # Does this adapter support multi-value insert?
+ def supports_multi_insert?
+ true
+ end
+
+ # Does this adapter support virtual columns?
+ def supports_virtual_columns?
+ false
+ end
+
+ # This is meant to be implemented by the adapters that support extensions
+ def disable_extension(name)
+ end
+
+ # This is meant to be implemented by the adapters that support extensions
+ def enable_extension(name)
+ end
+
+ # This is meant to be implemented by the adapters that support advisory
+ # locks
+ #
+ # Return true if we got the lock, otherwise false
+ def get_advisory_lock(lock_id) # :nodoc:
+ end
+
+ # This is meant to be implemented by the adapters that support advisory
+ # locks.
+ #
+ # Return true if we released the lock, otherwise false
+ def release_advisory_lock(lock_id) # :nodoc:
+ end
+
+ # A list of extensions, to be filled in by adapters that support them.
+ def extensions
+ []
+ end
+
+ # A list of index algorithms, to be filled by adapters that support them.
+ def index_algorithms
+ {}
+ end
+
+ # REFERENTIAL INTEGRITY ====================================
+
+ # Override to turn off referential integrity while executing <tt>&block</tt>.
+ def disable_referential_integrity
+ yield
+ end
+
+ # CONNECTION MANAGEMENT ====================================
+
+ # Checks whether the connection to the database is still active. This includes
+ # checking whether the database is actually capable of responding, i.e. whether
+ # the connection isn't stale.
+ def active?
+ end
+
+ # Disconnects from the database if already connected, and establishes a
+ # new connection with the database. Implementors should call super if they
+ # override the default implementation.
+ def reconnect!
+ clear_cache!
+ reset_transaction
+ end
+
+ # Disconnects from the database if already connected. Otherwise, this
+ # method does nothing.
+ def disconnect!
+ clear_cache!
+ reset_transaction
+ end
+
+ # Reset the state of this connection, directing the DBMS to clear
+ # transactions and other connection-related server-side state. Usually a
+ # database-dependent operation.
+ #
+ # The default implementation does nothing; the implementation should be
+ # overridden by concrete adapters.
+ def reset!
+ # this should be overridden by concrete adapters
+ end
+
+ ###
+ # Clear any caching the database adapter may be doing, for example
+ # clearing the prepared statement cache. This is database specific.
+ def clear_cache!
+ # this should be overridden by concrete adapters
+ end
+
+ # Returns true if it's required to reload the connection between requests in development mode.
+ def requires_reloading?
+ false
+ end
+
+ # Checks whether the connection to the database is still active (i.e. not stale).
+ # This is done under the hood by calling #active?. If the connection
+ # is no longer active, then this method will reconnect to the database.
+ def verify!(*ignored)
+ if ignored.size > 0
+ ActiveSupport::Deprecation.warn("Passing arguments to the #verify method of the connection has no effect and has been deprecated. Please remove all arguments from the #verify method call.")
+ end
+ reconnect! unless active?
+ end
+
+ # Provides access to the underlying database driver for this adapter. For
+ # example, this method returns a Mysql2::Client object in case of Mysql2Adapter,
+ # and a PG::Connection object in case of PostgreSQLAdapter.
+ #
+ # This is useful for when you need to call a proprietary method such as
+ # PostgreSQL's lo_* methods.
+ def raw_connection
+ @connection
+ end
+
+ def case_sensitive_comparison(table, attribute, column, value) # :nodoc:
+ table[attribute].eq(value)
+ end
+
+ def case_insensitive_comparison(table, attribute, column, value) # :nodoc:
+ if can_perform_case_insensitive_comparison_for?(column)
+ table[attribute].lower.eq(table.lower(value))
+ else
+ table[attribute].eq(value)
+ end
+ end
+
+ def can_perform_case_insensitive_comparison_for?(column)
+ true
+ end
+ private :can_perform_case_insensitive_comparison_for?
+
+ # Check the connection back in to the connection pool
+ def close
+ pool.checkin self
+ end
+
+ def column_name_for_operation(operation, node) # :nodoc:
+ visitor.accept(node, collector).value
+ end
+
+ def default_index_type?(index) # :nodoc:
+ index.using.nil?
+ end
+
+ private
+ def type_map
+ @type_map ||= Type::TypeMap.new.tap do |mapping|
+ initialize_type_map(mapping)
+ end
+ end
+
+ def initialize_type_map(m = type_map)
+ register_class_with_limit m, %r(boolean)i, Type::Boolean
+ register_class_with_limit m, %r(char)i, Type::String
+ register_class_with_limit m, %r(binary)i, Type::Binary
+ register_class_with_limit m, %r(text)i, Type::Text
+ register_class_with_precision m, %r(date)i, Type::Date
+ register_class_with_precision m, %r(time)i, Type::Time
+ register_class_with_precision m, %r(datetime)i, Type::DateTime
+ register_class_with_limit m, %r(float)i, Type::Float
+ register_class_with_limit m, %r(int)i, Type::Integer
+
+ m.alias_type %r(blob)i, "binary"
+ m.alias_type %r(clob)i, "text"
+ m.alias_type %r(timestamp)i, "datetime"
+ m.alias_type %r(numeric)i, "decimal"
+ m.alias_type %r(number)i, "decimal"
+ m.alias_type %r(double)i, "float"
+
+ m.register_type(%r(decimal)i) do |sql_type|
+ scale = extract_scale(sql_type)
+ precision = extract_precision(sql_type)
+
+ if scale == 0
+ # FIXME: Remove this class as well
+ Type::DecimalWithoutScale.new(precision: precision)
+ else
+ Type::Decimal.new(precision: precision, scale: scale)
+ end
+ end
+ end
+
+ def reload_type_map
+ type_map.clear
+ initialize_type_map
+ end
+
+ def register_class_with_limit(mapping, key, klass)
+ mapping.register_type(key) do |*args|
+ limit = extract_limit(args.last)
+ klass.new(limit: limit)
+ end
+ end
+
+ def register_class_with_precision(mapping, key, klass)
+ mapping.register_type(key) do |*args|
+ precision = extract_precision(args.last)
+ klass.new(precision: precision)
+ end
+ end
+
+ def extract_scale(sql_type)
+ case sql_type
+ when /\((\d+)\)/ then 0
+ when /\((\d+)(,(\d+))\)/ then $3.to_i
+ end
+ end
+
+ def extract_precision(sql_type)
+ $1.to_i if sql_type =~ /\((\d+)(,\d+)?\)/
+ end
+
+ def extract_limit(sql_type)
+ case sql_type
+ when /^bigint/i
+ 8
+ when /\((.*)\)/
+ $1.to_i
+ end
+ end
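+
+ # How the extraction helpers above read a SQL type string (illustrative inputs):
+ #
+ #   extract_precision("decimal(10,2)")  # => 10
+ #   extract_scale("decimal(10,2)")      # => 2
+ #   extract_scale("decimal(10)")        # => 0
+ #   extract_limit("varchar(255)")       # => 255
+ #   extract_limit("bigint unsigned")    # => 8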
+
+ def translate_exception_class(e, sql)
+ begin
+ message = "#{e.class.name}: #{e.message}: #{sql}"
+ rescue Encoding::CompatibilityError
+ message = "#{e.class.name}: #{e.message.force_encoding sql.encoding}: #{sql}"
+ end
+
+ exception = translate_exception(e, message)
+ exception.set_backtrace e.backtrace
+ exception
+ end
+
+ def log(sql, name = "SQL", binds = [], type_casted_binds = [], statement_name = nil) # :doc:
+ @instrumenter.instrument(
+ "sql.active_record",
+ sql: sql,
+ name: name,
+ binds: binds,
+ type_casted_binds: type_casted_binds,
+ statement_name: statement_name,
+ connection_id: object_id) do
+ begin
+ @lock.synchronize do
+ yield
+ end
+ rescue => e
+ raise translate_exception_class(e, sql)
+ end
+ end
+ end
+
+ def translate_exception(exception, message)
+ # override in derived class
+ case exception
+ when RuntimeError
+ exception
+ else
+ ActiveRecord::StatementInvalid.new(message)
+ end
+ end
+
+ def without_prepared_statement?(binds)
+ !prepared_statements || binds.empty?
+ end
+
+ def column_for(table_name, column_name)
+ column_name = column_name.to_s
+ columns(table_name).detect { |c| c.name == column_name } ||
+ raise(ActiveRecordError, "No such column: #{table_name}.#{column_name}")
+ end
+
+ def collector
+ if prepared_statements
+ Arel::Collectors::Composite.new(
+ Arel::Collectors::SQLString.new,
+ Arel::Collectors::Bind.new,
+ )
+ else
+ Arel::Collectors::SubstituteBinds.new(
+ self,
+ Arel::Collectors::SQLString.new,
+ )
+ end
+ end
+
+ def arel_visitor
+ Arel::Visitors::ToSql.new(self)
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/connection_adapters/abstract_mysql_adapter.rb b/activerecord/lib/active_record/connection_adapters/abstract_mysql_adapter.rb
new file mode 100644
index 0000000000..8a9c497918
--- /dev/null
+++ b/activerecord/lib/active_record/connection_adapters/abstract_mysql_adapter.rb
@@ -0,0 +1,888 @@
+# frozen_string_literal: true
+
+require_relative "abstract_adapter"
+require_relative "statement_pool"
+require_relative "mysql/column"
+require_relative "mysql/explain_pretty_printer"
+require_relative "mysql/quoting"
+require_relative "mysql/schema_creation"
+require_relative "mysql/schema_definitions"
+require_relative "mysql/schema_dumper"
+require_relative "mysql/schema_statements"
+require_relative "mysql/type_metadata"
+
+require "active_support/core_ext/string/strip"
+
+module ActiveRecord
+ module ConnectionAdapters
+ class AbstractMysqlAdapter < AbstractAdapter
+ include MySQL::Quoting
+ include MySQL::ColumnDumper
+ include MySQL::SchemaStatements
+
+ def update_table_definition(table_name, base) # :nodoc:
+ MySQL::Table.new(table_name, base)
+ end
+
+ ##
+ # :singleton-method:
+ # By default, the Mysql2Adapter will consider all columns of type <tt>tinyint(1)</tt>
+ # as boolean. If you wish to disable this emulation you can add the following line
+ # to your application.rb file:
+ #
+ # ActiveRecord::ConnectionAdapters::Mysql2Adapter.emulate_booleans = false
+ class_attribute :emulate_booleans, default: true
+
+ NATIVE_DATABASE_TYPES = {
+ primary_key: "bigint auto_increment PRIMARY KEY",
+ string: { name: "varchar", limit: 255 },
+ text: { name: "text", limit: 65535 },
+ integer: { name: "int", limit: 4 },
+ float: { name: "float" },
+ decimal: { name: "decimal" },
+ datetime: { name: "datetime" },
+ timestamp: { name: "timestamp" },
+ time: { name: "time" },
+ date: { name: "date" },
+ binary: { name: "blob", limit: 65535 },
+ boolean: { name: "tinyint", limit: 1 },
+ json: { name: "json" },
+ }
+
+ class StatementPool < ConnectionAdapters::StatementPool
+ private def dealloc(stmt)
+ stmt[:stmt].close
+ end
+ end
+
+ def initialize(connection, logger, connection_options, config)
+ super(connection, logger, config)
+
+ @statements = StatementPool.new(self.class.type_cast_config_to_integer(config[:statement_limit]))
+
+ if version < "5.1.10"
+ raise "Your version of MySQL (#{version_string}) is too old. Active Record supports MySQL >= 5.1.10."
+ end
+ end
+
+ def version #:nodoc:
+ @version ||= Version.new(version_string)
+ end
+
+ def mariadb? # :nodoc:
+ /mariadb/i.match?(full_version)
+ end
+
+ def supports_bulk_alter? #:nodoc:
+ true
+ end
+
+ def supports_index_sort_order?
+ !mariadb? && version >= "8.0.1"
+ end
+
+ def supports_transaction_isolation?
+ true
+ end
+
+ def supports_explain?
+ true
+ end
+
+ def supports_indexes_in_create?
+ true
+ end
+
+ def supports_foreign_keys?
+ true
+ end
+
+ def supports_views?
+ true
+ end
+
+ def supports_datetime_with_precision?
+ if mariadb?
+ version >= "5.3.0"
+ else
+ version >= "5.6.4"
+ end
+ end
+
+ def supports_virtual_columns?
+ if mariadb?
+ version >= "5.2.0"
+ else
+ version >= "5.7.5"
+ end
+ end
+
+ def supports_advisory_locks?
+ true
+ end
+
+ def get_advisory_lock(lock_name, timeout = 0) # :nodoc:
+ query_value("SELECT GET_LOCK(#{quote(lock_name)}, #{timeout})") == 1
+ end
+
+ def release_advisory_lock(lock_name) # :nodoc:
+ query_value("SELECT RELEASE_LOCK(#{quote(lock_name)})") == 1
+ end
+
+ def native_database_types
+ NATIVE_DATABASE_TYPES
+ end
+
+ def index_algorithms
+ { default: "ALGORITHM = DEFAULT".dup, copy: "ALGORITHM = COPY".dup, inplace: "ALGORITHM = INPLACE".dup }
+ end
+
+ # HELPER METHODS ===========================================
+
+ # The two drivers have slightly different ways of yielding hashes of results, so
+ # this method must be implemented to provide a uniform interface.
+ def each_hash(result) # :nodoc:
+ raise NotImplementedError
+ end
+
+ # Must return the MySQL error number from the exception, if the exception has an
+ # error number.
+ def error_number(exception) # :nodoc:
+ raise NotImplementedError
+ end
+
+ # REFERENTIAL INTEGRITY ====================================
+
+ def disable_referential_integrity #:nodoc:
+ old = query_value("SELECT @@FOREIGN_KEY_CHECKS")
+
+ begin
+ update("SET FOREIGN_KEY_CHECKS = 0")
+ yield
+ ensure
+ update("SET FOREIGN_KEY_CHECKS = #{old}")
+ end
+ end
+
+ # CONNECTION MANAGEMENT ====================================
+
+ # Clears the prepared statements cache.
+ def clear_cache!
+ reload_type_map
+ @statements.clear
+ end
+
+ #--
+ # DATABASE STATEMENTS ======================================
+ #++
+
+ def explain(arel, binds = [])
+ sql, binds = to_sql(arel, binds)
+ sql = "EXPLAIN #{sql}"
+ start = Time.now
+ result = exec_query(sql, "EXPLAIN", binds)
+ elapsed = Time.now - start
+
+ MySQL::ExplainPrettyPrinter.new.pp(result, elapsed)
+ end
+
+ # Executes the SQL statement in the context of this connection.
+ def execute(sql, name = nil)
+ log(sql, name) do
+ ActiveSupport::Dependencies.interlock.permit_concurrent_loads do
+ @connection.query(sql)
+ end
+ end
+ end
+
+ # Mysql2Adapter doesn't have to free a result after using it, but we use this method
+ # to write stuff in an abstract way without concerning ourselves about whether it
+ # needs to be explicitly freed or not.
+ def execute_and_free(sql, name = nil) # :nodoc:
+ yield execute(sql, name)
+ end
+
+ def begin_db_transaction
+ execute "BEGIN"
+ end
+
+ def begin_isolated_db_transaction(isolation)
+ execute "SET TRANSACTION ISOLATION LEVEL #{transaction_isolation_levels.fetch(isolation)}"
+ begin_db_transaction
+ end
+
+ def commit_db_transaction #:nodoc:
+ execute "COMMIT"
+ end
+
+ def exec_rollback_db_transaction #:nodoc:
+ execute "ROLLBACK"
+ end
+
+ # In the simple case, MySQL allows us to place JOINs directly into the UPDATE
+ # query. However, this does not allow for LIMIT, OFFSET and ORDER. To support
+ # these, we must use a subquery.
+ def join_to_update(update, select, key) # :nodoc:
+ if select.limit || select.offset || select.orders.any?
+ super
+ else
+ update.table select.source
+ update.wheres = select.constraints
+ end
+ end
+
+ def empty_insert_statement_value
+ "VALUES ()"
+ end
+
+ # SCHEMA STATEMENTS ========================================
+
+ # Drops the database specified on the +name+ attribute
+ # and creates it again using the provided +options+.
+ def recreate_database(name, options = {})
+ drop_database(name)
+ sql = create_database(name, options)
+ reconnect!
+ sql
+ end
+
+ # Create a new MySQL database with optional <tt>:charset</tt> and <tt>:collation</tt>.
+ # Charset defaults to utf8.
+ #
+ # Example:
+ # create_database 'charset_test', charset: 'latin1', collation: 'latin1_bin'
+ # create_database 'matt_development'
+ # create_database 'matt_development', charset: :big5
+ def create_database(name, options = {})
+ if options[:collation]
+ execute "CREATE DATABASE #{quote_table_name(name)} DEFAULT CHARACTER SET #{quote_table_name(options[:charset] || 'utf8')} COLLATE #{quote_table_name(options[:collation])}"
+ else
+ execute "CREATE DATABASE #{quote_table_name(name)} DEFAULT CHARACTER SET #{quote_table_name(options[:charset] || 'utf8')}"
+ end
+ end
+
+ # Drops a MySQL database.
+ #
+ # Example:
+ # drop_database('sebastian_development')
+ def drop_database(name) #:nodoc:
+ execute "DROP DATABASE IF EXISTS #{quote_table_name(name)}"
+ end
+
+ def current_database
+ query_value("SELECT database()", "SCHEMA")
+ end
+
+ # Returns the database character set.
+ def charset
+ show_variable "character_set_database"
+ end
+
+ # Returns the database collation strategy.
+ def collation
+ show_variable "collation_database"
+ end
+
+ def truncate(table_name, name = nil)
+ execute "TRUNCATE TABLE #{quote_table_name(table_name)}", name
+ end
+
+ def table_comment(table_name) # :nodoc:
+ scope = quoted_scope(table_name)
+
+ query_value(<<-SQL.strip_heredoc, "SCHEMA")
+ SELECT table_comment
+ FROM information_schema.tables
+ WHERE table_schema = #{scope[:schema]}
+ AND table_name = #{scope[:name]}
+ SQL
+ end
+
+ def create_table(table_name, **options) #:nodoc:
+ super(table_name, options: "ENGINE=InnoDB", **options)
+ end
+
+ def bulk_change_table(table_name, operations) #:nodoc:
+ sqls = operations.flat_map do |command, args|
+ table, arguments = args.shift, args
+ method = :"#{command}_sql"
+
+ if respond_to?(method, true)
+ send(method, table, *arguments)
+ else
+ raise "Unknown method called : #{method}(#{arguments.inspect})"
+ end
+ end.join(", ")
+
+ execute("ALTER TABLE #{quote_table_name(table_name)} #{sqls}")
+ end
+
+ # Renames a table.
+ #
+ # Example:
+ # rename_table('octopuses', 'octopi')
+ def rename_table(table_name, new_name)
+ execute "RENAME TABLE #{quote_table_name(table_name)} TO #{quote_table_name(new_name)}"
+ rename_table_indexes(table_name, new_name)
+ end
+
+ # Drops a table from the database.
+ #
+ # [<tt>:force</tt>]
+ # Set to +:cascade+ to drop dependent objects as well.
+ # Defaults to false.
+ # [<tt>:if_exists</tt>]
+ # Set to +true+ to only drop the table if it exists.
+ # Defaults to false.
+ # [<tt>:temporary</tt>]
+ # Set to +true+ to drop temporary table.
+ # Defaults to false.
+ #
+ # Although this command ignores most +options+ and the block if one is given,
+ # it can be helpful to provide these in a migration's +change+ method so it can be reverted.
+ # In that case, +options+ and the block will be used by create_table.
+ def drop_table(table_name, options = {})
+ execute "DROP#{' TEMPORARY' if options[:temporary]} TABLE#{' IF EXISTS' if options[:if_exists]} #{quote_table_name(table_name)}#{' CASCADE' if options[:force] == :cascade}"
+ end
+
+ def rename_index(table_name, old_name, new_name)
+ if supports_rename_index?
+ validate_index_length!(table_name, new_name)
+
+ execute "ALTER TABLE #{quote_table_name(table_name)} RENAME INDEX #{quote_table_name(old_name)} TO #{quote_table_name(new_name)}"
+ else
+ super
+ end
+ end
+
+ def change_column_default(table_name, column_name, default_or_changes) #:nodoc:
+ default = extract_new_default_value(default_or_changes)
+ column = column_for(table_name, column_name)
+ change_column table_name, column_name, column.sql_type, default: default
+ end
+
+ def change_column_null(table_name, column_name, null, default = nil) #:nodoc:
+ column = column_for(table_name, column_name)
+
+ unless null || default.nil?
+ execute("UPDATE #{quote_table_name(table_name)} SET #{quote_column_name(column_name)}=#{quote(default)} WHERE #{quote_column_name(column_name)} IS NULL")
+ end
+
+ change_column table_name, column_name, column.sql_type, null: null
+ end
+
+ def change_column(table_name, column_name, type, options = {}) #:nodoc:
+ execute("ALTER TABLE #{quote_table_name(table_name)} #{change_column_sql(table_name, column_name, type, options)}")
+ end
+
+ def rename_column(table_name, column_name, new_column_name) #:nodoc:
+ execute("ALTER TABLE #{quote_table_name(table_name)} #{rename_column_sql(table_name, column_name, new_column_name)}")
+ rename_column_indexes(table_name, column_name, new_column_name)
+ end
+
+ def add_index(table_name, column_name, options = {}) #:nodoc:
+ index_name, index_type, index_columns, _, index_algorithm, index_using, comment = add_index_options(table_name, column_name, options)
+ sql = "CREATE #{index_type} INDEX #{quote_column_name(index_name)} #{index_using} ON #{quote_table_name(table_name)} (#{index_columns}) #{index_algorithm}".dup
+ execute add_sql_comment!(sql, comment)
+ end
+
+ def add_sql_comment!(sql, comment) # :nodoc:
+ sql << " COMMENT #{quote(comment)}" if comment.present?
+ sql
+ end
+
+ def foreign_keys(table_name)
+ raise ArgumentError unless table_name.present?
+
+ scope = quoted_scope(table_name)
+
+ fk_info = exec_query(<<-SQL.strip_heredoc, "SCHEMA")
+ SELECT fk.referenced_table_name AS 'to_table',
+ fk.referenced_column_name AS 'primary_key',
+ fk.column_name AS 'column',
+ fk.constraint_name AS 'name',
+ rc.update_rule AS 'on_update',
+ rc.delete_rule AS 'on_delete'
+ FROM information_schema.key_column_usage fk
+ JOIN information_schema.referential_constraints rc
+ USING (constraint_schema, constraint_name)
+ WHERE fk.referenced_column_name IS NOT NULL
+ AND fk.table_schema = #{scope[:schema]}
+ AND fk.table_name = #{scope[:name]}
+ AND rc.table_name = #{scope[:name]}
+ SQL
+
+ fk_info.map do |row|
+ options = {
+ column: row["column"],
+ name: row["name"],
+ primary_key: row["primary_key"]
+ }
+
+ options[:on_update] = extract_foreign_key_action(row["on_update"])
+ options[:on_delete] = extract_foreign_key_action(row["on_delete"])
+
+ ForeignKeyDefinition.new(table_name, row["to_table"], options)
+ end
+ end
+
+ def table_options(table_name) # :nodoc:
+ table_options = {}
+
+ create_table_info = create_table_info(table_name)
+
+ # strip create_definitions and partition_options
+ raw_table_options = create_table_info.sub(/\A.*\n\) /m, "").sub(/\n\/\*!.*\*\/\n\z/m, "").strip
+
+ # strip AUTO_INCREMENT
+ raw_table_options.sub!(/(ENGINE=\w+)(?: AUTO_INCREMENT=\d+)/, '\1')
+
+ table_options[:options] = raw_table_options
+
+ # strip COMMENT
+ if raw_table_options.sub!(/ COMMENT='.+'/, "")
+ table_options[:comment] = table_comment(table_name)
+ end
+
+ table_options
+ end
+
+ # Maps logical Rails types to MySQL-specific data types.
+ def type_to_sql(type, limit: nil, precision: nil, scale: nil, unsigned: nil, **) # :nodoc:
+ sql = \
+ case type.to_s
+ when "integer"
+ integer_to_sql(limit)
+ when "text"
+ text_to_sql(limit)
+ when "blob"
+ binary_to_sql(limit)
+ when "binary"
+ if (0..0xfff) === limit
+ "varbinary(#{limit})"
+ else
+ binary_to_sql(limit)
+ end
+ else
+ super
+ end
+
+ sql = "#{sql} unsigned" if unsigned && type != :primary_key
+ sql
+ end
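+
+ # Illustrative mappings produced above, together with the integer_to_sql,
+ # text_to_sql and binary_to_sql helpers defined further down:
+ #
+ #   type_to_sql(:integer)                  # => "int"
+ #   type_to_sql(:integer, limit: 8)        # => "bigint"
+ #   type_to_sql(:integer, unsigned: true)  # => "int unsigned"
+ #   type_to_sql(:binary, limit: 16)        # => "varbinary(16)"
+ #   type_to_sql(:text, limit: 70_000)      # => "mediumtext"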
+
+ # SHOW VARIABLES LIKE 'name'
+ def show_variable(name)
+ query_value("SELECT @@#{name}", "SCHEMA")
+ rescue ActiveRecord::StatementInvalid
+ nil
+ end
+
+ def primary_keys(table_name) # :nodoc:
+ raise ArgumentError unless table_name.present?
+
+ scope = quoted_scope(table_name)
+
+ query_values(<<-SQL.strip_heredoc, "SCHEMA")
+ SELECT column_name
+ FROM information_schema.key_column_usage
+ WHERE constraint_name = 'PRIMARY'
+ AND table_schema = #{scope[:schema]}
+ AND table_name = #{scope[:name]}
+ ORDER BY ordinal_position
+ SQL
+ end
+
+ def case_sensitive_comparison(table, attribute, column, value) # :nodoc:
+ if column.collation && !column.case_sensitive?
+ table[attribute].eq(Arel::Nodes::Bin.new(value))
+ else
+ super
+ end
+ end
+
+ def can_perform_case_insensitive_comparison_for?(column)
+ column.case_sensitive?
+ end
+ private :can_perform_case_insensitive_comparison_for?
+
+ # In MySQL 5.7.5 and up, ONLY_FULL_GROUP_BY affects handling of queries that use
+ # DISTINCT and ORDER BY. It requires the ORDER BY columns in the select list for
+ # distinct queries, and requires that the ORDER BY include the distinct column.
+ # See https://dev.mysql.com/doc/refman/5.7/en/group-by-handling.html
+ def columns_for_distinct(columns, orders) # :nodoc:
+ order_columns = orders.reject(&:blank?).map { |s|
+ # Convert Arel node to string
+ s = s.to_sql unless s.is_a?(String)
+ # Remove any ASC/DESC modifiers
+ s.gsub(/\s+(?:ASC|DESC)\b/i, "")
+ }.reject(&:blank?).map.with_index { |column, i| "#{column} AS alias_#{i}" }
+
+ [super, *order_columns].join(", ")
+ end
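+
+ # Roughly, for made-up column names:
+ #
+ #   columns_for_distinct("posts.id", ["posts.created_at DESC"])
+ #   # => "posts.id, posts.created_at AS alias_0"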
+
+ def strict_mode?
+ self.class.type_cast_config_to_boolean(@config.fetch(:strict, true))
+ end
+
+ def default_index_type?(index) # :nodoc:
+ index.using == :btree || super
+ end
+
+ def insert_fixtures(*)
+ without_sql_mode("NO_AUTO_VALUE_ON_ZERO") { super }
+ end
+
+ private
+
+ def without_sql_mode(mode)
+ result = execute("SELECT @@SESSION.sql_mode")
+ current_mode = result.first[0]
+ return yield unless current_mode.include?(mode)
+
+ sql_mode = "REPLACE(@@sql_mode, '#{mode}', '')"
+ execute("SET @@SESSION.sql_mode = #{sql_mode}")
+ yield
+ ensure
+ sql_mode = "CONCAT(@@sql_mode, ',#{mode}')"
+ execute("SET @@SESSION.sql_mode = #{sql_mode}")
+ end
+
+ def initialize_type_map(m = type_map)
+ super
+
+ register_class_with_limit m, %r(char)i, MysqlString
+
+ m.register_type %r(tinytext)i, Type::Text.new(limit: 2**8 - 1)
+ m.register_type %r(tinyblob)i, Type::Binary.new(limit: 2**8 - 1)
+ m.register_type %r(text)i, Type::Text.new(limit: 2**16 - 1)
+ m.register_type %r(blob)i, Type::Binary.new(limit: 2**16 - 1)
+ m.register_type %r(mediumtext)i, Type::Text.new(limit: 2**24 - 1)
+ m.register_type %r(mediumblob)i, Type::Binary.new(limit: 2**24 - 1)
+ m.register_type %r(longtext)i, Type::Text.new(limit: 2**32 - 1)
+ m.register_type %r(longblob)i, Type::Binary.new(limit: 2**32 - 1)
+ m.register_type %r(^float)i, Type::Float.new(limit: 24)
+ m.register_type %r(^double)i, Type::Float.new(limit: 53)
+ m.register_type %r(^json)i, Type::Json.new
+
+ register_integer_type m, %r(^bigint)i, limit: 8
+ register_integer_type m, %r(^int)i, limit: 4
+ register_integer_type m, %r(^mediumint)i, limit: 3
+ register_integer_type m, %r(^smallint)i, limit: 2
+ register_integer_type m, %r(^tinyint)i, limit: 1
+
+ m.register_type %r(^tinyint\(1\))i, Type::Boolean.new if emulate_booleans
+ m.alias_type %r(year)i, "integer"
+ m.alias_type %r(bit)i, "binary"
+
+ m.register_type(%r(enum)i) do |sql_type|
+ limit = sql_type[/^enum\((.+)\)/i, 1]
+ .split(",").map { |enum| enum.strip.length - 2 }.max
+ MysqlString.new(limit: limit)
+ end
+
+ m.register_type(%r(^set)i) do |sql_type|
+ limit = sql_type[/^set\((.+)\)/i, 1]
+ .split(",").map { |set| set.strip.length - 1 }.sum - 1
+ MysqlString.new(limit: limit)
+ end
+ end
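+
+ # Sketch of the enum/set limit derivation registered above (types are made up):
+ #
+ #   "enum('small','medium','large')"  # => MysqlString with limit 6
+ #                                     #    (longest label, quotes excluded)
+ #   "set('a','bb','ccc')"             # => MysqlString with limit 8
+ #                                     #    (length of the comma-joined set "a,bb,ccc")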
+
+ def register_integer_type(mapping, key, options)
+ mapping.register_type(key) do |sql_type|
+ if /\bunsigned\b/.match?(sql_type)
+ Type::UnsignedInteger.new(options)
+ else
+ Type::Integer.new(options)
+ end
+ end
+ end
+
+ def extract_precision(sql_type)
+ if /\A(?:date)?time(?:stamp)?\b/.match?(sql_type)
+ super || 0
+ else
+ super
+ end
+ end
+
+ def add_index_length(quoted_columns, **options)
+ if length = options[:length]
+ case length
+ when Hash
+ length = length.symbolize_keys
+ quoted_columns.each { |name, column| column << "(#{length[name]})" if length[name].present? }
+ when Integer
+ quoted_columns.each { |name, column| column << "(#{length})" }
+ end
+ end
+
+ quoted_columns
+ end
+
+ def add_options_for_index_columns(quoted_columns, **options)
+ quoted_columns = add_index_length(quoted_columns, options)
+ super
+ end
+
+ # See https://dev.mysql.com/doc/refman/5.7/en/error-messages-server.html
+ ER_DUP_ENTRY = 1062
+ ER_NOT_NULL_VIOLATION = 1048
+ ER_DO_NOT_HAVE_DEFAULT = 1364
+ ER_NO_REFERENCED_ROW_2 = 1452
+ ER_DATA_TOO_LONG = 1406
+ ER_OUT_OF_RANGE = 1264
+ ER_LOCK_DEADLOCK = 1213
+ ER_CANNOT_ADD_FOREIGN = 1215
+ ER_CANNOT_CREATE_TABLE = 1005
+
+ def translate_exception(exception, message)
+ case error_number(exception)
+ when ER_DUP_ENTRY
+ RecordNotUnique.new(message)
+ when ER_NO_REFERENCED_ROW_2
+ InvalidForeignKey.new(message)
+ when ER_CANNOT_ADD_FOREIGN
+ mismatched_foreign_key(message)
+ when ER_CANNOT_CREATE_TABLE
+ if message.include?("errno: 150")
+ mismatched_foreign_key(message)
+ else
+ super
+ end
+ when ER_DATA_TOO_LONG
+ ValueTooLong.new(message)
+ when ER_OUT_OF_RANGE
+ RangeError.new(message)
+ when ER_NOT_NULL_VIOLATION, ER_DO_NOT_HAVE_DEFAULT
+ NotNullViolation.new(message)
+ when ER_LOCK_DEADLOCK
+ Deadlocked.new(message)
+ else
+ super
+ end
+ end
+
+ def add_column_sql(table_name, column_name, type, options = {})
+ td = create_table_definition(table_name)
+ cd = td.new_column_definition(column_name, type, options)
+ schema_creation.accept(AddColumnDefinition.new(cd))
+ end
+
+ def change_column_sql(table_name, column_name, type, options = {})
+ column = column_for(table_name, column_name)
+
+ unless options.key?(:default)
+ options[:default] = column.default
+ end
+
+ unless options.key?(:null)
+ options[:null] = column.null
+ end
+
+ unless options.key?(:comment)
+ options[:comment] = column.comment
+ end
+
+ td = create_table_definition(table_name)
+ cd = td.new_column_definition(column.name, type, options)
+ schema_creation.accept(ChangeColumnDefinition.new(cd, column.name))
+ end
+
+ def rename_column_sql(table_name, column_name, new_column_name)
+ column = column_for(table_name, column_name)
+ options = {
+ default: column.default,
+ null: column.null,
+ auto_increment: column.auto_increment?
+ }
+
+ current_type = exec_query("SHOW COLUMNS FROM #{quote_table_name(table_name)} LIKE #{quote(column_name)}", "SCHEMA").first["Type"]
+ td = create_table_definition(table_name)
+ cd = td.new_column_definition(new_column_name, current_type, options)
+ schema_creation.accept(ChangeColumnDefinition.new(cd, column.name))
+ end
+
+ def remove_column_sql(table_name, column_name, type = nil, options = {})
+ "DROP #{quote_column_name(column_name)}"
+ end
+
+ def remove_columns_sql(table_name, *column_names)
+ column_names.map { |column_name| remove_column_sql(table_name, column_name) }
+ end
+
+ def add_index_sql(table_name, column_name, options = {})
+ index_name, index_type, index_columns, _, index_algorithm, index_using = add_index_options(table_name, column_name, options)
+ index_algorithm[0, 0] = ", " if index_algorithm.present?
+ "ADD #{index_type} INDEX #{quote_column_name(index_name)} #{index_using} (#{index_columns})#{index_algorithm}"
+ end
+
+ def remove_index_sql(table_name, options = {})
+ index_name = index_name_for_remove(table_name, options)
+ "DROP INDEX #{index_name}"
+ end
+
+ def add_timestamps_sql(table_name, options = {})
+ [add_column_sql(table_name, :created_at, :datetime, options), add_column_sql(table_name, :updated_at, :datetime, options)]
+ end
+
+ def remove_timestamps_sql(table_name, options = {})
+ [remove_column_sql(table_name, :updated_at), remove_column_sql(table_name, :created_at)]
+ end
+
+ # MySQL is too stupid to create a temporary table for use in a subquery, so we have
+ # to give it some prompting in the form of a subsubquery. Ugh!
+ def subquery_for(key, select)
+ subselect = select.clone
+ subselect.projections = [key]
+
+ # Materialize subquery by adding distinct
+ # to work with MySQL 5.7.6 which sets optimizer_switch='derived_merge=on'
+ subselect.distinct unless select.limit || select.offset || select.orders.any?
+
+ Arel::SelectManager.new(subselect.as("__active_record_temp")).project(Arel.sql(key.name))
+ end
+
+ def supports_rename_index?
+ mariadb? ? false : version >= "5.7.6"
+ end
+
+ def configure_connection
+ variables = @config.fetch(:variables, {}).stringify_keys
+
+ # By default, MySQL 'where id is null' selects the last inserted id; turn this off.
+ variables["sql_auto_is_null"] = 0
+
+ # Increase timeout so the server doesn't disconnect us.
+ wait_timeout = self.class.type_cast_config_to_integer(@config[:wait_timeout])
+ wait_timeout = 2147483 unless wait_timeout.is_a?(Integer)
+ variables["wait_timeout"] = wait_timeout
+
+ defaults = [":default", :default].to_set
+
+ # Make MySQL reject illegal values rather than truncating or blanking them, see
+ # http://dev.mysql.com/doc/refman/5.7/en/sql-mode.html#sqlmode_strict_all_tables
+ # If the user has provided another value for sql_mode, don't replace it.
+ if sql_mode = variables.delete("sql_mode")
+ sql_mode = quote(sql_mode)
+ elsif !defaults.include?(strict_mode?)
+ if strict_mode?
+ sql_mode = "CONCAT(@@sql_mode, ',STRICT_ALL_TABLES')"
+ else
+ sql_mode = "REPLACE(@@sql_mode, 'STRICT_TRANS_TABLES', '')"
+ sql_mode = "REPLACE(#{sql_mode}, 'STRICT_ALL_TABLES', '')"
+ sql_mode = "REPLACE(#{sql_mode}, 'TRADITIONAL', '')"
+ end
+ sql_mode = "CONCAT(#{sql_mode}, ',NO_AUTO_VALUE_ON_ZERO')"
+ end
+ sql_mode_assignment = "@@SESSION.sql_mode = #{sql_mode}, " if sql_mode
+
+ # NAMES does not have an equals sign, see
+ # http://dev.mysql.com/doc/refman/5.7/en/set-statement.html#id944430
+ # (trailing comma because variable_assignments will always have content)
+ if @config[:encoding]
+ encoding = "NAMES #{@config[:encoding]}".dup
+ encoding << " COLLATE #{@config[:collation]}" if @config[:collation]
+ encoding << ", "
+ end
+
+ # Gather up all of the SET variables...
+ variable_assignments = variables.map do |k, v|
+ if defaults.include?(v)
+ "@@SESSION.#{k} = DEFAULT" # Sets the value to the global or compile default
+ elsif !v.nil?
+ "@@SESSION.#{k} = #{quote(v)}"
+ end
+ # or else nil; compact to clear nils out
+ end.compact.join(", ")
+
+ # ...and send them all in one query
+ execute "SET #{encoding} #{sql_mode_assignment} #{variable_assignments}"
+ end
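+
+ # Illustrative statement sent for a hypothetical config of
+ # encoding: "utf8mb4", variables: { sql_mode: "TRADITIONAL" } (roughly):
+ #
+ #   SET NAMES utf8mb4, @@SESSION.sql_mode = 'TRADITIONAL',
+ #       @@SESSION.sql_auto_is_null = 0, @@SESSION.wait_timeout = 2147483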
+
+ def column_definitions(table_name) # :nodoc:
+ execute_and_free("SHOW FULL FIELDS FROM #{quote_table_name(table_name)}", "SCHEMA") do |result|
+ each_hash(result)
+ end
+ end
+
+ def create_table_info(table_name) # :nodoc:
+ exec_query("SHOW CREATE TABLE #{quote_table_name(table_name)}", "SCHEMA").first["Create Table"]
+ end
+
+ def arel_visitor
+ Arel::Visitors::MySQL.new(self)
+ end
+
+ def mismatched_foreign_key(message)
+ parts = message.scan(/`(\w+)`[ $)]/).flatten
+ MismatchedForeignKey.new(
+ self,
+ message: message,
+ table: parts[0],
+ foreign_key: parts[1],
+ target_table: parts[2],
+ primary_key: parts[3],
+ )
+ end
+
+ def integer_to_sql(limit) # :nodoc:
+ case limit
+ when 1; "tinyint"
+ when 2; "smallint"
+ when 3; "mediumint"
+ when nil, 4; "int"
+ when 5..8; "bigint"
+ else raise(ActiveRecordError, "No integer type has byte size #{limit}. Use a decimal with scale 0 instead.")
+ end
+ end
+
+ def text_to_sql(limit) # :nodoc:
+ case limit
+ when 0..0xff; "tinytext"
+ when nil, 0x100..0xffff; "text"
+ when 0x10000..0xffffff; "mediumtext"
+ when 0x1000000..0xffffffff; "longtext"
+ else raise(ActiveRecordError, "No text type has byte length #{limit}")
+ end
+ end
+
+ def binary_to_sql(limit) # :nodoc:
+ case limit
+ when 0..0xff; "tinyblob"
+ when nil, 0x100..0xffff; "blob"
+ when 0x10000..0xffffff; "mediumblob"
+ when 0x1000000..0xffffffff; "longblob"
+ else raise(ActiveRecordError, "No binary type has byte length #{limit}")
+ end
+ end
+
+ def version_string
+ full_version.match(/^(?:5\.5\.5-)?(\d+\.\d+\.\d+)/)[1]
+ end
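+
+ # Illustrative full_version strings (values are made up):
+ #
+ #   "5.7.21-log"             # => "5.7.21"
+ #   "5.5.5-10.1.30-MariaDB"  # => "10.1.30" (MariaDB reports a 5.5.5- prefix)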
+
+ class MysqlString < Type::String # :nodoc:
+ def serialize(value)
+ case value
+ when true then "1"
+ when false then "0"
+ else super
+ end
+ end
+
+ private
+
+ def cast_value(value)
+ case value
+ when true then "1"
+ when false then "0"
+ else super
+ end
+ end
+ end
+
+ ActiveRecord::Type.register(:string, MysqlString, adapter: :mysql2)
+ ActiveRecord::Type.register(:unsigned_integer, Type::UnsignedInteger, adapter: :mysql2)
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/connection_adapters/column.rb b/activerecord/lib/active_record/connection_adapters/column.rb
new file mode 100644
index 0000000000..16273fb5f1
--- /dev/null
+++ b/activerecord/lib/active_record/connection_adapters/column.rb
@@ -0,0 +1,91 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ # :stopdoc:
+ module ConnectionAdapters
+ # An abstract definition of a column in a table.
+ class Column
+ attr_reader :name, :default, :sql_type_metadata, :null, :table_name, :default_function, :collation, :comment
+
+ delegate :precision, :scale, :limit, :type, :sql_type, to: :sql_type_metadata, allow_nil: true
+
+ # Instantiates a new column in the table.
+ #
+ # +name+ is the column's name, such as <tt>supplier_id</tt> in <tt>supplier_id int</tt>.
+ # +default+ is the type-casted default value, such as +new+ in <tt>sales_stage varchar(20) default 'new'</tt>.
+ # +sql_type_metadata+ is various information about the type of the column.
+ # +null+ determines if this column allows +NULL+ values.
+ def initialize(name, default, sql_type_metadata = nil, null = true, table_name = nil, default_function = nil, collation = nil, comment: nil)
+ @name = name.freeze
+ @table_name = table_name
+ @sql_type_metadata = sql_type_metadata
+ @null = null
+ @default = default
+ @default_function = default_function
+ @collation = collation
+ @comment = comment
+ end
+
+ def has_default?
+ !default.nil? || default_function
+ end
+
+ def bigint?
+ /\Abigint\b/.match?(sql_type)
+ end
+
+ # Returns the human name of the column name.
+ #
+ # ===== Examples
+ # Column.new('sales_stage', ...).human_name # => 'Sales stage'
+ def human_name
+ Base.human_attribute_name(@name)
+ end
+
+ def init_with(coder)
+ @name = coder["name"]
+ @table_name = coder["table_name"]
+ @sql_type_metadata = coder["sql_type_metadata"]
+ @null = coder["null"]
+ @default = coder["default"]
+ @default_function = coder["default_function"]
+ @collation = coder["collation"]
+ @comment = coder["comment"]
+ end
+
+ def encode_with(coder)
+ coder["name"] = @name
+ coder["table_name"] = @table_name
+ coder["sql_type_metadata"] = @sql_type_metadata
+ coder["null"] = @null
+ coder["default"] = @default
+ coder["default_function"] = @default_function
+ coder["collation"] = @collation
+ coder["comment"] = @comment
+ end
+
+ def ==(other)
+ other.is_a?(Column) &&
+ attributes_for_hash == other.attributes_for_hash
+ end
+ alias :eql? :==
+
+ def hash
+ attributes_for_hash.hash
+ end
+
+ protected
+
+ def attributes_for_hash
+ [self.class, name, default, sql_type_metadata, null, table_name, default_function, collation]
+ end
+ end
+
+ class NullColumn < Column
+ def initialize(name)
+ super(name, nil)
+ end
+ end
+ end
+ # :startdoc:
+end
diff --git a/activerecord/lib/active_record/connection_adapters/connection_specification.rb b/activerecord/lib/active_record/connection_adapters/connection_specification.rb
new file mode 100644
index 0000000000..29542f917e
--- /dev/null
+++ b/activerecord/lib/active_record/connection_adapters/connection_specification.rb
@@ -0,0 +1,275 @@
+# frozen_string_literal: true
+
+require "uri"
+
+module ActiveRecord
+ module ConnectionAdapters
+ class ConnectionSpecification #:nodoc:
+ attr_reader :name, :config, :adapter_method
+
+ def initialize(name, config, adapter_method)
+ @name, @config, @adapter_method = name, config, adapter_method
+ end
+
+ def initialize_dup(original)
+ @config = original.config.dup
+ end
+
+ def to_hash
+ @config.merge(name: @name)
+ end
+
+ # Expands a connection string into a hash.
+ class ConnectionUrlResolver # :nodoc:
+ # == Example
+ #
+ # url = "postgresql://foo:bar@localhost:9000/foo_test?pool=5&timeout=3000"
+ # ConnectionUrlResolver.new(url).to_hash
+ # # => {
+ # "adapter" => "postgresql",
+ # "host" => "localhost",
+ # "port" => 9000,
+ # "database" => "foo_test",
+ # "username" => "foo",
+ # "password" => "bar",
+ # "pool" => "5",
+ # "timeout" => "3000"
+ # }
+ def initialize(url)
+ raise "Database URL cannot be empty" if url.blank?
+ @uri = uri_parser.parse(url)
+ @adapter = @uri.scheme && @uri.scheme.tr("-", "_")
+ @adapter = "postgresql" if @adapter == "postgres"
+
+ if @uri.opaque
+ @uri.opaque, @query = @uri.opaque.split("?", 2)
+ else
+ @query = @uri.query
+ end
+ end
+
+ # Converts the given URL to a full connection hash.
+ def to_hash
+ config = raw_config.reject { |_, value| value.blank? }
+ config.map { |key, value| config[key] = uri_parser.unescape(value) if value.is_a? String }
+ config
+ end
+
+ private
+
+ def uri
+ @uri
+ end
+
+ def uri_parser
+ @uri_parser ||= URI::Parser.new
+ end
+
+ # Converts the query parameters of the URI into a hash.
+ #
+ # "localhost?pool=5&reaping_frequency=2"
+ # # => { "pool" => "5", "reaping_frequency" => "2" }
+ #
+ # Returns an empty hash if no query is present.
+ #
+ # "localhost"
+ # # => {}
+ def query_hash
+ Hash[(@query || "").split("&").map { |pair| pair.split("=") }]
+ end
+
+ def raw_config
+ if uri.opaque
+ query_hash.merge(
+ "adapter" => @adapter,
+ "database" => uri.opaque)
+ else
+ query_hash.merge(
+ "adapter" => @adapter,
+ "username" => uri.user,
+ "password" => uri.password,
+ "port" => uri.port,
+ "database" => database_from_path,
+ "host" => uri.hostname)
+ end
+ end
+
+ # Returns the name of the database.
+ def database_from_path
+ if @adapter == "sqlite3"
+ # 'sqlite3:/foo' is absolute, because that makes sense. The
+ # corresponding relative version, 'sqlite3:foo', is handled
+ # elsewhere, as an "opaque".
+
+ uri.path
+ else
+ # Only SQLite uses a filename as the "database" name; for
+ # anything else, a leading slash would be silly.
+
+ uri.path.sub(%r{^/}, "")
+ end
+ end
+ end
+
+ ##
+ # Builds a ConnectionSpecification from user input.
+ class Resolver # :nodoc:
+ attr_reader :configurations
+
+ # Accepts a hash two layers deep; keys on the first layer represent
+ # environments such as "production". Keys must be strings.
+ def initialize(configurations)
+ @configurations = configurations
+ end
+
+ # Returns a hash with database connection information.
+ #
+ # == Examples
+ #
+ # Full hash configuration.
+ #
+ # configurations = { "production" => { "host" => "localhost", "database" => "foo", "adapter" => "sqlite3" } }
+ # Resolver.new(configurations).resolve(:production)
+ # # => { "host" => "localhost", "database" => "foo", "adapter" => "sqlite3"}
+ #
+ # Initialized with URL configuration strings.
+ #
+ # configurations = { "production" => "postgresql://localhost/foo" }
+ # Resolver.new(configurations).resolve(:production)
+ # # => { "host" => "localhost", "database" => "foo", "adapter" => "postgresql" }
+ #
+ def resolve(config)
+ if config
+ resolve_connection config
+ elsif env = ActiveRecord::ConnectionHandling::RAILS_ENV.call
+ resolve_symbol_connection env.to_sym
+ else
+ raise AdapterNotSpecified
+ end
+ end
+
+ # Expands each key in the @configurations hash into a fully resolved hash.
+ def resolve_all
+ config = configurations.dup
+
+ if env = ActiveRecord::ConnectionHandling::DEFAULT_ENV.call
+ env_config = config[env] if config[env].is_a?(Hash) && !(config[env].key?("adapter") || config[env].key?("url"))
+ end
+
+ config.reject! { |k, v| v.is_a?(Hash) && !(v.key?("adapter") || v.key?("url")) }
+ config.merge! env_config if env_config
+
+ config.each do |key, value|
+ config[key] = resolve(value) if value
+ end
+
+ config
+ end
+
+ # Returns an instance of ConnectionSpecification for a given adapter.
+ # Accepts a hash one layer deep that contains all connection information.
+ #
+ # == Example
+ #
+ # config = { "production" => { "host" => "localhost", "database" => "foo", "adapter" => "sqlite3" } }
+ # spec = Resolver.new(config).spec(:production)
+ # spec.adapter_method
+ # # => "sqlite3_connection"
+ # spec.config
+ # # => { "host" => "localhost", "database" => "foo", "adapter" => "sqlite3" }
+ #
+ def spec(config)
+ spec = resolve(config).symbolize_keys
+
+ raise(AdapterNotSpecified, "database configuration does not specify adapter") unless spec.key?(:adapter)
+
+ path_to_adapter = "active_record/connection_adapters/#{spec[:adapter]}_adapter"
+ begin
+ require path_to_adapter
+ rescue Gem::LoadError => e
+ raise Gem::LoadError, "Specified '#{spec[:adapter]}' for database adapter, but the gem is not loaded. Add `gem '#{e.name}'` to your Gemfile (and ensure its version is at the minimum required by ActiveRecord)."
+ rescue LoadError => e
+ raise LoadError, "Could not load '#{path_to_adapter}'. Make sure that the adapter in config/database.yml is valid. If you use an adapter other than 'mysql2', 'postgresql' or 'sqlite3' add the necessary adapter gem to the Gemfile.", e.backtrace
+ end
+
+ adapter_method = "#{spec[:adapter]}_connection"
+
+ unless ActiveRecord::Base.respond_to?(adapter_method)
+ raise AdapterNotFound, "database configuration specifies nonexistent #{spec[:adapter]} adapter"
+ end
+
+ ConnectionSpecification.new(spec.delete(:name) || "primary", spec, adapter_method)
+ end
+
+ private
+
+ # Returns a fully resolved connection. Accepts a hash, string, or symbol.
+ # Always returns a hash.
+ #
+ # == Examples
+ #
+ # Symbol representing current environment.
+ #
+ # Resolver.new("production" => {}).resolve_connection(:production)
+ # # => {}
+ #
+ # One layer deep hash of connection values.
+ #
+ # Resolver.new({}).resolve_connection("adapter" => "sqlite3")
+ # # => { "adapter" => "sqlite3" }
+ #
+ # Connection URL.
+ #
+ # Resolver.new({}).resolve_connection("postgresql://localhost/foo")
+ # # => { "host" => "localhost", "database" => "foo", "adapter" => "postgresql" }
+ #
+ def resolve_connection(spec)
+ case spec
+ when Symbol
+ resolve_symbol_connection spec
+ when String
+ resolve_url_connection spec
+ when Hash
+ resolve_hash_connection spec
+ end
+ end
+
+ # Takes an environment such as +:production+ or +:development+.
+ # This requires that @configurations was initialized with a matching
+ # key.
+ #
+ # Resolver.new("production" => {}).resolve_symbol_connection(:production)
+ # # => {}
+ #
+ def resolve_symbol_connection(spec)
+ if config = configurations[spec.to_s]
+ resolve_connection(config).merge("name" => spec.to_s)
+ else
+ raise(AdapterNotSpecified, "'#{spec}' database is not configured. Available: #{configurations.keys.inspect}")
+ end
+ end
+
+ # Accepts a hash. Expands the "url" key, which contains a database
+ # connection URL, into a full connection hash and merges it with the
+ # rest of the hash.
+ # Connection details inside the "url" key win any merge conflicts.
+ def resolve_hash_connection(spec)
+ if spec["url"] && spec["url"] !~ /^jdbc:/
+ connection_hash = resolve_url_connection(spec.delete("url"))
+ spec.merge!(connection_hash)
+ end
+ spec
+ end
+
+ # Takes a connection URL.
+ #
+ # Resolver.new({}).resolve_url_connection("postgresql://localhost/foo")
+ # # => { "host" => "localhost", "database" => "foo", "adapter" => "postgresql" }
+ #
+ def resolve_url_connection(url)
+ ConnectionUrlResolver.new(url).to_hash
+ end
+ end
+ end
+ end
+end
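For illustration only (not part of the change above): a minimal sketch of the resolver, mirroring the examples in its own comments. It assumes active_record is loaded; the URL and environment name are made up.

    resolver = ActiveRecord::ConnectionAdapters::ConnectionSpecification::Resolver.new(
      "production" => "postgresql://foo:bar@localhost:9000/foo_test?pool=5"
    )
    resolver.resolve(:production)
    # => { "adapter" => "postgresql", "username" => "foo", "password" => "bar",
    #      "port" => 9000, "database" => "foo_test", "host" => "localhost",
    #      "pool" => "5", "name" => "production" }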
diff --git a/activerecord/lib/active_record/connection_adapters/determine_if_preparable_visitor.rb b/activerecord/lib/active_record/connection_adapters/determine_if_preparable_visitor.rb
new file mode 100644
index 0000000000..3dcb916d99
--- /dev/null
+++ b/activerecord/lib/active_record/connection_adapters/determine_if_preparable_visitor.rb
@@ -0,0 +1,24 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ module ConnectionAdapters
+ module DetermineIfPreparableVisitor
+ attr_reader :preparable
+
+ def accept(*)
+ @preparable = true
+ super
+ end
+
+ def visit_Arel_Nodes_In(*)
+ @preparable = false
+ super
+ end
+
+ def visit_Arel_Nodes_SqlLiteral(*)
+ @preparable = false
+ super
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/connection_adapters/mysql/column.rb b/activerecord/lib/active_record/connection_adapters/mysql/column.rb
new file mode 100644
index 0000000000..fa1541019d
--- /dev/null
+++ b/activerecord/lib/active_record/connection_adapters/mysql/column.rb
@@ -0,0 +1,27 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ module ConnectionAdapters
+ module MySQL
+ class Column < ConnectionAdapters::Column # :nodoc:
+ delegate :extra, to: :sql_type_metadata, allow_nil: true
+
+ def unsigned?
+ /\bunsigned(?: zerofill)?\z/.match?(sql_type)
+ end
+
+ def case_sensitive?
+ collation && !/_ci\z/.match?(collation)
+ end
+
+ def auto_increment?
+ extra == "auto_increment"
+ end
+
+ def virtual?
+ /\b(?:VIRTUAL|STORED|PERSISTENT)\b/.match?(extra)
+ end
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/connection_adapters/mysql/database_statements.rb b/activerecord/lib/active_record/connection_adapters/mysql/database_statements.rb
new file mode 100644
index 0000000000..a058a72872
--- /dev/null
+++ b/activerecord/lib/active_record/connection_adapters/mysql/database_statements.rb
@@ -0,0 +1,97 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ module ConnectionAdapters
+ module MySQL
+ module DatabaseStatements
+ # Returns an ActiveRecord::Result instance.
+ def select_all(*) # :nodoc:
+ result = if ExplainRegistry.collect? && prepared_statements
+ unprepared_statement { super }
+ else
+ super
+ end
+ @connection.next_result while @connection.more_results?
+ result
+ end
+
+ def query(sql, name = nil) # :nodoc:
+ execute(sql, name).to_a
+ end
+
+ # Executes the SQL statement in the context of this connection.
+ def execute(sql, name = nil)
+ # make sure we carry over any changes to ActiveRecord::Base.default_timezone that have been
+ # made since we established the connection
+ @connection.query_options[:database_timezone] = ActiveRecord::Base.default_timezone
+
+ super
+ end
+
+ def exec_query(sql, name = "SQL", binds = [], prepare: false)
+ if without_prepared_statement?(binds)
+ execute_and_free(sql, name) do |result|
+ ActiveRecord::Result.new(result.fields, result.to_a) if result
+ end
+ else
+ exec_stmt_and_free(sql, name, binds, cache_stmt: prepare) do |_, result|
+ ActiveRecord::Result.new(result.fields, result.to_a) if result
+ end
+ end
+ end
+
+ def exec_delete(sql, name = nil, binds = [])
+ if without_prepared_statement?(binds)
+ execute_and_free(sql, name) { @connection.affected_rows }
+ else
+ exec_stmt_and_free(sql, name, binds) { |stmt| stmt.affected_rows }
+ end
+ end
+ alias :exec_update :exec_delete
+
+ private
+
+ def last_inserted_id(result)
+ @connection.last_id
+ end
+
+ def exec_stmt_and_free(sql, name, binds, cache_stmt: false)
+ # make sure we carry over any changes to ActiveRecord::Base.default_timezone that have been
+ # made since we established the connection
+ @connection.query_options[:database_timezone] = ActiveRecord::Base.default_timezone
+
+ type_casted_binds = type_casted_binds(binds)
+
+ log(sql, name, binds, type_casted_binds) do
+ if cache_stmt
+ cache = @statements[sql] ||= {
+ stmt: @connection.prepare(sql)
+ }
+ stmt = cache[:stmt]
+ else
+ stmt = @connection.prepare(sql)
+ end
+
+ begin
+ result = ActiveSupport::Dependencies.interlock.permit_concurrent_loads do
+ stmt.execute(*type_casted_binds)
+ end
+ rescue Mysql2::Error => e
+ if cache_stmt
+ @statements.delete(sql)
+ else
+ stmt.close
+ end
+ raise e
+ end
+
+ ret = yield stmt, result
+ result.free if result
+ stmt.close unless cache_stmt
+ ret
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/connection_adapters/mysql/explain_pretty_printer.rb b/activerecord/lib/active_record/connection_adapters/mysql/explain_pretty_printer.rb
new file mode 100644
index 0000000000..20c3c83664
--- /dev/null
+++ b/activerecord/lib/active_record/connection_adapters/mysql/explain_pretty_printer.rb
@@ -0,0 +1,72 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ module ConnectionAdapters
+ module MySQL
+ class ExplainPrettyPrinter # :nodoc:
+ # Pretty prints the result of an EXPLAIN in a way that resembles the output of the
+ # MySQL shell:
+ #
+ # +----+-------------+-------+-------+---------------+---------+---------+-------+------+-------------+
+ # | id | select_type | table | type | possible_keys | key | key_len | ref | rows | Extra |
+ # +----+-------------+-------+-------+---------------+---------+---------+-------+------+-------------+
+ # | 1 | SIMPLE | users | const | PRIMARY | PRIMARY | 4 | const | 1 | |
+ # | 1 | SIMPLE | posts | ALL | NULL | NULL | NULL | NULL | 1 | Using where |
+ # +----+-------------+-------+-------+---------------+---------+---------+-------+------+-------------+
+ # 2 rows in set (0.00 sec)
+ #
+ # This is an exercise in Ruby hyperrealism :).
+ def pp(result, elapsed)
+ widths = compute_column_widths(result)
+ separator = build_separator(widths)
+
+ pp = []
+
+ pp << separator
+ pp << build_cells(result.columns, widths)
+ pp << separator
+
+ result.rows.each do |row|
+ pp << build_cells(row, widths)
+ end
+
+ pp << separator
+ pp << build_footer(result.rows.length, elapsed)
+
+ pp.join("\n") + "\n"
+ end
+
+ private
+
+ def compute_column_widths(result)
+ [].tap do |widths|
+ result.columns.each_with_index do |column, i|
+ cells_in_column = [column] + result.rows.map { |r| r[i].nil? ? "NULL" : r[i].to_s }
+ widths << cells_in_column.map(&:length).max
+ end
+ end
+ end
+
+ def build_separator(widths)
+ padding = 1
+ "+" + widths.map { |w| "-" * (w + (padding * 2)) }.join("+") + "+"
+ end
+
+ def build_cells(items, widths)
+ cells = []
+ items.each_with_index do |item, i|
+ item = "NULL" if item.nil?
+ justifier = item.is_a?(Numeric) ? "rjust" : "ljust"
+ cells << item.to_s.send(justifier, widths[i])
+ end
+ "| " + cells.join(" | ") + " |"
+ end
+
+ def build_footer(nrows, elapsed)
+ rows_label = nrows == 1 ? "row" : "rows"
+ "#{nrows} #{rows_label} in set (%.2f sec)" % elapsed
+ end
+ end
+ end
+ end
+end
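For illustration only: a sketch of feeding the pretty printer above an ActiveRecord::Result built by hand (column names, row, and timing are made up).

    result = ActiveRecord::Result.new(%w(id select_type table), [["1", "SIMPLE", "users"]])
    puts ActiveRecord::ConnectionAdapters::MySQL::ExplainPrettyPrinter.new.pp(result, 0.01)
    # Prints an ASCII table like the one in the comment above,
    # followed by "1 row in set (0.01 sec)".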
diff --git a/activerecord/lib/active_record/connection_adapters/mysql/quoting.rb b/activerecord/lib/active_record/connection_adapters/mysql/quoting.rb
new file mode 100644
index 0000000000..be038403b8
--- /dev/null
+++ b/activerecord/lib/active_record/connection_adapters/mysql/quoting.rb
@@ -0,0 +1,44 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ module ConnectionAdapters
+ module MySQL
+ module Quoting # :nodoc:
+ def quote_column_name(name)
+ @quoted_column_names[name] ||= "`#{super.gsub('`', '``')}`".freeze
+ end
+
+ def quote_table_name(name)
+ @quoted_table_names[name] ||= super.gsub(".", "`.`").freeze
+ end
+
+ def unquoted_true
+ 1
+ end
+
+ def unquoted_false
+ 0
+ end
+
+ def quoted_date(value)
+ if supports_datetime_with_precision?
+ super
+ else
+ super.sub(/\.\d{6}\z/, "")
+ end
+ end
+
+ def quoted_binary(value)
+ "x'#{value.hex}'"
+ end
+
+ def _type_cast(value)
+ case value
+ when Date, Time then value
+ else super
+ end
+ end
+ end
+ end
+ end
+end
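For illustration only: assuming an established MySQL connection, the quoting helpers above behave roughly like this (identifiers are made up).

    conn = ActiveRecord::Base.connection
    conn.quote_column_name("title")      # => "`title`"
    conn.quote_table_name("blog.posts")  # => "`blog`.`posts`"
    conn.unquoted_true                   # => 1
    conn.unquoted_false                  # => 0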
diff --git a/activerecord/lib/active_record/connection_adapters/mysql/schema_creation.rb b/activerecord/lib/active_record/connection_adapters/mysql/schema_creation.rb
new file mode 100644
index 0000000000..5a35823c72
--- /dev/null
+++ b/activerecord/lib/active_record/connection_adapters/mysql/schema_creation.rb
@@ -0,0 +1,73 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ module ConnectionAdapters
+ module MySQL
+ class SchemaCreation < AbstractAdapter::SchemaCreation # :nodoc:
+ delegate :add_sql_comment!, :mariadb?, to: :@conn
+ private :add_sql_comment!, :mariadb?
+
+ private
+
+ def visit_DropForeignKey(name)
+ "DROP FOREIGN KEY #{name}"
+ end
+
+ def visit_AddColumnDefinition(o)
+ add_column_position!(super, column_options(o.column))
+ end
+
+ def visit_ChangeColumnDefinition(o)
+ change_column_sql = "CHANGE #{quote_column_name(o.name)} #{accept(o.column)}".dup
+ add_column_position!(change_column_sql, column_options(o.column))
+ end
+
+ def add_table_options!(create_sql, options)
+ add_sql_comment!(super, options[:comment])
+ end
+
+ def add_column_options!(sql, options)
+ # By default, TIMESTAMP columns are NOT NULL, cannot contain NULL values,
+ # and assigning NULL assigns the current timestamp. To permit a TIMESTAMP
+ # column to contain NULL, explicitly declare it with the NULL attribute.
+ # See http://dev.mysql.com/doc/refman/5.7/en/timestamp-initialization.html
+ if /\Atimestamp\b/.match?(options[:column].sql_type) && !options[:primary_key]
+ sql << " NULL" unless options[:null] == false || options_include_default?(options)
+ end
+
+ if charset = options[:charset]
+ sql << " CHARACTER SET #{charset}"
+ end
+
+ if collation = options[:collation]
+ sql << " COLLATE #{collation}"
+ end
+
+ if as = options[:as]
+ sql << " AS (#{as})"
+ if options[:stored]
+ sql << (mariadb? ? " PERSISTENT" : " STORED")
+ end
+ end
+
+ add_sql_comment!(super, options[:comment])
+ end
+
+ def add_column_position!(sql, options)
+ if options[:first]
+ sql << " FIRST"
+ elsif options[:after]
+ sql << " AFTER #{quote_column_name(options[:after])}"
+ end
+
+ sql
+ end
+
+ def index_in_create(table_name, column_name, options)
+ index_name, index_type, index_columns, _, _, index_using, comment = @conn.add_index_options(table_name, column_name, options)
+ add_sql_comment!("#{index_type} INDEX #{quote_column_name(index_name)} #{index_using} (#{index_columns})".dup, comment)
+ end
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/connection_adapters/mysql/schema_definitions.rb b/activerecord/lib/active_record/connection_adapters/mysql/schema_definitions.rb
new file mode 100644
index 0000000000..b22a2e4da7
--- /dev/null
+++ b/activerecord/lib/active_record/connection_adapters/mysql/schema_definitions.rb
@@ -0,0 +1,92 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ module ConnectionAdapters
+ module MySQL
+ module ColumnMethods
+ def primary_key(name, type = :primary_key, **options)
+ options[:auto_increment] = true if [:integer, :bigint].include?(type) && !options.key?(:default)
+ super
+ end
+
+ def blob(*args, **options)
+ args.each { |name| column(name, :blob, options) }
+ end
+
+ def tinyblob(*args, **options)
+ args.each { |name| column(name, :tinyblob, options) }
+ end
+
+ def mediumblob(*args, **options)
+ args.each { |name| column(name, :mediumblob, options) }
+ end
+
+ def longblob(*args, **options)
+ args.each { |name| column(name, :longblob, options) }
+ end
+
+ def tinytext(*args, **options)
+ args.each { |name| column(name, :tinytext, options) }
+ end
+
+ def mediumtext(*args, **options)
+ args.each { |name| column(name, :mediumtext, options) }
+ end
+
+ def longtext(*args, **options)
+ args.each { |name| column(name, :longtext, options) }
+ end
+
+ def json(*args, **options)
+ args.each { |name| column(name, :json, options) }
+ end
+
+ def unsigned_integer(*args, **options)
+ args.each { |name| column(name, :unsigned_integer, options) }
+ end
+
+ def unsigned_bigint(*args, **options)
+ args.each { |name| column(name, :unsigned_bigint, options) }
+ end
+
+ def unsigned_float(*args, **options)
+ args.each { |name| column(name, :unsigned_float, options) }
+ end
+
+ def unsigned_decimal(*args, **options)
+ args.each { |name| column(name, :unsigned_decimal, options) }
+ end
+ end
+
+ class TableDefinition < ActiveRecord::ConnectionAdapters::TableDefinition
+ include ColumnMethods
+
+ def new_column_definition(name, type, **options) # :nodoc:
+ case type
+ when :virtual
+ type = options[:type]
+ when :primary_key
+ type = :integer
+ options[:limit] ||= 8
+ options[:auto_increment] = true
+ options[:primary_key] = true
+ when /\Aunsigned_(?<type>.+)\z/
+ type = $~[:type].to_sym
+ options[:unsigned] = true
+ end
+
+ super
+ end
+
+ private
+ def aliased_types(name, fallback)
+ fallback
+ end
+ end
+
+ class Table < ActiveRecord::ConnectionAdapters::Table
+ include ColumnMethods
+ end
+ end
+ end
+end
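For illustration only: the MySQL-specific column helpers above become available inside migrations once the connection is MySQL. A hypothetical migration sketch (table and column names made up):

    create_table :events do |t|
      t.unsigned_integer :hits
      t.unsigned_bigint  :total_bytes
      t.longtext         :payload
      t.json             :metadata
    end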
diff --git a/activerecord/lib/active_record/connection_adapters/mysql/schema_dumper.rb b/activerecord/lib/active_record/connection_adapters/mysql/schema_dumper.rb
new file mode 100644
index 0000000000..fbe3596dda
--- /dev/null
+++ b/activerecord/lib/active_record/connection_adapters/mysql/schema_dumper.rb
@@ -0,0 +1,75 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ module ConnectionAdapters
+ module MySQL
+ module ColumnDumper # :nodoc:
+ def prepare_column_options(column)
+ spec = super
+ spec[:unsigned] = "true" if column.unsigned?
+
+ if supports_virtual_columns? && column.virtual?
+ spec[:as] = extract_expression_for_virtual_column(column)
+ spec[:stored] = "true" if /\b(?:STORED|PERSISTENT)\b/.match?(column.extra)
+ spec = { type: schema_type(column).inspect }.merge!(spec)
+ end
+
+ spec
+ end
+
+ def migration_keys
+ super + [:unsigned]
+ end
+
+ private
+
+ def default_primary_key?(column)
+ super && column.auto_increment? && !column.unsigned?
+ end
+
+ def explicit_primary_key_default?(column)
+ column.type == :integer && !column.auto_increment?
+ end
+
+ def schema_type(column)
+ case column.sql_type
+ when /\Atimestamp\b/
+ :timestamp
+ when "tinyblob"
+ :blob
+ else
+ super
+ end
+ end
+
+ def schema_precision(column)
+ super unless /\A(?:date)?time(?:stamp)?\b/.match?(column.sql_type) && column.precision == 0
+ end
+
+ def schema_collation(column)
+ if column.collation && table_name = column.table_name
+ @table_collation_cache ||= {}
+ @table_collation_cache[table_name] ||= exec_query("SHOW TABLE STATUS LIKE #{quote(table_name)}", "SCHEMA").first["Collation"]
+ column.collation.inspect if column.collation != @table_collation_cache[table_name]
+ end
+ end
+
+ def extract_expression_for_virtual_column(column)
+ if mariadb? && version < "10.2.5"
+ create_table_info = create_table_info(column.table_name)
+ if %r/#{quote_column_name(column.name)} #{Regexp.quote(column.sql_type)}(?: COLLATE \w+)? AS \((?<expression>.+?)\) #{column.extra}/ =~ create_table_info
+ $~[:expression].inspect
+ end
+ else
+ scope = quoted_scope(column.table_name)
+ sql = "SELECT generation_expression FROM information_schema.columns" \
+ " WHERE table_schema = #{scope[:schema]}" \
+ " AND table_name = #{scope[:name]}" \
+ " AND column_name = #{quote(column.name)}"
+ query_value(sql, "SCHEMA").inspect
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/connection_adapters/mysql/schema_statements.rb b/activerecord/lib/active_record/connection_adapters/mysql/schema_statements.rb
new file mode 100644
index 0000000000..1d87d60ba9
--- /dev/null
+++ b/activerecord/lib/active_record/connection_adapters/mysql/schema_statements.rb
@@ -0,0 +1,131 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ module ConnectionAdapters
+ module MySQL
+ module SchemaStatements # :nodoc:
+ # Returns an array of indexes for the given table.
+ def indexes(table_name, name = nil)
+ if name
+ ActiveSupport::Deprecation.warn(<<-MSG.squish)
+ Passing name to #indexes is deprecated without replacement.
+ MSG
+ end
+
+ indexes = []
+ current_index = nil
+ execute_and_free("SHOW KEYS FROM #{quote_table_name(table_name)}", "SCHEMA") do |result|
+ each_hash(result) do |row|
+ if current_index != row[:Key_name]
+ next if row[:Key_name] == "PRIMARY" # skip the primary key
+ current_index = row[:Key_name]
+
+ mysql_index_type = row[:Index_type].downcase.to_sym
+ case mysql_index_type
+ when :fulltext, :spatial
+ index_type = mysql_index_type
+ when :btree, :hash
+ index_using = mysql_index_type
+ end
+
+ indexes << IndexDefinition.new(
+ row[:Table],
+ row[:Key_name],
+ row[:Non_unique].to_i == 0,
+ type: index_type,
+ using: index_using,
+ comment: row[:Index_comment].presence
+ )
+ end
+
+ indexes.last.columns << row[:Column_name]
+ indexes.last.lengths.merge!(row[:Column_name] => row[:Sub_part].to_i) if row[:Sub_part]
+ indexes.last.orders.merge!(row[:Column_name] => :desc) if row[:Collation] == "D"
+ end
+ end
+
+ indexes
+ end
+
+ def remove_column(table_name, column_name, type = nil, options = {})
+ if foreign_key_exists?(table_name, column: column_name)
+ remove_foreign_key(table_name, column: column_name)
+ end
+ super
+ end
+
+ def internal_string_options_for_primary_key
+ super.tap do |options|
+ if CHARSETS_OF_4BYTES_MAXLEN.include?(charset) && (mariadb? || version < "8.0.0")
+ options[:collation] = collation.sub(/\A[^_]+/, "utf8")
+ end
+ end
+ end
+
+ private
+ CHARSETS_OF_4BYTES_MAXLEN = ["utf8mb4", "utf16", "utf16le", "utf32"]
+
+ def schema_creation
+ MySQL::SchemaCreation.new(self)
+ end
+
+ def create_table_definition(*args)
+ MySQL::TableDefinition.new(*args)
+ end
+
+ def new_column_from_field(table_name, field)
+ type_metadata = fetch_type_metadata(field[:Type], field[:Extra])
+ if type_metadata.type == :datetime && /\ACURRENT_TIMESTAMP(?:\(\))?\z/i.match?(field[:Default])
+ default, default_function = nil, "CURRENT_TIMESTAMP"
+ else
+ default, default_function = field[:Default], nil
+ end
+
+ MySQL::Column.new(
+ field[:Field],
+ default,
+ type_metadata,
+ field[:Null] == "YES",
+ table_name,
+ default_function,
+ field[:Collation],
+ comment: field[:Comment].presence
+ )
+ end
+
+ def fetch_type_metadata(sql_type, extra = "")
+ MySQL::TypeMetadata.new(super(sql_type), extra: extra)
+ end
+
+ def extract_foreign_key_action(specifier)
+ super unless specifier == "RESTRICT"
+ end
+
+ def data_source_sql(name = nil, type: nil)
+ scope = quoted_scope(name, type: type)
+
+ sql = "SELECT table_name FROM information_schema.tables".dup
+ sql << " WHERE table_schema = #{scope[:schema]}"
+ sql << " AND table_name = #{scope[:name]}" if scope[:name]
+ sql << " AND table_type = #{scope[:type]}" if scope[:type]
+ sql
+ end
+
+ def quoted_scope(name = nil, type: nil)
+ schema, name = extract_schema_qualified_name(name)
+ scope = {}
+ scope[:schema] = schema ? quote(schema) : "database()"
+ scope[:name] = quote(name) if name
+ scope[:type] = quote(type) if type
+ scope
+ end
+
+ def extract_schema_qualified_name(string)
+ schema, name = string.to_s.scan(/[^`.\s]+|`[^`]*`/)
+ schema, name = nil, schema unless name
+ [schema, name]
+ end
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/connection_adapters/mysql/type_metadata.rb b/activerecord/lib/active_record/connection_adapters/mysql/type_metadata.rb
new file mode 100644
index 0000000000..7ad0944d51
--- /dev/null
+++ b/activerecord/lib/active_record/connection_adapters/mysql/type_metadata.rb
@@ -0,0 +1,35 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ module ConnectionAdapters
+ module MySQL
+ class TypeMetadata < DelegateClass(SqlTypeMetadata) # :nodoc:
+ undef to_yaml if method_defined?(:to_yaml)
+
+ attr_reader :extra
+
+ def initialize(type_metadata, extra: "")
+ super(type_metadata)
+ @type_metadata = type_metadata
+ @extra = extra
+ end
+
+ def ==(other)
+ other.is_a?(MySQL::TypeMetadata) &&
+ attributes_for_hash == other.attributes_for_hash
+ end
+ alias eql? ==
+
+ def hash
+ attributes_for_hash.hash
+ end
+
+ protected
+
+ def attributes_for_hash
+ [self.class, @type_metadata, extra]
+ end
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/connection_adapters/mysql2_adapter.rb b/activerecord/lib/active_record/connection_adapters/mysql2_adapter.rb
new file mode 100644
index 0000000000..2c2321872d
--- /dev/null
+++ b/activerecord/lib/active_record/connection_adapters/mysql2_adapter.rb
@@ -0,0 +1,125 @@
+# frozen_string_literal: true
+
+require_relative "abstract_mysql_adapter"
+require_relative "mysql/database_statements"
+
+gem "mysql2", ">= 0.3.18", "< 0.5"
+require "mysql2"
+raise "mysql2 0.4.3 is not supported. Please upgrade to 0.4.4+" if Mysql2::VERSION == "0.4.3"
+
+module ActiveRecord
+ module ConnectionHandling # :nodoc:
+ # Establishes a connection to the database that's used by all Active Record objects.
+ def mysql2_connection(config)
+ config = config.symbolize_keys
+ config[:flags] ||= 0
+
+ if config[:flags].kind_of? Array
+ config[:flags].push "FOUND_ROWS".freeze
+ else
+ config[:flags] |= Mysql2::Client::FOUND_ROWS
+ end
+
+ client = Mysql2::Client.new(config)
+ ConnectionAdapters::Mysql2Adapter.new(client, logger, nil, config)
+ rescue Mysql2::Error => error
+ if error.message.include?("Unknown database")
+ raise ActiveRecord::NoDatabaseError
+ else
+ raise
+ end
+ end
+ end
+
+ module ConnectionAdapters
+ class Mysql2Adapter < AbstractMysqlAdapter
+ ADAPTER_NAME = "Mysql2".freeze
+
+ include MySQL::DatabaseStatements
+
+ def initialize(connection, logger, connection_options, config)
+ super
+ @prepared_statements = false unless config.key?(:prepared_statements)
+ configure_connection
+ end
+
+ def supports_json?
+ !mariadb? && version >= "5.7.8"
+ end
+
+ def supports_comments?
+ true
+ end
+
+ def supports_comments_in_create?
+ true
+ end
+
+ def supports_savepoints?
+ true
+ end
+
+ # HELPER METHODS ===========================================
+
+ def each_hash(result) # :nodoc:
+ if block_given?
+ result.each(as: :hash, symbolize_keys: true) do |row|
+ yield row
+ end
+ else
+ to_enum(:each_hash, result)
+ end
+ end
+
+ def error_number(exception)
+ exception.error_number if exception.respond_to?(:error_number)
+ end
+
+ #--
+ # QUOTING ==================================================
+ #++
+
+ def quote_string(string)
+ @connection.escape(string)
+ end
+
+ #--
+ # CONNECTION MANAGEMENT ====================================
+ #++
+
+ def active?
+ @connection.ping
+ end
+
+ def reconnect!
+ super
+ disconnect!
+ connect
+ end
+ alias :reset! :reconnect!
+
+ # Disconnects from the database if already connected.
+ # Otherwise, this method does nothing.
+ def disconnect!
+ super
+ @connection.close
+ end
+
+ private
+
+ def connect
+ @connection = Mysql2::Client.new(@config)
+ configure_connection
+ end
+
+ def configure_connection
+ @connection.query_options.merge!(as: :array)
+ super
+ end
+
+ def full_version
+ @full_version ||= @connection.server_info[:version]
+ end
+ end
+ end
+end
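For illustration only: a sketch of the kind of configuration that reaches mysql2_connection above. Normally this comes from config/database.yml; all values here are made up.

    ActiveRecord::Base.establish_connection(
      adapter:  "mysql2",
      host:     "localhost",
      username: "root",
      password: "secret",
      database: "app_development"
      # :flags may be given as an Array of names or as an Integer bitmask;
      # the adapter adds FOUND_ROWS in either case.
    )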
diff --git a/activerecord/lib/active_record/connection_adapters/postgresql/column.rb b/activerecord/lib/active_record/connection_adapters/postgresql/column.rb
new file mode 100644
index 0000000000..1b67cee24b
--- /dev/null
+++ b/activerecord/lib/active_record/connection_adapters/postgresql/column.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ module ConnectionAdapters
+ # PostgreSQL-specific extensions to column definitions in a table.
+ class PostgreSQLColumn < Column #:nodoc:
+ delegate :array, :oid, :fmod, to: :sql_type_metadata
+ alias :array? :array
+
+ def serial?
+ return unless default_function
+
+ %r{\Anextval\('"?#{table_name}_#{name}_seq"?'::regclass\)\z} === default_function
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/connection_adapters/postgresql/database_statements.rb b/activerecord/lib/active_record/connection_adapters/postgresql/database_statements.rb
new file mode 100644
index 0000000000..0dd4aac463
--- /dev/null
+++ b/activerecord/lib/active_record/connection_adapters/postgresql/database_statements.rb
@@ -0,0 +1,164 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ module ConnectionAdapters
+ module PostgreSQL
+ module DatabaseStatements
+ def explain(arel, binds = [])
+ sql, binds = to_sql(arel, binds)
+ sql = "EXPLAIN #{sql}"
+ PostgreSQL::ExplainPrettyPrinter.new.pp(exec_query(sql, "EXPLAIN", binds))
+ end
+
+ # The internal PostgreSQL identifier of the money data type.
+ MONEY_COLUMN_TYPE_OID = 790 #:nodoc:
+ # The internal PostgreSQL identifier of the BYTEA data type.
+ BYTEA_COLUMN_TYPE_OID = 17 #:nodoc:
+
+ # Creates a 2D array representing the result set.
+ def result_as_array(res) #:nodoc:
+ # Check if we have any binary columns and whether they need escaping.
+ ftypes = Array.new(res.nfields) do |i|
+ [i, res.ftype(i)]
+ end
+
+ rows = res.values
+ return rows unless ftypes.any? { |_, x|
+ x == BYTEA_COLUMN_TYPE_OID || x == MONEY_COLUMN_TYPE_OID
+ }
+
+ typehash = ftypes.group_by { |_, type| type }
+ binaries = typehash[BYTEA_COLUMN_TYPE_OID] || []
+ monies = typehash[MONEY_COLUMN_TYPE_OID] || []
+
+ rows.each do |row|
+ # Unescape the string value of any BYTEA field (OID == 17).
+ binaries.each do |index, _|
+ row[index] = unescape_bytea(row[index])
+ end
+
+ # If this is a money type column and there are any currency symbols,
+ # then strip them off. Indeed it would be prettier to do this in
+ # PostgreSQLColumn.string_to_decimal but would break form input
+ # fields that call value_before_type_cast.
+ monies.each do |index, _|
+ data = row[index]
+ # Because money output is formatted according to the locale, there are two
+ # cases to consider (note the decimal separators):
+ # (1) $12,345,678.12
+ # (2) $12.345.678,12
+ case data
+ when /^-?\D+[\d,]+\.\d{2}$/ # (1)
+ data.gsub!(/[^-\d.]/, "")
+ when /^-?\D+[\d.]+,\d{2}$/ # (2)
+ data.gsub!(/[^-\d,]/, "").sub!(/,/, ".")
+ end
+ end
+ end
+ end
+
+ # Queries the database and returns the results in an Array-like object
+ def query(sql, name = nil) #:nodoc:
+ log(sql, name) do
+ ActiveSupport::Dependencies.interlock.permit_concurrent_loads do
+ result_as_array @connection.async_exec(sql)
+ end
+ end
+ end
+
+ # Executes an SQL statement, returning a PG::Result object on success
+ # or raising a PG::Error exception otherwise.
+ # Note: the PG::Result object is manually memory managed; if you don't
+ # need it specifically, you may want to consider the <tt>exec_query</tt> wrapper.
+ def execute(sql, name = nil)
+ log(sql, name) do
+ ActiveSupport::Dependencies.interlock.permit_concurrent_loads do
+ @connection.async_exec(sql)
+ end
+ end
+ end
+
+ def exec_query(sql, name = "SQL", binds = [], prepare: false)
+ execute_and_clear(sql, name, binds, prepare: prepare) do |result|
+ types = {}
+ fields = result.fields
+ fields.each_with_index do |fname, i|
+ ftype = result.ftype i
+ fmod = result.fmod i
+ types[fname] = get_oid_type(ftype, fmod, fname)
+ end
+ ActiveRecord::Result.new(fields, result.values, types)
+ end
+ end
+
+ def exec_delete(sql, name = nil, binds = [])
+ execute_and_clear(sql, name, binds) { |result| result.cmd_tuples }
+ end
+ alias :exec_update :exec_delete
+
+ def sql_for_insert(sql, pk, id_value, sequence_name, binds) # :nodoc:
+ if pk.nil?
+ # Extract the table from the insert sql. Yuck.
+ table_ref = extract_table_ref_from_insert_sql(sql)
+ pk = primary_key(table_ref) if table_ref
+ end
+
+ if pk = suppress_composite_primary_key(pk)
+ sql = "#{sql} RETURNING #{quote_column_name(pk)}"
+ end
+
+ super
+ end
+ private :sql_for_insert
+
+ def exec_insert(sql, name = nil, binds = [], pk = nil, sequence_name = nil)
+ if use_insert_returning? || pk == false
+ super
+ else
+ result = exec_query(sql, name, binds)
+ unless sequence_name
+ table_ref = extract_table_ref_from_insert_sql(sql)
+ if table_ref
+ pk = primary_key(table_ref) if pk.nil?
+ pk = suppress_composite_primary_key(pk)
+ sequence_name = default_sequence_name(table_ref, pk)
+ end
+ return result unless sequence_name
+ end
+ last_insert_id_result(sequence_name)
+ end
+ end
+
+ # Begins a transaction.
+ def begin_db_transaction
+ execute "BEGIN"
+ end
+
+ def begin_isolated_db_transaction(isolation)
+ begin_db_transaction
+ execute "SET TRANSACTION ISOLATION LEVEL #{transaction_isolation_levels.fetch(isolation)}"
+ end
+
+ # Commits a transaction.
+ def commit_db_transaction
+ execute "COMMIT"
+ end
+
+ # Aborts a transaction.
+ def exec_rollback_db_transaction
+ execute "ROLLBACK"
+ end
+
+ private
+ # Returns the current ID of a table's sequence.
+ def last_insert_id_result(sequence_name)
+ exec_query("SELECT currval(#{quote(sequence_name)})", "SQL")
+ end
+
+ def suppress_composite_primary_key(pk)
+ pk unless pk.is_a?(Array)
+ end
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/connection_adapters/postgresql/explain_pretty_printer.rb b/activerecord/lib/active_record/connection_adapters/postgresql/explain_pretty_printer.rb
new file mode 100644
index 0000000000..086a5dcc15
--- /dev/null
+++ b/activerecord/lib/active_record/connection_adapters/postgresql/explain_pretty_printer.rb
@@ -0,0 +1,44 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ module ConnectionAdapters
+ module PostgreSQL
+ class ExplainPrettyPrinter # :nodoc:
+ # Pretty prints the result of an EXPLAIN in a way that resembles the output of the
+ # PostgreSQL shell:
+ #
+ # QUERY PLAN
+ # ------------------------------------------------------------------------------
+ # Nested Loop Left Join (cost=0.00..37.24 rows=8 width=0)
+ # Join Filter: (posts.user_id = users.id)
+ # -> Index Scan using users_pkey on users (cost=0.00..8.27 rows=1 width=4)
+ # Index Cond: (id = 1)
+ # -> Seq Scan on posts (cost=0.00..28.88 rows=8 width=4)
+ # Filter: (posts.user_id = 1)
+ # (6 rows)
+ #
+ def pp(result)
+ header = result.columns.first
+ lines = result.rows.map(&:first)
+
+ # We add 2 because there's one char of padding on both sides; note
+ # the extra hyphens in the example above.
+ width = [header, *lines].map(&:length).max + 2
+
+ pp = []
+
+ pp << header.center(width).rstrip
+ pp << "-" * width
+
+ pp += lines.map { |line| " #{line}" }
+
+ nrows = result.rows.length
+ rows_label = nrows == 1 ? "row" : "rows"
+ pp << "(#{nrows} #{rows_label})"
+
+ pp.join("\n") + "\n"
+ end
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/connection_adapters/postgresql/oid.rb b/activerecord/lib/active_record/connection_adapters/postgresql/oid.rb
new file mode 100644
index 0000000000..b28418d74f
--- /dev/null
+++ b/activerecord/lib/active_record/connection_adapters/postgresql/oid.rb
@@ -0,0 +1,33 @@
+# frozen_string_literal: true
+
+require_relative "oid/array"
+require_relative "oid/bit"
+require_relative "oid/bit_varying"
+require_relative "oid/bytea"
+require_relative "oid/cidr"
+require_relative "oid/date_time"
+require_relative "oid/decimal"
+require_relative "oid/enum"
+require_relative "oid/hstore"
+require_relative "oid/inet"
+require_relative "oid/jsonb"
+require_relative "oid/money"
+require_relative "oid/oid"
+require_relative "oid/point"
+require_relative "oid/legacy_point"
+require_relative "oid/range"
+require_relative "oid/specialized_string"
+require_relative "oid/uuid"
+require_relative "oid/vector"
+require_relative "oid/xml"
+
+require_relative "oid/type_map_initializer"
+
+module ActiveRecord
+ module ConnectionAdapters
+ module PostgreSQL
+ module OID # :nodoc:
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/connection_adapters/postgresql/oid/array.rb b/activerecord/lib/active_record/connection_adapters/postgresql/oid/array.rb
new file mode 100644
index 0000000000..d6852082ac
--- /dev/null
+++ b/activerecord/lib/active_record/connection_adapters/postgresql/oid/array.rb
@@ -0,0 +1,82 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ module ConnectionAdapters
+ module PostgreSQL
+ module OID # :nodoc:
+ class Array < Type::Value # :nodoc:
+ include Type::Helpers::Mutable
+
+ Data = Struct.new(:encoder, :values) # :nodoc:
+
+ attr_reader :subtype, :delimiter
+ delegate :type, :user_input_in_time_zone, :limit, :precision, :scale, to: :subtype
+
+ def initialize(subtype, delimiter = ",")
+ @subtype = subtype
+ @delimiter = delimiter
+
+ @pg_encoder = PG::TextEncoder::Array.new name: "#{type}[]", delimiter: delimiter
+ @pg_decoder = PG::TextDecoder::Array.new name: "#{type}[]", delimiter: delimiter
+ end
+
+ def deserialize(value)
+ case value
+ when ::String
+ type_cast_array(@pg_decoder.decode(value), :deserialize)
+ when Data
+ type_cast_array(value.values, :deserialize)
+ else
+ super
+ end
+ end
+
+ def cast(value)
+ if value.is_a?(::String)
+ value = @pg_decoder.decode(value)
+ end
+ type_cast_array(value, :cast)
+ end
+
+ def serialize(value)
+ if value.is_a?(::Array)
+ casted_values = type_cast_array(value, :serialize)
+ Data.new(@pg_encoder, casted_values)
+ else
+ super
+ end
+ end
+
+ def ==(other)
+ other.is_a?(Array) &&
+ subtype == other.subtype &&
+ delimiter == other.delimiter
+ end
+
+ def type_cast_for_schema(value)
+ return super unless value.is_a?(::Array)
+ "[" + value.map { |v| subtype.type_cast_for_schema(v) }.join(", ") + "]"
+ end
+
+ def map(value, &block)
+ value.map(&block)
+ end
+
+ def changed_in_place?(raw_old_value, new_value)
+ deserialize(raw_old_value) != new_value
+ end
+
+ private
+
+ def type_cast_array(value, method)
+ if value.is_a?(::Array)
+ value.map { |item| type_cast_array(item, method) }
+ else
+ @subtype.public_send(method, value)
+ end
+ end
+ end
+ end
+ end
+ end
+end
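For illustration only: a standalone sketch of the array type above casting a PostgreSQL array literal. It assumes the pg gem is available for the text decoder; the integer subtype and values are chosen for illustration.

    int_array = ActiveRecord::ConnectionAdapters::PostgreSQL::OID::Array.new(
      ActiveRecord::Type::Integer.new
    )
    int_array.cast("{1,2,3}")        # => [1, 2, 3]
    int_array.cast([["1"], ["2"]])   # => [[1], [2]]  (nested arrays are cast element-wise)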
diff --git a/activerecord/lib/active_record/connection_adapters/postgresql/oid/bit.rb b/activerecord/lib/active_record/connection_adapters/postgresql/oid/bit.rb
new file mode 100644
index 0000000000..587e95d192
--- /dev/null
+++ b/activerecord/lib/active_record/connection_adapters/postgresql/oid/bit.rb
@@ -0,0 +1,56 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ module ConnectionAdapters
+ module PostgreSQL
+ module OID # :nodoc:
+ class Bit < Type::Value # :nodoc:
+ def type
+ :bit
+ end
+
+ def cast_value(value)
+ if ::String === value
+ case value
+ when /^0x/i
+ value[2..-1].hex.to_s(2) # Hexadecimal notation
+ else
+ value # Bit-string notation
+ end
+ else
+ value.to_s
+ end
+ end
+
+ def serialize(value)
+ Data.new(super) if value
+ end
+
+ class Data
+ def initialize(value)
+ @value = value
+ end
+
+ def to_s
+ value
+ end
+
+ def binary?
+ /\A[01]*\Z/.match?(value)
+ end
+
+ def hex?
+ /\A[0-9A-F]*\Z/i.match?(value)
+ end
+
+ # TODO Change this to private once we've dropped Ruby 2.2 support.
+ # Workaround for Ruby 2.2 "private attribute?" warning.
+ protected
+
+ attr_reader :value
+ end
+ end
+ end
+ end
+ end
+end
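For illustration only: how the bit type above normalizes its two input notations (values made up).

    bit = ActiveRecord::ConnectionAdapters::PostgreSQL::OID::Bit.new
    bit.cast("0xF")    # => "1111"  (hexadecimal notation expanded to a bit string)
    bit.cast("1010")   # => "1010"  (bit-string notation passed through)
    bit.serialize("1010").binary?   # => true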
diff --git a/activerecord/lib/active_record/connection_adapters/postgresql/oid/bit_varying.rb b/activerecord/lib/active_record/connection_adapters/postgresql/oid/bit_varying.rb
new file mode 100644
index 0000000000..dc7079dda2
--- /dev/null
+++ b/activerecord/lib/active_record/connection_adapters/postgresql/oid/bit_varying.rb
@@ -0,0 +1,15 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ module ConnectionAdapters
+ module PostgreSQL
+ module OID # :nodoc:
+ class BitVarying < OID::Bit # :nodoc:
+ def type
+ :bit_varying
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/connection_adapters/postgresql/oid/bytea.rb b/activerecord/lib/active_record/connection_adapters/postgresql/oid/bytea.rb
new file mode 100644
index 0000000000..a3c60ecef6
--- /dev/null
+++ b/activerecord/lib/active_record/connection_adapters/postgresql/oid/bytea.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ module ConnectionAdapters
+ module PostgreSQL
+ module OID # :nodoc:
+ class Bytea < Type::Binary # :nodoc:
+ def deserialize(value)
+ return if value.nil?
+ return value.to_s if value.is_a?(Type::Binary::Data)
+ PG::Connection.unescape_bytea(super)
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/connection_adapters/postgresql/oid/cidr.rb b/activerecord/lib/active_record/connection_adapters/postgresql/oid/cidr.rb
new file mode 100644
index 0000000000..66e99d9404
--- /dev/null
+++ b/activerecord/lib/active_record/connection_adapters/postgresql/oid/cidr.rb
@@ -0,0 +1,50 @@
+# frozen_string_literal: true
+
+require "ipaddr"
+
+module ActiveRecord
+ module ConnectionAdapters
+ module PostgreSQL
+ module OID # :nodoc:
+ class Cidr < Type::Value # :nodoc:
+ def type
+ :cidr
+ end
+
+ def type_cast_for_schema(value)
+ subnet_mask = value.instance_variable_get(:@mask_addr)
+
+ # If the subnet mask is equal to /32, don't output it
+ if subnet_mask == (2**32 - 1)
+ "\"#{value}\""
+ else
+ "\"#{value}/#{subnet_mask.to_s(2).count('1')}\""
+ end
+ end
+
+ def serialize(value)
+ if IPAddr === value
+ "#{value}/#{value.instance_variable_get(:@mask_addr).to_s(2).count('1')}"
+ else
+ value
+ end
+ end
+
+ def cast_value(value)
+ if value.nil?
+ nil
+ elsif String === value
+ begin
+ IPAddr.new(value)
+ rescue ArgumentError
+ nil
+ end
+ else
+ value
+ end
+ end
+ end
+ end
+ end
+ end
+end
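For illustration only: a sketch of the network type above round-tripping a CIDR value (address made up).

    cidr  = ActiveRecord::ConnectionAdapters::PostgreSQL::OID::Cidr.new
    value = cidr.cast("192.168.2.0/24")   # => an IPAddr for 192.168.2.0/24
    cidr.serialize(value)                 # => "192.168.2.0/24"
    cidr.cast("not an address")           # => nil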
diff --git a/activerecord/lib/active_record/connection_adapters/postgresql/oid/date_time.rb b/activerecord/lib/active_record/connection_adapters/postgresql/oid/date_time.rb
new file mode 100644
index 0000000000..cd667422f5
--- /dev/null
+++ b/activerecord/lib/active_record/connection_adapters/postgresql/oid/date_time.rb
@@ -0,0 +1,23 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ module ConnectionAdapters
+ module PostgreSQL
+ module OID # :nodoc:
+ class DateTime < Type::DateTime # :nodoc:
+ def cast_value(value)
+ case value
+ when "infinity" then ::Float::INFINITY
+ when "-infinity" then -::Float::INFINITY
+ when / BC$/
+ astronomical_year = format("%04d", -value[/^\d+/].to_i + 1)
+ super(value.sub(/ BC$/, "").sub(/^\d+/, astronomical_year))
+ else
+ super
+ end
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/connection_adapters/postgresql/oid/decimal.rb b/activerecord/lib/active_record/connection_adapters/postgresql/oid/decimal.rb
new file mode 100644
index 0000000000..879dba7afd
--- /dev/null
+++ b/activerecord/lib/active_record/connection_adapters/postgresql/oid/decimal.rb
@@ -0,0 +1,15 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ module ConnectionAdapters
+ module PostgreSQL
+ module OID # :nodoc:
+ class Decimal < Type::Decimal # :nodoc:
+ def infinity(options = {})
+ BigDecimal.new("Infinity") * (options[:negative] ? -1 : 1)
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/connection_adapters/postgresql/oid/enum.rb b/activerecord/lib/active_record/connection_adapters/postgresql/oid/enum.rb
new file mode 100644
index 0000000000..f70f09ad95
--- /dev/null
+++ b/activerecord/lib/active_record/connection_adapters/postgresql/oid/enum.rb
@@ -0,0 +1,21 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ module ConnectionAdapters
+ module PostgreSQL
+ module OID # :nodoc:
+ class Enum < Type::Value # :nodoc:
+ def type
+ :enum
+ end
+
+ private
+
+ def cast_value(value)
+ value.to_s
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/connection_adapters/postgresql/oid/hstore.rb b/activerecord/lib/active_record/connection_adapters/postgresql/oid/hstore.rb
new file mode 100644
index 0000000000..aabe83b85d
--- /dev/null
+++ b/activerecord/lib/active_record/connection_adapters/postgresql/oid/hstore.rb
@@ -0,0 +1,71 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ module ConnectionAdapters
+ module PostgreSQL
+ module OID # :nodoc:
+ class Hstore < Type::Value # :nodoc:
+ include Type::Helpers::Mutable
+
+ def type
+ :hstore
+ end
+
+ def deserialize(value)
+ if value.is_a?(::String)
+ ::Hash[value.scan(HstorePair).map { |k, v|
+ v = v.upcase == "NULL" ? nil : v.gsub(/\A"(.*)"\Z/m, '\1').gsub(/\\(.)/, '\1')
+ k = k.gsub(/\A"(.*)"\Z/m, '\1').gsub(/\\(.)/, '\1')
+ [k, v]
+ }]
+ else
+ value
+ end
+ end
+
+ def serialize(value)
+ if value.is_a?(::Hash)
+ value.map { |k, v| "#{escape_hstore(k)}=>#{escape_hstore(v)}" }.join(", ")
+ elsif value.respond_to?(:to_unsafe_h)
+ serialize(value.to_unsafe_h)
+ else
+ value
+ end
+ end
+
+ def accessor
+ ActiveRecord::Store::StringKeyedHashAccessor
+ end
+
+ # Will compare the Hash equivalents of +raw_old_value+ and +new_value+.
+ # By comparing hashes, this avoids an edge case where the order of
+ # the keys changes between the two hashes, and they would not be marked
+ # as equal.
+ def changed_in_place?(raw_old_value, new_value)
+ deserialize(raw_old_value) != new_value
+ end
+
+ private
+
+ HstorePair = begin
+ quoted_string = /"[^"\\]*(?:\\.[^"\\]*)*"/
+ unquoted_string = /(?:\\.|[^\s,])[^\s=,\\]*(?:\\.[^\s=,\\]*|=[^,>])*/
+ /(#{quoted_string}|#{unquoted_string})\s*=>\s*(#{quoted_string}|#{unquoted_string})/
+ end
+
+ def escape_hstore(value)
+ if value.nil?
+ "NULL"
+ else
+ if value == ""
+ '""'
+ else
+ '"%s"' % value.to_s.gsub(/(["\\])/, '\\\\\1')
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+end
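For illustration only: the hstore type above maps between Ruby hashes and hstore text. A sketch with made-up keys:

    hstore = ActiveRecord::ConnectionAdapters::PostgreSQL::OID::Hstore.new
    hstore.deserialize('"city"=>"Paris", "zip"=>NULL')  # => { "city" => "Paris", "zip" => nil }
    hstore.serialize("city" => "Paris")                 # => '"city"=>"Paris"'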
diff --git a/activerecord/lib/active_record/connection_adapters/postgresql/oid/inet.rb b/activerecord/lib/active_record/connection_adapters/postgresql/oid/inet.rb
new file mode 100644
index 0000000000..55be71fd26
--- /dev/null
+++ b/activerecord/lib/active_record/connection_adapters/postgresql/oid/inet.rb
@@ -0,0 +1,15 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ module ConnectionAdapters
+ module PostgreSQL
+ module OID # :nodoc:
+ class Inet < Cidr # :nodoc:
+ def type
+ :inet
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/connection_adapters/postgresql/oid/jsonb.rb b/activerecord/lib/active_record/connection_adapters/postgresql/oid/jsonb.rb
new file mode 100644
index 0000000000..e0216f1089
--- /dev/null
+++ b/activerecord/lib/active_record/connection_adapters/postgresql/oid/jsonb.rb
@@ -0,0 +1,15 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ module ConnectionAdapters
+ module PostgreSQL
+ module OID # :nodoc:
+ class Jsonb < Type::Json # :nodoc:
+ def type
+ :jsonb
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/connection_adapters/postgresql/oid/legacy_point.rb b/activerecord/lib/active_record/connection_adapters/postgresql/oid/legacy_point.rb
new file mode 100644
index 0000000000..7b057a8452
--- /dev/null
+++ b/activerecord/lib/active_record/connection_adapters/postgresql/oid/legacy_point.rb
@@ -0,0 +1,45 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ module ConnectionAdapters
+ module PostgreSQL
+ module OID # :nodoc:
+ class LegacyPoint < Type::Value # :nodoc:
+ include Type::Helpers::Mutable
+
+ def type
+ :point
+ end
+
+ def cast(value)
+ case value
+ when ::String
+ if value[0] == "(" && value[-1] == ")"
+ value = value[1...-1]
+ end
+ cast(value.split(","))
+ when ::Array
+ value.map { |v| Float(v) }
+ else
+ value
+ end
+ end
+
+ def serialize(value)
+ if value.is_a?(::Array)
+ "(#{number_for_point(value[0])},#{number_for_point(value[1])})"
+ else
+ super
+ end
+ end
+
+ private
+
+ def number_for_point(number)
+ number.to_s.gsub(/\.0$/, "")
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/connection_adapters/postgresql/oid/money.rb b/activerecord/lib/active_record/connection_adapters/postgresql/oid/money.rb
new file mode 100644
index 0000000000..6434377b57
--- /dev/null
+++ b/activerecord/lib/active_record/connection_adapters/postgresql/oid/money.rb
@@ -0,0 +1,41 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ module ConnectionAdapters
+ module PostgreSQL
+ module OID # :nodoc:
+ class Money < Type::Decimal # :nodoc:
+ def type
+ :money
+ end
+
+ def scale
+ 2
+ end
+
+ def cast_value(value)
+ return value unless ::String === value
+
+ # Because money output is formatted according to the locale, there are two
+ # cases to consider (note the decimal separators):
+ # (1) $12,345,678.12
+ # (2) $12.345.678,12
+ # Negative values are represented as follows:
+ # (3) -$2.55
+ # (4) ($2.55)
+
+ value = value.sub(/^\((.+)\)$/, '-\1') # (4)
+ case value
+ when /^-?\D+[\d,]+\.\d{2}$/ # (1)
+ value.gsub!(/[^-\d.]/, "")
+ when /^-?\D+[\d.]+,\d{2}$/ # (2)
+ value.gsub!(/[^-\d,]/, "").sub!(/,/, ".")
+ end
+
+ super(value)
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/connection_adapters/postgresql/oid/oid.rb b/activerecord/lib/active_record/connection_adapters/postgresql/oid/oid.rb
new file mode 100644
index 0000000000..d8c044320d
--- /dev/null
+++ b/activerecord/lib/active_record/connection_adapters/postgresql/oid/oid.rb
@@ -0,0 +1,15 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ module ConnectionAdapters
+ module PostgreSQL
+ module OID # :nodoc:
+ class Oid < Type::Integer # :nodoc:
+ def type
+ :oid
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/connection_adapters/postgresql/oid/point.rb b/activerecord/lib/active_record/connection_adapters/postgresql/oid/point.rb
new file mode 100644
index 0000000000..02a9c506f6
--- /dev/null
+++ b/activerecord/lib/active_record/connection_adapters/postgresql/oid/point.rb
@@ -0,0 +1,65 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ Point = Struct.new(:x, :y)
+
+ module ConnectionAdapters
+ module PostgreSQL
+ module OID # :nodoc:
+ class Point < Type::Value # :nodoc:
+ include Type::Helpers::Mutable
+
+ def type
+ :point
+ end
+
+ def cast(value)
+ case value
+ when ::String
+ return if value.blank?
+
+ if value[0] == "(" && value[-1] == ")"
+ value = value[1...-1]
+ end
+ x, y = value.split(",")
+ build_point(x, y)
+ when ::Array
+ build_point(*value)
+ else
+ value
+ end
+ end
+
+ def serialize(value)
+ case value
+ when ActiveRecord::Point
+ "(#{number_for_point(value.x)},#{number_for_point(value.y)})"
+ when ::Array
+ serialize(build_point(*value))
+ else
+ super
+ end
+ end
+
+ def type_cast_for_schema(value)
+ if ActiveRecord::Point === value
+ [value.x, value.y]
+ else
+ super
+ end
+ end
+
+ private
+
+ def number_for_point(number)
+ number.to_s.gsub(/\.0$/, "")
+ end
+
+ def build_point(x, y)
+ ActiveRecord::Point.new(Float(x), Float(y))
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/connection_adapters/postgresql/oid/range.rb b/activerecord/lib/active_record/connection_adapters/postgresql/oid/range.rb
new file mode 100644
index 0000000000..7d5d7d91e6
--- /dev/null
+++ b/activerecord/lib/active_record/connection_adapters/postgresql/oid/range.rb
@@ -0,0 +1,93 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ module ConnectionAdapters
+ module PostgreSQL
+ module OID # :nodoc:
+ class Range < Type::Value # :nodoc:
+ attr_reader :subtype, :type
+ delegate :user_input_in_time_zone, to: :subtype
+
+ def initialize(subtype, type = :range)
+ @subtype = subtype
+ @type = type
+ end
+
+ def type_cast_for_schema(value)
+ value.inspect.gsub("Infinity", "::Float::INFINITY")
+ end
+
+ def cast_value(value)
+ return if value == "empty"
+ return value unless value.is_a?(::String)
+
+ extracted = extract_bounds(value)
+ from = type_cast_single extracted[:from]
+ to = type_cast_single extracted[:to]
+
+ if !infinity?(from) && extracted[:exclude_start]
+ raise ArgumentError, "The Ruby Range object does not support excluding the beginning of a Range. (unsupported value: '#{value}')"
+ end
+ ::Range.new(from, to, extracted[:exclude_end])
+ end
+
+ def serialize(value)
+ if value.is_a?(::Range)
+ from = type_cast_single_for_database(value.begin)
+ to = type_cast_single_for_database(value.end)
+ "[#{from},#{to}#{value.exclude_end? ? ')' : ']'}"
+ else
+ super
+ end
+ end
+
+ def ==(other)
+ other.is_a?(Range) &&
+ other.subtype == subtype &&
+ other.type == type
+ end
+
+ def map(value) # :nodoc:
+ new_begin = yield(value.begin)
+ new_end = yield(value.end)
+ ::Range.new(new_begin, new_end, value.exclude_end?)
+ end
+
+ private
+
+ def type_cast_single(value)
+ infinity?(value) ? value : @subtype.deserialize(value)
+ end
+
+ def type_cast_single_for_database(value)
+ infinity?(value) ? "" : @subtype.serialize(value)
+ end
+
+ def extract_bounds(value)
+ from, to = value[1..-2].split(",")
+ {
+ from: (value[1] == "," || from == "-infinity") ? infinity(negative: true) : from,
+ to: (value[-2] == "," || to == "infinity") ? infinity : to,
+ exclude_start: (value[0] == "("),
+ exclude_end: (value[-1] == ")")
+ }
+ end
+
+ def infinity(negative: false)
+ if subtype.respond_to?(:infinity)
+ subtype.infinity(negative: negative)
+ elsif negative
+ -::Float::INFINITY
+ else
+ ::Float::INFINITY
+ end
+ end
+
+ def infinity?(value)
+ value.respond_to?(:infinite?) && value.infinite?
+ end
+ end
+ end
+ end
+ end
+end
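For illustration only: a sketch of the range type above with an integer subtype (bounds made up).

    int4range = ActiveRecord::ConnectionAdapters::PostgreSQL::OID::Range.new(
      ActiveRecord::Type::Integer.new, :int4range
    )
    int4range.cast("[1,10)")   # => 1...10
    int4range.cast("[1,)")     # => 1...Float::INFINITY  (an open upper bound becomes infinity)
    int4range.serialize(1..5)  # => "[1,5]"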
diff --git a/activerecord/lib/active_record/connection_adapters/postgresql/oid/specialized_string.rb b/activerecord/lib/active_record/connection_adapters/postgresql/oid/specialized_string.rb
new file mode 100644
index 0000000000..4ad1344f05
--- /dev/null
+++ b/activerecord/lib/active_record/connection_adapters/postgresql/oid/specialized_string.rb
@@ -0,0 +1,18 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ module ConnectionAdapters
+ module PostgreSQL
+ module OID # :nodoc:
+ class SpecializedString < Type::String # :nodoc:
+ attr_reader :type
+
+ def initialize(type, **options)
+ @type = type
+ super(options)
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/connection_adapters/postgresql/oid/type_map_initializer.rb b/activerecord/lib/active_record/connection_adapters/postgresql/oid/type_map_initializer.rb
new file mode 100644
index 0000000000..231278c184
--- /dev/null
+++ b/activerecord/lib/active_record/connection_adapters/postgresql/oid/type_map_initializer.rb
@@ -0,0 +1,111 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ module ConnectionAdapters
+ module PostgreSQL
+ module OID # :nodoc:
+ # This class uses the data from PostgreSQL pg_type table to build
+ # the OID -> Type mapping.
+ # - OID is an integer representing the type.
+ # - Type is an OID::Type object.
+ # This class has side effects on the +store+ passed during initialization.
+ class TypeMapInitializer # :nodoc:
+ def initialize(store)
+ @store = store
+ end
+
+ def run(records)
+ nodes = records.reject { |row| @store.key? row["oid"].to_i }
+ mapped, nodes = nodes.partition { |row| @store.key? row["typname"] }
+ ranges, nodes = nodes.partition { |row| row["typtype"] == "r".freeze }
+ enums, nodes = nodes.partition { |row| row["typtype"] == "e".freeze }
+ domains, nodes = nodes.partition { |row| row["typtype"] == "d".freeze }
+ arrays, nodes = nodes.partition { |row| row["typinput"] == "array_in".freeze }
+ composites, nodes = nodes.partition { |row| row["typelem"].to_i != 0 }
+
+ mapped.each { |row| register_mapped_type(row) }
+ enums.each { |row| register_enum_type(row) }
+ domains.each { |row| register_domain_type(row) }
+ arrays.each { |row| register_array_type(row) }
+ ranges.each { |row| register_range_type(row) }
+ composites.each { |row| register_composite_type(row) }
+ end
+
+ def query_conditions_for_initial_load
+ known_type_names = @store.keys.map { |n| "'#{n}'" }
+ known_type_types = %w('r' 'e' 'd')
+ <<-SQL % [known_type_names.join(", "), known_type_types.join(", ")]
+ WHERE
+ t.typname IN (%s)
+ OR t.typtype IN (%s)
+ OR t.typinput = 'array_in(cstring,oid,integer)'::regprocedure
+ OR t.typelem != 0
+ SQL
+ end
+
+ private
+ def register_mapped_type(row)
+ alias_type row["oid"], row["typname"]
+ end
+
+ def register_enum_type(row)
+ register row["oid"], OID::Enum.new
+ end
+
+ def register_array_type(row)
+ register_with_subtype(row["oid"], row["typelem"].to_i) do |subtype|
+ OID::Array.new(subtype, row["typdelim"])
+ end
+ end
+
+ def register_range_type(row)
+ register_with_subtype(row["oid"], row["rngsubtype"].to_i) do |subtype|
+ OID::Range.new(subtype, row["typname"].to_sym)
+ end
+ end
+
+ def register_domain_type(row)
+ if base_type = @store.lookup(row["typbasetype"].to_i)
+ register row["oid"], base_type
+ else
+ warn "unknown base type (OID: #{row["typbasetype"]}) for domain #{row["typname"]}."
+ end
+ end
+
+ def register_composite_type(row)
+ if subtype = @store.lookup(row["typelem"].to_i)
+ register row["oid"], OID::Vector.new(row["typdelim"], subtype)
+ end
+ end
+
+ def register(oid, oid_type = nil, &block)
+ oid = assert_valid_registration(oid, oid_type || block)
+ if block_given?
+ @store.register_type(oid, &block)
+ else
+ @store.register_type(oid, oid_type)
+ end
+ end
+
+ def alias_type(oid, target)
+ oid = assert_valid_registration(oid, target)
+ @store.alias_type(oid, target)
+ end
+
+ def register_with_subtype(oid, target_oid)
+ if @store.key?(target_oid)
+ register(oid) do |_, *args|
+ yield @store.lookup(target_oid, *args)
+ end
+ end
+ end
+
+ def assert_valid_registration(oid, oid_type)
+ raise ArgumentError, "can't register nil type for OID #{oid}" if oid_type.nil?
+ oid.to_i
+ end
+ end
+ end
+ end
+ end
+end
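
Each row handed to #run is a Hash shaped like a row of pg_type, and the chain of #partition calls routes it to exactly one register_* helper. A hedged illustration of the routing (column values are made up):

    # A row describing an enum type: typtype == "e" puts it in the enums
    # bucket, so @store ends up with OID::Enum registered under OID 16400.
    row = { "oid" => "16400", "typname" => "mood", "typtype" => "e",
            "typinput" => "enum_in", "typelem" => "0", "typbasetype" => "0" }
    # A row whose typinput is "array_in" would instead reach register_array_type,
    # which looks up the subtype via row["typelem"] and wraps it in OID::Array.
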
diff --git a/activerecord/lib/active_record/connection_adapters/postgresql/oid/uuid.rb b/activerecord/lib/active_record/connection_adapters/postgresql/oid/uuid.rb
new file mode 100644
index 0000000000..bc9b8dbfcf
--- /dev/null
+++ b/activerecord/lib/active_record/connection_adapters/postgresql/oid/uuid.rb
@@ -0,0 +1,23 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ module ConnectionAdapters
+ module PostgreSQL
+ module OID # :nodoc:
+ class Uuid < Type::Value # :nodoc:
+ ACCEPTABLE_UUID = %r{\A(\{)?([a-fA-F0-9]{4}-?){8}(?(1)\}|)\z}
+
+ alias_method :serialize, :deserialize
+
+ def type
+ :uuid
+ end
+
+ def cast(value)
+ value.to_s[ACCEPTABLE_UUID, 0]
+ end
+ end
+ end
+ end
+ end
+end
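
ACCEPTABLE_UUID accepts the canonical dashed form as well as a brace-wrapped form: the conditional group (?(1)\}|) only demands a closing brace when an opening one was captured. #cast returns the matched portion of the input, or nil when nothing matches; roughly:

    uuid = ActiveRecord::ConnectionAdapters::PostgreSQL::OID::Uuid.new
    uuid.cast("a0eebc99-9c0b-4ef8-bb6d-6bb9bd380a11")   # => "a0eebc99-9c0b-4ef8-bb6d-6bb9bd380a11"
    uuid.cast("{a0eebc99-9c0b-4ef8-bb6d-6bb9bd380a11}") # => whole match, braces included
    uuid.cast("not-a-uuid")                              # => nil
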
diff --git a/activerecord/lib/active_record/connection_adapters/postgresql/oid/vector.rb b/activerecord/lib/active_record/connection_adapters/postgresql/oid/vector.rb
new file mode 100644
index 0000000000..88ef626a16
--- /dev/null
+++ b/activerecord/lib/active_record/connection_adapters/postgresql/oid/vector.rb
@@ -0,0 +1,28 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ module ConnectionAdapters
+ module PostgreSQL
+ module OID # :nodoc:
+ class Vector < Type::Value # :nodoc:
+ attr_reader :delim, :subtype
+
+ # +delim+ corresponds to the `typdelim` column in the pg_type
+ # table. +subtype+ is derived from the `typelem` column in the
+ # pg_type table.
+ def initialize(delim, subtype)
+ @delim = delim
+ @subtype = subtype
+ end
+
+ # FIXME: this should probably split on +delim+ and use +subtype+
+ # to cast the values. Unfortunately, the current Rails behavior
+ # is to just return the string.
+ def cast(value)
+ value
+ end
+ end
+ end
+ end
+ end
+end
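
The FIXME above spells out the intended behaviour; a purely hypothetical version of #cast that honoured +delim+ and +subtype+ could look like the sketch below (the shipped method deliberately keeps the raw string for compatibility):

    # Hypothetical alternative, for illustration only -- not the shipped behaviour.
    def cast(value)
      return value unless value.is_a?(::String)
      value.split(delim).map { |item| subtype.cast(item) }
    end
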
diff --git a/activerecord/lib/active_record/connection_adapters/postgresql/oid/xml.rb b/activerecord/lib/active_record/connection_adapters/postgresql/oid/xml.rb
new file mode 100644
index 0000000000..042f32fdc3
--- /dev/null
+++ b/activerecord/lib/active_record/connection_adapters/postgresql/oid/xml.rb
@@ -0,0 +1,30 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ module ConnectionAdapters
+ module PostgreSQL
+ module OID # :nodoc:
+ class Xml < Type::String # :nodoc:
+ def type
+ :xml
+ end
+
+ def serialize(value)
+ return unless value
+ Data.new(super)
+ end
+
+ class Data # :nodoc:
+ def initialize(value)
+ @value = value
+ end
+
+ def to_s
+ @value
+ end
+ end
+ end
+ end
+ end
+ end
+end
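
#serialize wraps the outgoing string in Xml::Data so that the adapter's _quote and _type_cast (defined in quoting.rb below) can recognise the value and emit an xml-typed literal; roughly:

    xml  = ActiveRecord::ConnectionAdapters::PostgreSQL::OID::Xml.new
    data = xml.serialize("<note/>")  # => an Xml::Data wrapper
    data.to_s                        # => "<note/>"
    # quoting.rb then renders it as:  xml '<note/>'
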
diff --git a/activerecord/lib/active_record/connection_adapters/postgresql/quoting.rb b/activerecord/lib/active_record/connection_adapters/postgresql/quoting.rb
new file mode 100644
index 0000000000..fc458d0c73
--- /dev/null
+++ b/activerecord/lib/active_record/connection_adapters/postgresql/quoting.rb
@@ -0,0 +1,152 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ module ConnectionAdapters
+ module PostgreSQL
+ module Quoting
+ # Escapes binary strings for bytea input to the database.
+ def escape_bytea(value)
+ @connection.escape_bytea(value) if value
+ end
+
+ # Unescapes bytea output from a database to the binary string it represents.
+ # NOTE: This is NOT an inverse of escape_bytea! This is only to be used
+ # on escaped binary output from the database driver.
+ def unescape_bytea(value)
+ @connection.unescape_bytea(value) if value
+ end
+
+ # Quotes strings for use in SQL input.
+ def quote_string(s) #:nodoc:
+ @connection.escape(s)
+ end
+
+ # Checks the following cases:
+ #
+ # - table_name
+ # - "table.name"
+ # - schema_name.table_name
+ # - schema_name."table.name"
+ # - "schema.name".table_name
+ # - "schema.name"."table.name"
+ def quote_table_name(name) # :nodoc:
+ @quoted_table_names[name] ||= Utils.extract_schema_qualified_name(name.to_s).quoted.freeze
+ end
+
+ # Quotes schema names for use in SQL queries.
+ def quote_schema_name(name)
+ PG::Connection.quote_ident(name)
+ end
+
+ def quote_table_name_for_assignment(table, attr)
+ quote_column_name(attr)
+ end
+
+ # Quotes column names for use in SQL queries.
+ def quote_column_name(name) # :nodoc:
+ @quoted_column_names[name] ||= PG::Connection.quote_ident(super).freeze
+ end
+
+ # Quote date/time values for use in SQL input.
+ def quoted_date(value) #:nodoc:
+ if value.year <= 0
+ bce_year = format("%04d", -value.year + 1)
+ super.sub(/^-?\d+/, bce_year) + " BC"
+ else
+ super
+ end
+ end
+
+ def quoted_binary(value) # :nodoc:
+ "'#{escape_bytea(value.to_s)}'"
+ end
+
+ def quote_default_expression(value, column) # :nodoc:
+ if value.is_a?(Proc)
+ value.call
+ elsif column.type == :uuid && /\(\)/.match?(value)
+ value # Does not quote function default values for UUID columns
+ elsif column.respond_to?(:array?)
+ value = type_cast_from_column(column, value)
+ quote(value)
+ else
+ super
+ end
+ end
+
+ def lookup_cast_type_from_column(column) # :nodoc:
+ type_map.lookup(column.oid, column.fmod, column.sql_type)
+ end
+
+ private
+ def lookup_cast_type(sql_type)
+ super(query_value("SELECT #{quote(sql_type)}::regtype::oid", "SCHEMA").to_i)
+ end
+
+ def _quote(value)
+ case value
+ when OID::Xml::Data
+ "xml '#{quote_string(value.to_s)}'"
+ when OID::Bit::Data
+ if value.binary?
+ "B'#{value}'"
+ elsif value.hex?
+ "X'#{value}'"
+ end
+ when Float
+ if value.infinite? || value.nan?
+ "'#{value}'"
+ else
+ super
+ end
+ when OID::Array::Data
+ _quote(encode_array(value))
+ else
+ super
+ end
+ end
+
+ def _type_cast(value)
+ case value
+ when Type::Binary::Data
+ # Return a bind param hash with format as binary.
+ # See https://deveiate.org/code/pg/PG/Connection.html#method-i-exec_prepared-doc
+ # for more information
+ { value: value.to_s, format: 1 }
+ when OID::Xml::Data, OID::Bit::Data
+ value.to_s
+ when OID::Array::Data
+ encode_array(value)
+ else
+ super
+ end
+ end
+
+ def encode_array(array_data)
+ encoder = array_data.encoder
+ values = type_cast_array(array_data.values)
+
+ result = encoder.encode(values)
+ if encoding = determine_encoding_of_strings_in_array(values)
+ result.force_encoding(encoding)
+ end
+ result
+ end
+
+ def determine_encoding_of_strings_in_array(value)
+ case value
+ when ::Array then determine_encoding_of_strings_in_array(value.first)
+ when ::String then value.encoding
+ end
+ end
+
+ def type_cast_array(values)
+ case values
+ when ::Array then values.map { |item| type_cast_array(item) }
+ else _type_cast(values)
+ end
+ end
+ end
+ end
+ end
+end
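
The BCE branch of #quoted_date exists because PostgreSQL has no year zero: Ruby's year 0 maps to "0001 ... BC", year -1 to "0002 ... BC", and so on. The year arithmetic in isolation (a plain-Ruby sketch of the same format call):

    [0, -1, -753].map { |year| format("%04d", -year + 1) }  # => ["0001", "0002", "0754"]
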
diff --git a/activerecord/lib/active_record/connection_adapters/postgresql/referential_integrity.rb b/activerecord/lib/active_record/connection_adapters/postgresql/referential_integrity.rb
new file mode 100644
index 0000000000..386d22a9bd
--- /dev/null
+++ b/activerecord/lib/active_record/connection_adapters/postgresql/referential_integrity.rb
@@ -0,0 +1,51 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ module ConnectionAdapters
+ module PostgreSQL
+ module ReferentialIntegrity # :nodoc:
+ def supports_disable_referential_integrity? # :nodoc:
+ true
+ end
+
+ def disable_referential_integrity # :nodoc:
+ if supports_disable_referential_integrity?
+ original_exception = nil
+
+ begin
+ transaction(requires_new: true) do
+ execute(tables.collect { |name| "ALTER TABLE #{quote_table_name(name)} DISABLE TRIGGER ALL" }.join(";"))
+ end
+ rescue ActiveRecord::ActiveRecordError => e
+ original_exception = e
+ end
+
+ begin
+ yield
+ rescue ActiveRecord::InvalidForeignKey => e
+ warn <<-WARNING
+WARNING: Rails was not able to disable referential integrity.
+
+This is most likely due to missing permissions.
+Rails needs superuser privileges to disable referential integrity.
+
+ cause: #{original_exception.try(:message)}
+
+ WARNING
+ raise e
+ end
+
+ begin
+ transaction(requires_new: true) do
+ execute(tables.collect { |name| "ALTER TABLE #{quote_table_name(name)} ENABLE TRIGGER ALL" }.join(";"))
+ end
+ rescue ActiveRecord::ActiveRecordError
+ end
+ else
+ yield
+ end
+ end
+ end
+ end
+ end
+end
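
disable_referential_integrity is what lets rows be bulk-loaded in arbitrary order (fixture loading being the main caller). A hedged usage sketch, assuming hypothetical Post/Comment models and a superuser connection:

    ActiveRecord::Base.connection.disable_referential_integrity do
      Comment.create!(post_id: 42)  # parent row does not exist yet
      Post.create!(id: 42)          # inserted afterwards; the FK check was skipped
    end
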
diff --git a/activerecord/lib/active_record/connection_adapters/postgresql/schema_creation.rb b/activerecord/lib/active_record/connection_adapters/postgresql/schema_creation.rb
new file mode 100644
index 0000000000..59f661da25
--- /dev/null
+++ b/activerecord/lib/active_record/connection_adapters/postgresql/schema_creation.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ module ConnectionAdapters
+ module PostgreSQL
+ class SchemaCreation < AbstractAdapter::SchemaCreation # :nodoc:
+ private
+ def add_column_options!(sql, options)
+ if options[:collation]
+ sql << " COLLATE \"#{options[:collation]}\""
+ end
+ super
+ end
+ end
+ end
+ end
+end
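
The hook above appends a COLLATE clause whenever a column option asks for one; for example (hedged, table and column names illustrative):

    add_column :users, :name, :string, collation: "de_DE"
    # roughly: ALTER TABLE "users" ADD "name" character varying COLLATE "de_DE"
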
diff --git a/activerecord/lib/active_record/connection_adapters/postgresql/schema_definitions.rb b/activerecord/lib/active_record/connection_adapters/postgresql/schema_definitions.rb
new file mode 100644
index 0000000000..f1489e4d69
--- /dev/null
+++ b/activerecord/lib/active_record/connection_adapters/postgresql/schema_definitions.rb
@@ -0,0 +1,195 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ module ConnectionAdapters
+ module PostgreSQL
+ module ColumnMethods
+ # Defines the primary key field.
+ # Use of the native PostgreSQL UUID type is supported, and can be used
+ # by defining your tables as such:
+ #
+ # create_table :stuffs, id: :uuid do |t|
+ # t.string :content
+ # t.timestamps
+ # end
+ #
+ # By default, this will use the +gen_random_uuid()+ function from the
+ # +pgcrypto+ extension. As that extension is only available in
+ # PostgreSQL 9.4+, for earlier versions an explicit default can be set
+ # to use +uuid_generate_v4()+ from the +uuid-ossp+ extension instead:
+ #
+ # create_table :stuffs, id: false do |t|
+ # t.primary_key :id, :uuid, default: "uuid_generate_v4()"
+ # t.uuid :foo_id
+ # t.timestamps
+ # end
+ #
+ # To enable the appropriate extension, which is a requirement, use
+ # the +enable_extension+ method in your migrations.
+ #
+ # To use a UUID primary key without any of the extensions, set the
+ # +:default+ option to +nil+:
+ #
+ # create_table :stuffs, id: false do |t|
+ # t.primary_key :id, :uuid, default: nil
+ # t.uuid :foo_id
+ # t.timestamps
+ # end
+ #
+ # You may also pass a custom stored procedure that returns a UUID or use a
+ # different UUID generation function from another library.
+ #
+ # Note that setting the UUID primary key default value to +nil+ will
+ # require you to ensure that you always provide a UUID value before saving
+ # a record (as primary keys cannot be +nil+). This might be done via the
+ # +SecureRandom.uuid+ method and a +before_save+ callback, for instance.
+ def primary_key(name, type = :primary_key, **options)
+ options[:auto_increment] = true if [:integer, :bigint].include?(type) && !options.key?(:default)
+ if type == :uuid
+ options[:default] = options.fetch(:default, "gen_random_uuid()")
+ elsif options.delete(:auto_increment) == true && %i(integer bigint).include?(type)
+ type = if type == :bigint || options[:limit] == 8
+ :bigserial
+ else
+ :serial
+ end
+ end
+
+ super
+ end
+
+ def bigserial(*args, **options)
+ args.each { |name| column(name, :bigserial, options) }
+ end
+
+ def bit(*args, **options)
+ args.each { |name| column(name, :bit, options) }
+ end
+
+ def bit_varying(*args, **options)
+ args.each { |name| column(name, :bit_varying, options) }
+ end
+
+ def cidr(*args, **options)
+ args.each { |name| column(name, :cidr, options) }
+ end
+
+ def citext(*args, **options)
+ args.each { |name| column(name, :citext, options) }
+ end
+
+ def daterange(*args, **options)
+ args.each { |name| column(name, :daterange, options) }
+ end
+
+ def hstore(*args, **options)
+ args.each { |name| column(name, :hstore, options) }
+ end
+
+ def inet(*args, **options)
+ args.each { |name| column(name, :inet, options) }
+ end
+
+ def interval(*args, **options)
+ args.each { |name| column(name, :interval, options) }
+ end
+
+ def int4range(*args, **options)
+ args.each { |name| column(name, :int4range, options) }
+ end
+
+ def int8range(*args, **options)
+ args.each { |name| column(name, :int8range, options) }
+ end
+
+ def json(*args, **options)
+ args.each { |name| column(name, :json, options) }
+ end
+
+ def jsonb(*args, **options)
+ args.each { |name| column(name, :jsonb, options) }
+ end
+
+ def ltree(*args, **options)
+ args.each { |name| column(name, :ltree, options) }
+ end
+
+ def macaddr(*args, **options)
+ args.each { |name| column(name, :macaddr, options) }
+ end
+
+ def money(*args, **options)
+ args.each { |name| column(name, :money, options) }
+ end
+
+ def numrange(*args, **options)
+ args.each { |name| column(name, :numrange, options) }
+ end
+
+ def oid(*args, **options)
+ args.each { |name| column(name, :oid, options) }
+ end
+
+ def point(*args, **options)
+ args.each { |name| column(name, :point, options) }
+ end
+
+ def line(*args, **options)
+ args.each { |name| column(name, :line, options) }
+ end
+
+ def lseg(*args, **options)
+ args.each { |name| column(name, :lseg, options) }
+ end
+
+ def box(*args, **options)
+ args.each { |name| column(name, :box, options) }
+ end
+
+ def path(*args, **options)
+ args.each { |name| column(name, :path, options) }
+ end
+
+ def polygon(*args, **options)
+ args.each { |name| column(name, :polygon, options) }
+ end
+
+ def circle(*args, **options)
+ args.each { |name| column(name, :circle, options) }
+ end
+
+ def serial(*args, **options)
+ args.each { |name| column(name, :serial, options) }
+ end
+
+ def tsrange(*args, **options)
+ args.each { |name| column(name, :tsrange, options) }
+ end
+
+ def tstzrange(*args, **options)
+ args.each { |name| column(name, :tstzrange, options) }
+ end
+
+ def tsvector(*args, **options)
+ args.each { |name| column(name, :tsvector, options) }
+ end
+
+ def uuid(*args, **options)
+ args.each { |name| column(name, :uuid, options) }
+ end
+
+ def xml(*args, **options)
+ args.each { |name| column(name, :xml, options) }
+ end
+ end
+
+ class TableDefinition < ActiveRecord::ConnectionAdapters::TableDefinition
+ include ColumnMethods
+ end
+
+ class Table < ActiveRecord::ConnectionAdapters::Table
+ include ColumnMethods
+ end
+ end
+ end
+end
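
Taken together, ColumnMethods exposes every PostgreSQL-only type as a first-class column helper inside create_table and change_table. A hypothetical migration pulling several of them together (class, table and column names are illustrative; the Migration version tag is assumed):

    class CreateDevices < ActiveRecord::Migration[5.2]
      def change
        enable_extension "hstore"
        create_table :devices, id: :uuid do |t|
          t.inet      :last_ip
          t.macaddr   :mac_address
          t.hstore    :settings
          t.daterange :warranty
          t.timestamps
        end
      end
    end
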
diff --git a/activerecord/lib/active_record/connection_adapters/postgresql/schema_dumper.rb b/activerecord/lib/active_record/connection_adapters/postgresql/schema_dumper.rb
new file mode 100644
index 0000000000..12c6603081
--- /dev/null
+++ b/activerecord/lib/active_record/connection_adapters/postgresql/schema_dumper.rb
@@ -0,0 +1,45 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ module ConnectionAdapters
+ module PostgreSQL
+ module ColumnDumper # :nodoc:
+ # Adds +:array+ option to the default set
+ def prepare_column_options(column)
+ spec = super
+ spec[:array] = "true" if column.array?
+ spec
+ end
+
+ # Adds +:array+ as a valid migration key
+ def migration_keys
+ super + [:array]
+ end
+
+ private
+
+ def default_primary_key?(column)
+ schema_type(column) == :bigserial
+ end
+
+ def explicit_primary_key_default?(column)
+ column.type == :uuid || (column.type == :integer && !column.serial?)
+ end
+
+ def schema_type(column)
+ return super unless column.serial?
+
+ if column.bigint?
+ :bigserial
+ else
+ :serial
+ end
+ end
+
+ def schema_expression(column)
+ super unless column.serial?
+ end
+ end
+ end
+ end
+end
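
With :array added to the column options and migration keys, an integer[] column survives a round-trip through db/schema.rb; the dumped output looks roughly like:

    create_table "events", force: :cascade do |t|
      t.integer "tag_ids", array: true
    end
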
diff --git a/activerecord/lib/active_record/connection_adapters/postgresql/schema_statements.rb b/activerecord/lib/active_record/connection_adapters/postgresql/schema_statements.rb
new file mode 100644
index 0000000000..780e642f21
--- /dev/null
+++ b/activerecord/lib/active_record/connection_adapters/postgresql/schema_statements.rb
@@ -0,0 +1,670 @@
+# frozen_string_literal: true
+
+require "active_support/core_ext/string/strip"
+
+module ActiveRecord
+ module ConnectionAdapters
+ module PostgreSQL
+ module SchemaStatements
+ # Drops the database specified on the +name+ attribute
+ # and creates it again using the provided +options+.
+ def recreate_database(name, options = {}) #:nodoc:
+ drop_database(name)
+ create_database(name, options)
+ end
+
+ # Create a new PostgreSQL database. Options include <tt>:owner</tt>, <tt>:template</tt>,
+ # <tt>:encoding</tt> (defaults to utf8), <tt>:collation</tt>, <tt>:ctype</tt>,
+ # <tt>:tablespace</tt>, and <tt>:connection_limit</tt> (note that MySQL uses
+ # <tt>:charset</tt> while PostgreSQL uses <tt>:encoding</tt>).
+ #
+ # Example:
+ # create_database config[:database], config
+ # create_database 'foo_development', encoding: 'unicode'
+ def create_database(name, options = {})
+ options = { encoding: "utf8" }.merge!(options.symbolize_keys)
+
+ option_string = options.inject("") do |memo, (key, value)|
+ memo += case key
+ when :owner
+ " OWNER = \"#{value}\""
+ when :template
+ " TEMPLATE = \"#{value}\""
+ when :encoding
+ " ENCODING = '#{value}'"
+ when :collation
+ " LC_COLLATE = '#{value}'"
+ when :ctype
+ " LC_CTYPE = '#{value}'"
+ when :tablespace
+ " TABLESPACE = \"#{value}\""
+ when :connection_limit
+ " CONNECTION LIMIT = #{value}"
+ else
+ ""
+ end
+ end
+
+ execute "CREATE DATABASE #{quote_table_name(name)}#{option_string}"
+ end
+
+ # Drops a PostgreSQL database.
+ #
+ # Example:
+ # drop_database 'matt_development'
+ def drop_database(name) #:nodoc:
+ execute "DROP DATABASE IF EXISTS #{quote_table_name(name)}"
+ end
+
+ def drop_table(table_name, options = {}) # :nodoc:
+ execute "DROP TABLE#{' IF EXISTS' if options[:if_exists]} #{quote_table_name(table_name)}#{' CASCADE' if options[:force] == :cascade}"
+ end
+
+ # Returns true if schema exists.
+ def schema_exists?(name)
+ query_value("SELECT COUNT(*) FROM pg_namespace WHERE nspname = #{quote(name)}", "SCHEMA").to_i > 0
+ end
+
+ # Verifies existence of an index with a given name.
+ def index_name_exists?(table_name, index_name, default = nil)
+ unless default.nil?
+ ActiveSupport::Deprecation.warn(<<-MSG.squish)
+ Passing default to #index_name_exists? is deprecated without replacement.
+ MSG
+ end
+ table = quoted_scope(table_name)
+ index = quoted_scope(index_name)
+
+ query_value(<<-SQL, "SCHEMA").to_i > 0
+ SELECT COUNT(*)
+ FROM pg_class t
+ INNER JOIN pg_index d ON t.oid = d.indrelid
+ INNER JOIN pg_class i ON d.indexrelid = i.oid
+ LEFT JOIN pg_namespace n ON n.oid = i.relnamespace
+ WHERE i.relkind = 'i'
+ AND i.relname = #{index[:name]}
+ AND t.relname = #{table[:name]}
+ AND n.nspname = #{index[:schema]}
+ SQL
+ end
+
+ # Returns an array of indexes for the given table.
+ def indexes(table_name, name = nil) # :nodoc:
+ if name
+ ActiveSupport::Deprecation.warn(<<-MSG.squish)
+ Passing name to #indexes is deprecated without replacement.
+ MSG
+ end
+
+ scope = quoted_scope(table_name)
+
+ result = query(<<-SQL, "SCHEMA")
+ SELECT distinct i.relname, d.indisunique, d.indkey, pg_get_indexdef(d.indexrelid), t.oid,
+ pg_catalog.obj_description(i.oid, 'pg_class') AS comment,
+ (SELECT COUNT(*) FROM pg_opclass o
+ JOIN (SELECT unnest(string_to_array(d.indclass::text, ' '))::int oid) c
+ ON o.oid = c.oid WHERE o.opcdefault = 'f')
+ FROM pg_class t
+ INNER JOIN pg_index d ON t.oid = d.indrelid
+ INNER JOIN pg_class i ON d.indexrelid = i.oid
+ LEFT JOIN pg_namespace n ON n.oid = i.relnamespace
+ WHERE i.relkind = 'i'
+ AND d.indisprimary = 'f'
+ AND t.relname = #{scope[:name]}
+ AND n.nspname = #{scope[:schema]}
+ ORDER BY i.relname
+ SQL
+
+ result.map do |row|
+ index_name = row[0]
+ unique = row[1]
+ indkey = row[2].split(" ").map(&:to_i)
+ inddef = row[3]
+ oid = row[4]
+ comment = row[5]
+ opclass = row[6]
+
+ using, expressions, where = inddef.scan(/ USING (\w+?) \((.+?)\)(?: WHERE (.+))?\z/).flatten
+
+ if indkey.include?(0) || opclass > 0
+ columns = expressions
+ else
+ columns = Hash[query(<<-SQL.strip_heredoc, "SCHEMA")].values_at(*indkey).compact
+ SELECT a.attnum, a.attname
+ FROM pg_attribute a
+ WHERE a.attrelid = #{oid}
+ AND a.attnum IN (#{indkey.join(",")})
+ SQL
+
+ # add info on sort order for columns (only desc order is explicitly specified, asc is the default)
+ orders = Hash[
+ expressions.scan(/(\w+) DESC/).flatten.map { |order_column| [order_column, :desc] }
+ ]
+ end
+
+ IndexDefinition.new(
+ table_name,
+ index_name,
+ unique,
+ columns,
+ orders: orders,
+ where: where,
+ using: using.to_sym,
+ comment: comment.presence
+ )
+ end
+ end
+
+ def table_options(table_name) # :nodoc:
+ if comment = table_comment(table_name)
+ { comment: comment }
+ end
+ end
+
+ # Returns the comment stored in the database for the given table.
+ def table_comment(table_name) # :nodoc:
+ scope = quoted_scope(table_name, type: "BASE TABLE")
+ if scope[:name]
+ query_value(<<-SQL.strip_heredoc, "SCHEMA")
+ SELECT pg_catalog.obj_description(c.oid, 'pg_class')
+ FROM pg_catalog.pg_class c
+ LEFT JOIN pg_namespace n ON n.oid = c.relnamespace
+ WHERE c.relname = #{scope[:name]}
+ AND c.relkind IN (#{scope[:type]})
+ AND n.nspname = #{scope[:schema]}
+ SQL
+ end
+ end
+
+ # Returns the current database name.
+ def current_database
+ query_value("SELECT current_database()", "SCHEMA")
+ end
+
+ # Returns the current schema name.
+ def current_schema
+ query_value("SELECT current_schema", "SCHEMA")
+ end
+
+ # Returns the current database encoding format.
+ def encoding
+ query_value("SELECT pg_encoding_to_char(encoding) FROM pg_database WHERE datname = current_database()", "SCHEMA")
+ end
+
+ # Returns the current database collation.
+ def collation
+ query_value("SELECT datcollate FROM pg_database WHERE datname = current_database()", "SCHEMA")
+ end
+
+ # Returns the current database ctype.
+ def ctype
+ query_value("SELECT datctype FROM pg_database WHERE datname = current_database()", "SCHEMA")
+ end
+
+ # Returns an array of schema names.
+ def schema_names
+ query_values(<<-SQL, "SCHEMA")
+ SELECT nspname
+ FROM pg_namespace
+ WHERE nspname !~ '^pg_.*'
+ AND nspname NOT IN ('information_schema')
+ ORDER by nspname;
+ SQL
+ end
+
+ # Creates a schema for the given schema name.
+ def create_schema(schema_name)
+ execute "CREATE SCHEMA #{quote_schema_name(schema_name)}"
+ end
+
+ # Drops the schema for the given schema name.
+ def drop_schema(schema_name, options = {})
+ execute "DROP SCHEMA#{' IF EXISTS' if options[:if_exists]} #{quote_schema_name(schema_name)} CASCADE"
+ end
+
+ # Sets the schema search path to a string of comma-separated schema names.
+ # Names beginning with $ have to be quoted (e.g. $user => '$user').
+ # See: https://www.postgresql.org/docs/current/static/ddl-schemas.html
+ #
+ # This should not be called manually but set in database.yml.
+ def schema_search_path=(schema_csv)
+ if schema_csv
+ execute("SET search_path TO #{schema_csv}", "SCHEMA")
+ @schema_search_path = schema_csv
+ end
+ end
+
+ # Returns the active schema search path.
+ def schema_search_path
+ @schema_search_path ||= query_value("SHOW search_path", "SCHEMA")
+ end
+
+ # Returns the current client message level.
+ def client_min_messages
+ query_value("SHOW client_min_messages", "SCHEMA")
+ end
+
+ # Set the client message level.
+ def client_min_messages=(level)
+ execute("SET client_min_messages TO '#{level}'", "SCHEMA")
+ end
+
+ # Returns the sequence name for a table's primary key or some other specified key.
+ def default_sequence_name(table_name, pk = "id") #:nodoc:
+ result = serial_sequence(table_name, pk)
+ return nil unless result
+ Utils.extract_schema_qualified_name(result).to_s
+ rescue ActiveRecord::StatementInvalid
+ PostgreSQL::Name.new(nil, "#{table_name}_#{pk}_seq").to_s
+ end
+
+ def serial_sequence(table, column)
+ query_value("SELECT pg_get_serial_sequence(#{quote(table)}, #{quote(column)})", "SCHEMA")
+ end
+
+ # Sets the sequence of a table's primary key to the specified value.
+ def set_pk_sequence!(table, value) #:nodoc:
+ pk, sequence = pk_and_sequence_for(table)
+
+ if pk
+ if sequence
+ quoted_sequence = quote_table_name(sequence)
+
+ query_value("SELECT setval(#{quote(quoted_sequence)}, #{value})", "SCHEMA")
+ else
+ @logger.warn "#{table} has primary key #{pk} with no default sequence." if @logger
+ end
+ end
+ end
+
+ # Resets the sequence of a table's primary key to the maximum value.
+ def reset_pk_sequence!(table, pk = nil, sequence = nil) #:nodoc:
+ unless pk && sequence
+ default_pk, default_sequence = pk_and_sequence_for(table)
+
+ pk ||= default_pk
+ sequence ||= default_sequence
+ end
+
+ if @logger && pk && !sequence
+ @logger.warn "#{table} has primary key #{pk} with no default sequence."
+ end
+
+ if pk && sequence
+ quoted_sequence = quote_table_name(sequence)
+ max_pk = query_value("SELECT MAX(#{quote_column_name pk}) FROM #{quote_table_name(table)}", "SCHEMA")
+ if max_pk.nil?
+ if postgresql_version >= 100000
+ minvalue = query_value("SELECT seqmin FROM pg_sequence WHERE seqrelid = #{quote(quoted_sequence)}::regclass", "SCHEMA")
+ else
+ minvalue = query_value("SELECT min_value FROM #{quoted_sequence}", "SCHEMA")
+ end
+ end
+
+ query_value("SELECT setval(#{quote(quoted_sequence)}, #{max_pk ? max_pk : minvalue}, #{max_pk ? true : false})", "SCHEMA")
+ end
+ end
+
+ # Returns a table's primary key and its associated sequence.
+ def pk_and_sequence_for(table) #:nodoc:
+ # First try looking for a sequence with a dependency on the
+ # given table's primary key.
+ result = query(<<-end_sql, "SCHEMA")[0]
+ SELECT attr.attname, nsp.nspname, seq.relname
+ FROM pg_class seq,
+ pg_attribute attr,
+ pg_depend dep,
+ pg_constraint cons,
+ pg_namespace nsp
+ WHERE seq.oid = dep.objid
+ AND seq.relkind = 'S'
+ AND attr.attrelid = dep.refobjid
+ AND attr.attnum = dep.refobjsubid
+ AND attr.attrelid = cons.conrelid
+ AND attr.attnum = cons.conkey[1]
+ AND seq.relnamespace = nsp.oid
+ AND cons.contype = 'p'
+ AND dep.classid = 'pg_class'::regclass
+ AND dep.refobjid = #{quote(quote_table_name(table))}::regclass
+ end_sql
+
+ if result.nil? || result.empty?
+ result = query(<<-end_sql, "SCHEMA")[0]
+ SELECT attr.attname, nsp.nspname,
+ CASE
+ WHEN pg_get_expr(def.adbin, def.adrelid) !~* 'nextval' THEN NULL
+ WHEN split_part(pg_get_expr(def.adbin, def.adrelid), '''', 2) ~ '.' THEN
+ substr(split_part(pg_get_expr(def.adbin, def.adrelid), '''', 2),
+ strpos(split_part(pg_get_expr(def.adbin, def.adrelid), '''', 2), '.')+1)
+ ELSE split_part(pg_get_expr(def.adbin, def.adrelid), '''', 2)
+ END
+ FROM pg_class t
+ JOIN pg_attribute attr ON (t.oid = attrelid)
+ JOIN pg_attrdef def ON (adrelid = attrelid AND adnum = attnum)
+ JOIN pg_constraint cons ON (conrelid = adrelid AND adnum = conkey[1])
+ JOIN pg_namespace nsp ON (t.relnamespace = nsp.oid)
+ WHERE t.oid = #{quote(quote_table_name(table))}::regclass
+ AND cons.contype = 'p'
+ AND pg_get_expr(def.adbin, def.adrelid) ~* 'nextval|uuid_generate'
+ end_sql
+ end
+
+ pk = result.shift
+ if result.last
+ [pk, PostgreSQL::Name.new(*result)]
+ else
+ [pk, nil]
+ end
+ rescue
+ nil
+ end
+
+ def primary_keys(table_name) # :nodoc:
+ query_values(<<-SQL.strip_heredoc, "SCHEMA")
+ SELECT a.attname
+ FROM (
+ SELECT indrelid, indkey, generate_subscripts(indkey, 1) idx
+ FROM pg_index
+ WHERE indrelid = #{quote(quote_table_name(table_name))}::regclass
+ AND indisprimary
+ ) i
+ JOIN pg_attribute a
+ ON a.attrelid = i.indrelid
+ AND a.attnum = i.indkey[i.idx]
+ ORDER BY i.idx
+ SQL
+ end
+
+ # Renames a table.
+ # Also renames a table's primary key sequence if the sequence name exists and
+ # matches the Active Record default.
+ #
+ # Example:
+ # rename_table('octopuses', 'octopi')
+ def rename_table(table_name, new_name)
+ clear_cache!
+ execute "ALTER TABLE #{quote_table_name(table_name)} RENAME TO #{quote_table_name(new_name)}"
+ pk, seq = pk_and_sequence_for(new_name)
+ if pk
+ idx = "#{table_name}_pkey"
+ new_idx = "#{new_name}_pkey"
+ execute "ALTER INDEX #{quote_table_name(idx)} RENAME TO #{quote_table_name(new_idx)}"
+ if seq && seq.identifier == "#{table_name}_#{pk}_seq"
+ new_seq = "#{new_name}_#{pk}_seq"
+ execute "ALTER TABLE #{seq.quoted} RENAME TO #{quote_table_name(new_seq)}"
+ end
+ end
+ rename_table_indexes(table_name, new_name)
+ end
+
+ def add_column(table_name, column_name, type, options = {}) #:nodoc:
+ clear_cache!
+ super
+ change_column_comment(table_name, column_name, options[:comment]) if options.key?(:comment)
+ end
+
+ def change_column(table_name, column_name, type, options = {}) #:nodoc:
+ clear_cache!
+ quoted_table_name = quote_table_name(table_name)
+ quoted_column_name = quote_column_name(column_name)
+ sql_type = type_to_sql(type, options)
+ sql = "ALTER TABLE #{quoted_table_name} ALTER COLUMN #{quoted_column_name} TYPE #{sql_type}".dup
+ if options[:collation]
+ sql << " COLLATE \"#{options[:collation]}\""
+ end
+ if options[:using]
+ sql << " USING #{options[:using]}"
+ elsif options[:cast_as]
+ cast_as_type = type_to_sql(options[:cast_as], options)
+ sql << " USING CAST(#{quoted_column_name} AS #{cast_as_type})"
+ end
+ execute sql
+
+ change_column_default(table_name, column_name, options[:default]) if options.key?(:default)
+ change_column_null(table_name, column_name, options[:null], options[:default]) if options.key?(:null)
+ change_column_comment(table_name, column_name, options[:comment]) if options.key?(:comment)
+ end
+
+ # Changes the default value of a table column.
+ def change_column_default(table_name, column_name, default_or_changes) # :nodoc:
+ clear_cache!
+ column = column_for(table_name, column_name)
+ return unless column
+
+ default = extract_new_default_value(default_or_changes)
+ alter_column_query = "ALTER TABLE #{quote_table_name(table_name)} ALTER COLUMN #{quote_column_name(column_name)} %s"
+ if default.nil?
+ # <tt>DEFAULT NULL</tt> results in the same behavior as <tt>DROP DEFAULT</tt>. However, PostgreSQL will
+ # cast the default to the column's type, which leaves us with a default like "default NULL::character varying".
+ execute alter_column_query % "DROP DEFAULT"
+ else
+ execute alter_column_query % "SET DEFAULT #{quote_default_expression(default, column)}"
+ end
+ end
+
+ def change_column_null(table_name, column_name, null, default = nil) #:nodoc:
+ clear_cache!
+ unless null || default.nil?
+ column = column_for(table_name, column_name)
+ execute("UPDATE #{quote_table_name(table_name)} SET #{quote_column_name(column_name)}=#{quote_default_expression(default, column)} WHERE #{quote_column_name(column_name)} IS NULL") if column
+ end
+ execute("ALTER TABLE #{quote_table_name(table_name)} ALTER #{quote_column_name(column_name)} #{null ? 'DROP' : 'SET'} NOT NULL")
+ end
+
+ # Adds a comment to the given table column, or drops it if +comment+ is +nil+
+ def change_column_comment(table_name, column_name, comment) # :nodoc:
+ clear_cache!
+ execute "COMMENT ON COLUMN #{quote_table_name(table_name)}.#{quote_column_name(column_name)} IS #{quote(comment)}"
+ end
+
+ # Adds a comment to the given table, or drops it if +comment+ is +nil+
+ def change_table_comment(table_name, comment) # :nodoc:
+ clear_cache!
+ execute "COMMENT ON TABLE #{quote_table_name(table_name)} IS #{quote(comment)}"
+ end
+
+ # Renames a column in a table.
+ def rename_column(table_name, column_name, new_column_name) #:nodoc:
+ clear_cache!
+ execute "ALTER TABLE #{quote_table_name(table_name)} RENAME COLUMN #{quote_column_name(column_name)} TO #{quote_column_name(new_column_name)}"
+ rename_column_indexes(table_name, column_name, new_column_name)
+ end
+
+ def add_index(table_name, column_name, options = {}) #:nodoc:
+ index_name, index_type, index_columns, index_options, index_algorithm, index_using, comment = add_index_options(table_name, column_name, options)
+ execute("CREATE #{index_type} INDEX #{index_algorithm} #{quote_column_name(index_name)} ON #{quote_table_name(table_name)} #{index_using} (#{index_columns})#{index_options}").tap do
+ execute "COMMENT ON INDEX #{quote_column_name(index_name)} IS #{quote(comment)}" if comment
+ end
+ end
+
+ def remove_index(table_name, options = {}) #:nodoc:
+ table = Utils.extract_schema_qualified_name(table_name.to_s)
+
+ if options.is_a?(Hash) && options.key?(:name)
+ provided_index = Utils.extract_schema_qualified_name(options[:name].to_s)
+
+ options[:name] = provided_index.identifier
+ table = PostgreSQL::Name.new(provided_index.schema, table.identifier) unless table.schema.present?
+
+ if provided_index.schema.present? && table.schema != provided_index.schema
+ raise ArgumentError.new("Index schema '#{provided_index.schema}' does not match table schema '#{table.schema}'")
+ end
+ end
+
+ index_to_remove = PostgreSQL::Name.new(table.schema, index_name_for_remove(table.to_s, options))
+ algorithm =
+ if options.is_a?(Hash) && options.key?(:algorithm)
+ index_algorithms.fetch(options[:algorithm]) do
+ raise ArgumentError.new("Algorithm must be one of the following: #{index_algorithms.keys.map(&:inspect).join(', ')}")
+ end
+ end
+ execute "DROP INDEX #{algorithm} #{quote_table_name(index_to_remove)}"
+ end
+
+ # Renames an index of a table. Raises an error if the new
+ # index name is longer than the allowed limit.
+ def rename_index(table_name, old_name, new_name)
+ validate_index_length!(table_name, new_name)
+
+ execute "ALTER INDEX #{quote_column_name(old_name)} RENAME TO #{quote_table_name(new_name)}"
+ end
+
+ def foreign_keys(table_name)
+ scope = quoted_scope(table_name)
+ fk_info = exec_query(<<-SQL.strip_heredoc, "SCHEMA")
+ SELECT t2.oid::regclass::text AS to_table, a1.attname AS column, a2.attname AS primary_key, c.conname AS name, c.confupdtype AS on_update, c.confdeltype AS on_delete
+ FROM pg_constraint c
+ JOIN pg_class t1 ON c.conrelid = t1.oid
+ JOIN pg_class t2 ON c.confrelid = t2.oid
+ JOIN pg_attribute a1 ON a1.attnum = c.conkey[1] AND a1.attrelid = t1.oid
+ JOIN pg_attribute a2 ON a2.attnum = c.confkey[1] AND a2.attrelid = t2.oid
+ JOIN pg_namespace t3 ON c.connamespace = t3.oid
+ WHERE c.contype = 'f'
+ AND t1.relname = #{scope[:name]}
+ AND t3.nspname = #{scope[:schema]}
+ ORDER BY c.conname
+ SQL
+
+ fk_info.map do |row|
+ options = {
+ column: row["column"],
+ name: row["name"],
+ primary_key: row["primary_key"]
+ }
+
+ options[:on_delete] = extract_foreign_key_action(row["on_delete"])
+ options[:on_update] = extract_foreign_key_action(row["on_update"])
+
+ ForeignKeyDefinition.new(table_name, row["to_table"], options)
+ end
+ end
+
+ # Maps logical Rails types to PostgreSQL-specific data types.
+ def type_to_sql(type, limit: nil, precision: nil, scale: nil, array: nil, **) # :nodoc:
+ sql = \
+ case type.to_s
+ when "binary"
+ # PostgreSQL doesn't support limits on binary (bytea) columns.
+ # The hard limit is 1GB, because of a 32-bit size field, and TOAST.
+ case limit
+ when nil, 0..0x3fffffff; super(type)
+ else raise(ActiveRecordError, "No binary type has byte size #{limit}.")
+ end
+ when "text"
+ # PostgreSQL doesn't support limits on text columns.
+ # The hard limit is 1GB, according to section 8.3 in the manual.
+ case limit
+ when nil, 0..0x3fffffff; super(type)
+ else raise(ActiveRecordError, "The limit on text can be at most 1GB - 1byte.")
+ end
+ when "integer"
+ case limit
+ when 1, 2; "smallint"
+ when nil, 3, 4; "integer"
+ when 5..8; "bigint"
+ else raise(ActiveRecordError, "No integer type has byte size #{limit}. Use a numeric with scale 0 instead.")
+ end
+ else
+ super
+ end
+
+ sql = "#{sql}[]" if array && type != :primary_key
+ sql
+ end
+
+ # PostgreSQL requires the ORDER BY columns in the select list for distinct queries, and
+ # requires that the ORDER BY include the distinct column.
+ def columns_for_distinct(columns, orders) #:nodoc:
+ order_columns = orders.reject(&:blank?).map { |s|
+ # Convert Arel node to string
+ s = s.to_sql unless s.is_a?(String)
+ # Remove any ASC/DESC modifiers
+ s.gsub(/\s+(?:ASC|DESC)\b/i, "")
+ .gsub(/\s+NULLS\s+(?:FIRST|LAST)\b/i, "")
+ }.reject(&:blank?).map.with_index { |column, i| "#{column} AS alias_#{i}" }
+
+ [super, *order_columns].join(", ")
+ end
+
+ private
+ def schema_creation
+ PostgreSQL::SchemaCreation.new(self)
+ end
+
+ def create_table_definition(*args)
+ PostgreSQL::TableDefinition.new(*args)
+ end
+
+ def new_column_from_field(table_name, field)
+ column_name, type, default, notnull, oid, fmod, collation, comment = field
+ type_metadata = fetch_type_metadata(column_name, type, oid.to_i, fmod.to_i)
+ default_value = extract_value_from_default(default)
+ default_function = extract_default_function(default_value, default)
+
+ PostgreSQLColumn.new(
+ column_name,
+ default_value,
+ type_metadata,
+ !notnull,
+ table_name,
+ default_function,
+ collation,
+ comment: comment.presence
+ )
+ end
+
+ def fetch_type_metadata(column_name, sql_type, oid, fmod)
+ cast_type = get_oid_type(oid, fmod, column_name, sql_type)
+ simple_type = SqlTypeMetadata.new(
+ sql_type: sql_type,
+ type: cast_type.type,
+ limit: cast_type.limit,
+ precision: cast_type.precision,
+ scale: cast_type.scale,
+ )
+ PostgreSQLTypeMetadata.new(simple_type, oid: oid, fmod: fmod)
+ end
+
+ def extract_foreign_key_action(specifier)
+ case specifier
+ when "c"; :cascade
+ when "n"; :nullify
+ when "r"; :restrict
+ end
+ end
+
+ def data_source_sql(name = nil, type: nil)
+ scope = quoted_scope(name, type: type)
+ scope[:type] ||= "'r','v','m'" # (r)elation/table, (v)iew, (m)aterialized view
+
+ sql = "SELECT c.relname FROM pg_class c LEFT JOIN pg_namespace n ON n.oid = c.relnamespace".dup
+ sql << " WHERE n.nspname = #{scope[:schema]}"
+ sql << " AND c.relname = #{scope[:name]}" if scope[:name]
+ sql << " AND c.relkind IN (#{scope[:type]})"
+ sql
+ end
+
+ def quoted_scope(name = nil, type: nil)
+ schema, name = extract_schema_qualified_name(name)
+ type = \
+ case type
+ when "BASE TABLE"
+ "'r'"
+ when "VIEW"
+ "'v','m'"
+ end
+ scope = {}
+ scope[:schema] = schema ? quote(schema) : "ANY (current_schemas(false))"
+ scope[:name] = quote(name) if name
+ scope[:type] = type if type
+ scope
+ end
+
+ def extract_schema_qualified_name(string)
+ name = Utils.extract_schema_qualified_name(string.to_s)
+ [name.schema, name.identifier]
+ end
+ end
+ end
+ end
+end
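
Among the statements above, #change_column benefits most from an example: :using and :cast_as both end up in the ALTER TABLE ... USING clause, which is what makes otherwise-incompatible type changes possible. Hedged migration snippets (table and column names are illustrative):

    change_column :events, :payload, :jsonb, using: "payload::jsonb"
    # => ALTER TABLE "events" ALTER COLUMN "payload" TYPE jsonb USING payload::jsonb

    change_column :users, :legacy_id, :uuid, cast_as: :uuid
    # => ALTER TABLE "users" ALTER COLUMN "legacy_id" TYPE uuid USING CAST("legacy_id" AS uuid)
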
diff --git a/activerecord/lib/active_record/connection_adapters/postgresql/type_metadata.rb b/activerecord/lib/active_record/connection_adapters/postgresql/type_metadata.rb
new file mode 100644
index 0000000000..b252a76caa
--- /dev/null
+++ b/activerecord/lib/active_record/connection_adapters/postgresql/type_metadata.rb
@@ -0,0 +1,39 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ module ConnectionAdapters
+ class PostgreSQLTypeMetadata < DelegateClass(SqlTypeMetadata)
+ undef to_yaml if method_defined?(:to_yaml)
+
+ attr_reader :oid, :fmod, :array
+
+ def initialize(type_metadata, oid: nil, fmod: nil)
+ super(type_metadata)
+ @type_metadata = type_metadata
+ @oid = oid
+ @fmod = fmod
+ @array = /\[\]$/.match?(type_metadata.sql_type)
+ end
+
+ def sql_type
+ super.gsub(/\[\]$/, "".freeze)
+ end
+
+ def ==(other)
+ other.is_a?(PostgreSQLTypeMetadata) &&
+ attributes_for_hash == other.attributes_for_hash
+ end
+ alias eql? ==
+
+ def hash
+ attributes_for_hash.hash
+ end
+
+ protected
+
+ def attributes_for_hash
+ [self.class, @type_metadata, oid, fmod]
+ end
+ end
+ end
+end
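
The wrapper keeps the oid/fmod pair around for later lookups and detects array columns from a trailing "[]" on the SQL type, which #sql_type then strips; roughly (1007 is the int4[] OID, used here purely as an illustration):

    base = ActiveRecord::ConnectionAdapters::SqlTypeMetadata.new(sql_type: "integer[]", type: :integer)
    meta = ActiveRecord::ConnectionAdapters::PostgreSQLTypeMetadata.new(base, oid: 1007, fmod: -1)
    meta.array     # => true
    meta.sql_type  # => "integer"
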
diff --git a/activerecord/lib/active_record/connection_adapters/postgresql/utils.rb b/activerecord/lib/active_record/connection_adapters/postgresql/utils.rb
new file mode 100644
index 0000000000..bfd300723d
--- /dev/null
+++ b/activerecord/lib/active_record/connection_adapters/postgresql/utils.rb
@@ -0,0 +1,81 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ module ConnectionAdapters
+ module PostgreSQL
+ # Value Object to hold a schema qualified name.
+ # This is usually the name of a PostgreSQL relation but it can also represent
+ # schema qualified type names. +schema+ and +identifier+ are unquoted to prevent
+ # double quoting.
+ class Name # :nodoc:
+ SEPARATOR = "."
+ attr_reader :schema, :identifier
+
+ def initialize(schema, identifier)
+ @schema, @identifier = unquote(schema), unquote(identifier)
+ end
+
+ def to_s
+ parts.join SEPARATOR
+ end
+
+ def quoted
+ if schema
+ PG::Connection.quote_ident(schema) << SEPARATOR << PG::Connection.quote_ident(identifier)
+ else
+ PG::Connection.quote_ident(identifier)
+ end
+ end
+
+ def ==(o)
+ o.class == self.class && o.parts == parts
+ end
+ alias_method :eql?, :==
+
+ def hash
+ parts.hash
+ end
+
+ protected
+
+ def parts
+ @parts ||= [@schema, @identifier].compact
+ end
+
+ private
+ def unquote(part)
+ if part && part.start_with?('"')
+ part[1..-2]
+ else
+ part
+ end
+ end
+ end
+
+ module Utils # :nodoc:
+ extend self
+
+ # Returns an instance of <tt>ActiveRecord::ConnectionAdapters::PostgreSQL::Name</tt>
+ # extracted from +string+.
+ # +schema+ is +nil+ if not specified in +string+.
+ # +schema+ and +identifier+ exclude surrounding quotes (regardless of whether provided in +string+)
+ # +string+ supports the range of schema/table references understood by PostgreSQL, for example:
+ #
+ # * <tt>table_name</tt>
+ # * <tt>"table.name"</tt>
+ # * <tt>schema_name.table_name</tt>
+ # * <tt>schema_name."table.name"</tt>
+ # * <tt>"schema_name".table_name</tt>
+ # * <tt>"schema.name"."table name"</tt>
+ def extract_schema_qualified_name(string)
+ schema, table = string.scan(/[^".\s]+|"[^"]*"/)
+ if table.nil?
+ table = schema
+ schema = nil
+ end
+ PostgreSQL::Name.new(schema, table)
+ end
+ end
+ end
+ end
+end
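
A few illustrative calls, showing that schema and identifier come back unquoted while #quoted re-quotes both parts:

    pg_utils = ActiveRecord::ConnectionAdapters::PostgreSQL::Utils
    pg_utils.extract_schema_qualified_name("posts").to_s             # => "posts"
    pg_utils.extract_schema_qualified_name('"blog"."posts"').schema  # => "blog"
    pg_utils.extract_schema_qualified_name('blog."my.posts"').quoted # => "blog"."my.posts"
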
diff --git a/activerecord/lib/active_record/connection_adapters/postgresql_adapter.rb b/activerecord/lib/active_record/connection_adapters/postgresql_adapter.rb
new file mode 100644
index 0000000000..3b4439fc46
--- /dev/null
+++ b/activerecord/lib/active_record/connection_adapters/postgresql_adapter.rb
@@ -0,0 +1,852 @@
+# frozen_string_literal: true
+
+# Make sure we're using a pg version high enough for type casts and Ruby 2.2+ compatibility
+gem "pg", "~> 0.18"
+require "pg"
+
+require_relative "abstract_adapter"
+require_relative "statement_pool"
+require_relative "postgresql/column"
+require_relative "postgresql/database_statements"
+require_relative "postgresql/explain_pretty_printer"
+require_relative "postgresql/oid"
+require_relative "postgresql/quoting"
+require_relative "postgresql/referential_integrity"
+require_relative "postgresql/schema_creation"
+require_relative "postgresql/schema_definitions"
+require_relative "postgresql/schema_dumper"
+require_relative "postgresql/schema_statements"
+require_relative "postgresql/type_metadata"
+require_relative "postgresql/utils"
+
+module ActiveRecord
+ module ConnectionHandling # :nodoc:
+ # Establishes a connection to the database that's used by all Active Record objects
+ def postgresql_connection(config)
+ conn_params = config.symbolize_keys
+
+ conn_params.delete_if { |_, v| v.nil? }
+
+ # Map Active Record's param names to PG's.
+ conn_params[:user] = conn_params.delete(:username) if conn_params[:username]
+ conn_params[:dbname] = conn_params.delete(:database) if conn_params[:database]
+
+ # Forward only valid config params to PG::Connection.connect.
+ valid_conn_param_keys = PG::Connection.conndefaults_hash.keys + [:requiressl]
+ conn_params.slice!(*valid_conn_param_keys)
+
+ # The postgres drivers don't allow the creation of an unconnected PG::Connection object,
+ # so just pass a nil connection object for the time being.
+ ConnectionAdapters::PostgreSQLAdapter.new(nil, logger, conn_params, config)
+ end
+ end
+
+ module ConnectionAdapters
+ # The PostgreSQL adapter works with the native C (https://bitbucket.org/ged/ruby-pg) driver.
+ #
+ # Options:
+ #
+ # * <tt>:host</tt> - Defaults to a Unix-domain socket in /tmp. On machines without Unix-domain sockets,
+ # the default is to connect to localhost.
+ # * <tt>:port</tt> - Defaults to 5432.
+ # * <tt>:username</tt> - Defaults to be the same as the operating system name of the user running the application.
+ # * <tt>:password</tt> - Password to be used if the server demands password authentication.
+ # * <tt>:database</tt> - Defaults to be the same as the user name.
+ # * <tt>:schema_search_path</tt> - An optional schema search path for the connection given
+ # as a string of comma-separated schema names. This is backward-compatible with the <tt>:schema_order</tt> option.
+ # * <tt>:encoding</tt> - An optional client encoding that is used in a <tt>SET client_encoding TO
+ # <encoding></tt> call on the connection.
+ # * <tt>:min_messages</tt> - An optional client min messages that is used in a
+ # <tt>SET client_min_messages TO <min_messages></tt> call on the connection.
+ # * <tt>:variables</tt> - An optional hash of additional parameters that
+ # will be used in <tt>SET SESSION key = val</tt> calls on the connection.
+ # * <tt>:insert_returning</tt> - An optional boolean to control the use of <tt>RETURNING</tt> for <tt>INSERT</tt> statements
+ # defaults to true.
+ #
+ # Any further options are used as connection parameters to libpq. See
+ # https://www.postgresql.org/docs/current/static/libpq-connect.html for the
+ # list of parameters.
+ #
+ # In addition, default connection parameters of libpq can be set via environment variables.
+ # See https://www.postgresql.org/docs/current/static/libpq-envars.html .
+ class PostgreSQLAdapter < AbstractAdapter
+ ADAPTER_NAME = "PostgreSQL".freeze
+
+ NATIVE_DATABASE_TYPES = {
+ primary_key: "bigserial primary key",
+ string: { name: "character varying" },
+ text: { name: "text" },
+ integer: { name: "integer" },
+ float: { name: "float" },
+ decimal: { name: "decimal" },
+ datetime: { name: "timestamp" },
+ time: { name: "time" },
+ date: { name: "date" },
+ daterange: { name: "daterange" },
+ numrange: { name: "numrange" },
+ tsrange: { name: "tsrange" },
+ tstzrange: { name: "tstzrange" },
+ int4range: { name: "int4range" },
+ int8range: { name: "int8range" },
+ binary: { name: "bytea" },
+ boolean: { name: "boolean" },
+ xml: { name: "xml" },
+ tsvector: { name: "tsvector" },
+ hstore: { name: "hstore" },
+ inet: { name: "inet" },
+ cidr: { name: "cidr" },
+ macaddr: { name: "macaddr" },
+ uuid: { name: "uuid" },
+ json: { name: "json" },
+ jsonb: { name: "jsonb" },
+ ltree: { name: "ltree" },
+ citext: { name: "citext" },
+ point: { name: "point" },
+ line: { name: "line" },
+ lseg: { name: "lseg" },
+ box: { name: "box" },
+ path: { name: "path" },
+ polygon: { name: "polygon" },
+ circle: { name: "circle" },
+ bit: { name: "bit" },
+ bit_varying: { name: "bit varying" },
+ money: { name: "money" },
+ interval: { name: "interval" },
+ oid: { name: "oid" },
+ }
+
+ OID = PostgreSQL::OID #:nodoc:
+
+ include PostgreSQL::Quoting
+ include PostgreSQL::ReferentialIntegrity
+ include PostgreSQL::SchemaStatements
+ include PostgreSQL::DatabaseStatements
+ include PostgreSQL::ColumnDumper
+
+ def supports_index_sort_order?
+ true
+ end
+
+ def supports_partial_index?
+ true
+ end
+
+ def supports_expression_index?
+ true
+ end
+
+ def supports_transaction_isolation?
+ true
+ end
+
+ def supports_foreign_keys?
+ true
+ end
+
+ def supports_views?
+ true
+ end
+
+ def supports_datetime_with_precision?
+ true
+ end
+
+ def supports_json?
+ postgresql_version >= 90200
+ end
+
+ def supports_comments?
+ true
+ end
+
+ def supports_savepoints?
+ true
+ end
+
+ def index_algorithms
+ { concurrently: "CONCURRENTLY" }
+ end
+
+ class StatementPool < ConnectionAdapters::StatementPool
+ def initialize(connection, max)
+ super(max)
+ @connection = connection
+ @counter = 0
+ end
+
+ def next_key
+ "a#{@counter + 1}"
+ end
+
+ def []=(sql, key)
+ super.tap { @counter += 1 }
+ end
+
+ private
+
+ def dealloc(key)
+ @connection.query "DEALLOCATE #{key}" if connection_active?
+ rescue PG::Error
+ end
+
+ def connection_active?
+ @connection.status == PG::CONNECTION_OK
+ rescue PG::Error
+ false
+ end
+ end
+
+ # Initializes and connects a PostgreSQL adapter.
+ def initialize(connection, logger, connection_parameters, config)
+ super(connection, logger, config)
+
+ @connection_parameters = connection_parameters
+
+ # @local_tz is initialized as nil to avoid warnings when connect tries to use it
+ @local_tz = nil
+ @max_identifier_length = nil
+
+ connect
+ add_pg_encoders
+ @statements = StatementPool.new @connection,
+ self.class.type_cast_config_to_integer(config[:statement_limit])
+
+ if postgresql_version < 90100
+ raise "Your version of PostgreSQL (#{postgresql_version}) is too old. Active Record supports PostgreSQL >= 9.1."
+ end
+
+ add_pg_decoders
+
+ @type_map = Type::HashLookupTypeMap.new
+ initialize_type_map
+ @local_tz = execute("SHOW TIME ZONE", "SCHEMA").first["TimeZone"]
+ @use_insert_returning = @config.key?(:insert_returning) ? self.class.type_cast_config_to_boolean(@config[:insert_returning]) : true
+ end
+
+ # Clears the prepared statements cache.
+ def clear_cache!
+ @lock.synchronize do
+ @statements.clear
+ end
+ end
+
+ def truncate(table_name, name = nil)
+ exec_query "TRUNCATE TABLE #{quote_table_name(table_name)}", name, []
+ end
+
+ # Is this connection alive and ready for queries?
+ def active?
+ @lock.synchronize do
+ @connection.query "SELECT 1"
+ end
+ true
+ rescue PG::Error
+ false
+ end
+
+ # Close then reopen the connection.
+ def reconnect!
+ @lock.synchronize do
+ super
+ @connection.reset
+ configure_connection
+ end
+ end
+
+ def reset!
+ @lock.synchronize do
+ clear_cache!
+ reset_transaction
+ unless @connection.transaction_status == ::PG::PQTRANS_IDLE
+ @connection.query "ROLLBACK"
+ end
+ @connection.query "DISCARD ALL"
+ configure_connection
+ end
+ end
+
+ # Disconnects from the database if already connected. Otherwise, this
+ # method does nothing.
+ def disconnect!
+ @lock.synchronize do
+ super
+ @connection.close rescue nil
+ end
+ end
+
+ def native_database_types #:nodoc:
+ NATIVE_DATABASE_TYPES
+ end
+
+ def set_standard_conforming_strings
+ execute("SET standard_conforming_strings = on", "SCHEMA")
+ end
+
+ def supports_ddl_transactions?
+ true
+ end
+
+ def supports_advisory_locks?
+ true
+ end
+
+ def supports_explain?
+ true
+ end
+
+ def supports_extensions?
+ true
+ end
+
+ def supports_ranges?
+ # Range datatypes weren't introduced until PostgreSQL 9.2
+ postgresql_version >= 90200
+ end
+
+ def supports_materialized_views?
+ postgresql_version >= 90300
+ end
+
+ def supports_pgcrypto_uuid?
+ postgresql_version >= 90400
+ end
+
+ def get_advisory_lock(lock_id) # :nodoc:
+ unless lock_id.is_a?(Integer) && lock_id.bit_length <= 63
+ raise(ArgumentError, "Postgres requires advisory lock ids to be a signed 64 bit integer")
+ end
+ query_value("SELECT pg_try_advisory_lock(#{lock_id})")
+ end
+
+ def release_advisory_lock(lock_id) # :nodoc:
+ unless lock_id.is_a?(Integer) && lock_id.bit_length <= 63
+ raise(ArgumentError, "Postgres requires advisory lock ids to be a signed 64 bit integer")
+ end
+ query_value("SELECT pg_advisory_unlock(#{lock_id})")
+ end
+
+ def enable_extension(name)
+ exec_query("CREATE EXTENSION IF NOT EXISTS \"#{name}\"").tap {
+ reload_type_map
+ }
+ end
+
+ def disable_extension(name)
+ exec_query("DROP EXTENSION IF EXISTS \"#{name}\" CASCADE").tap {
+ reload_type_map
+ }
+ end
+
+ def extension_enabled?(name)
+ if supports_extensions?
+ res = exec_query("SELECT EXISTS(SELECT * FROM pg_available_extensions WHERE name = '#{name}' AND installed_version IS NOT NULL) as enabled", "SCHEMA")
+ res.cast_values.first
+ end
+ end
+
+ def extensions
+ if supports_extensions?
+ exec_query("SELECT extname FROM pg_extension", "SCHEMA").cast_values
+ else
+ super
+ end
+ end
+
+ # Returns the maximum identifier length supported by PostgreSQL.
+ def table_alias_length
+ @max_identifier_length ||= query_value("SHOW max_identifier_length", "SCHEMA").to_i
+ end
+ alias index_name_length table_alias_length
+
+ # Set the authorized user for this session
+ def session_auth=(user)
+ clear_cache!
+ execute("SET SESSION AUTHORIZATION #{user}")
+ end
+
+ def use_insert_returning?
+ @use_insert_returning
+ end
+
+ def update_table_definition(table_name, base) #:nodoc:
+ PostgreSQL::Table.new(table_name, base)
+ end
+
+ def column_name_for_operation(operation, node) # :nodoc:
+ OPERATION_ALIASES.fetch(operation) { operation.downcase }
+ end
+
+ OPERATION_ALIASES = { # :nodoc:
+ "maximum" => "max",
+ "minimum" => "min",
+ "average" => "avg",
+ }
+
+ # Returns the version of the connected PostgreSQL server.
+ def postgresql_version
+ @connection.server_version
+ end
+
+ def default_index_type?(index) # :nodoc:
+ index.using == :btree || super
+ end
+
+ private
+
+ # See https://www.postgresql.org/docs/current/static/errcodes-appendix.html
+ VALUE_LIMIT_VIOLATION = "22001"
+ NUMERIC_VALUE_OUT_OF_RANGE = "22003"
+ NOT_NULL_VIOLATION = "23502"
+ FOREIGN_KEY_VIOLATION = "23503"
+ UNIQUE_VIOLATION = "23505"
+ SERIALIZATION_FAILURE = "40001"
+ DEADLOCK_DETECTED = "40P01"
+
+ def translate_exception(exception, message)
+ return exception unless exception.respond_to?(:result)
+
+ case exception.result.try(:error_field, PG::PG_DIAG_SQLSTATE)
+ when UNIQUE_VIOLATION
+ RecordNotUnique.new(message)
+ when FOREIGN_KEY_VIOLATION
+ InvalidForeignKey.new(message)
+ when VALUE_LIMIT_VIOLATION
+ ValueTooLong.new(message)
+ when NUMERIC_VALUE_OUT_OF_RANGE
+ RangeError.new(message)
+ when NOT_NULL_VIOLATION
+ NotNullViolation.new(message)
+ when SERIALIZATION_FAILURE
+ SerializationFailure.new(message)
+ when DEADLOCK_DETECTED
+ Deadlocked.new(message)
+ else
+ super
+ end
+ end
+
+ def get_oid_type(oid, fmod, column_name, sql_type = "".freeze)
+ if !type_map.key?(oid)
+ load_additional_types([oid])
+ end
+
+ type_map.fetch(oid, fmod, sql_type) {
+ warn "unknown OID #{oid}: failed to recognize type of '#{column_name}'. It will be treated as String."
+ Type.default_value.tap do |cast_type|
+ type_map.register_type(oid, cast_type)
+ end
+ }
+ end
+
+ def initialize_type_map(m = type_map)
+ register_class_with_limit m, "int2", Type::Integer
+ register_class_with_limit m, "int4", Type::Integer
+ register_class_with_limit m, "int8", Type::Integer
+ m.register_type "oid", OID::Oid.new
+ m.register_type "float4", Type::Float.new
+ m.alias_type "float8", "float4"
+ m.register_type "text", Type::Text.new
+ register_class_with_limit m, "varchar", Type::String
+ m.alias_type "char", "varchar"
+ m.alias_type "name", "varchar"
+ m.alias_type "bpchar", "varchar"
+ m.register_type "bool", Type::Boolean.new
+ register_class_with_limit m, "bit", OID::Bit
+ register_class_with_limit m, "varbit", OID::BitVarying
+ m.alias_type "timestamptz", "timestamp"
+ m.register_type "date", Type::Date.new
+
+ m.register_type "money", OID::Money.new
+ m.register_type "bytea", OID::Bytea.new
+ m.register_type "point", OID::Point.new
+ m.register_type "hstore", OID::Hstore.new
+ m.register_type "json", Type::Json.new
+ m.register_type "jsonb", OID::Jsonb.new
+ m.register_type "cidr", OID::Cidr.new
+ m.register_type "inet", OID::Inet.new
+ m.register_type "uuid", OID::Uuid.new
+ m.register_type "xml", OID::Xml.new
+ m.register_type "tsvector", OID::SpecializedString.new(:tsvector)
+ m.register_type "macaddr", OID::SpecializedString.new(:macaddr)
+ m.register_type "citext", OID::SpecializedString.new(:citext)
+ m.register_type "ltree", OID::SpecializedString.new(:ltree)
+ m.register_type "line", OID::SpecializedString.new(:line)
+ m.register_type "lseg", OID::SpecializedString.new(:lseg)
+ m.register_type "box", OID::SpecializedString.new(:box)
+ m.register_type "path", OID::SpecializedString.new(:path)
+ m.register_type "polygon", OID::SpecializedString.new(:polygon)
+ m.register_type "circle", OID::SpecializedString.new(:circle)
+
+ m.register_type "interval" do |_, _, sql_type|
+ precision = extract_precision(sql_type)
+ OID::SpecializedString.new(:interval, precision: precision)
+ end
+
+ register_class_with_precision m, "time", Type::Time
+ register_class_with_precision m, "timestamp", OID::DateTime
+
+ m.register_type "numeric" do |_, fmod, sql_type|
+ precision = extract_precision(sql_type)
+ scale = extract_scale(sql_type)
+
+ # The type for the numeric depends on the width of the field,
+ # so we'll do something special here.
+ #
+ # When dealing with decimal columns:
+ #
+ # places after decimal = fmod - 4 & 0xffff
+ # places before decimal = (fmod - 4) >> 16 & 0xffff
+ if fmod && (fmod - 4 & 0xffff).zero?
+ # FIXME: Remove this class, and the second argument to
+ # lookups on PG
+ Type::DecimalWithoutScale.new(precision: precision)
+ else
+ OID::Decimal.new(precision: precision, scale: scale)
+ end
+ end
+
+ load_additional_types
+ end
+
+ def extract_limit(sql_type)
+ case sql_type
+ when /^bigint/i, /^int8/i
+ 8
+ when /^smallint/i
+ 2
+ else
+ super
+ end
+ end
+
+ # Extracts the value from a PostgreSQL column default definition.
+ def extract_value_from_default(default)
+ case default
+ # Quoted types
+ when /\A[\(B]?'(.*)'.*::"?([\w. ]+)"?(?:\[\])?\z/m
+ # The default 'now'::date is CURRENT_DATE
+ if $1 == "now".freeze && $2 == "date".freeze
+ nil
+ else
+ $1.gsub("''".freeze, "'".freeze)
+ end
+ # Boolean types
+ when "true".freeze, "false".freeze
+ default
+ # Numeric types
+ when /\A\(?(-?\d+(\.\d*)?)\)?(::bigint)?\z/
+ $1
+ # Object identifier types
+ when /\A-?\d+\z/
+ $1
+ else
+ # Anything else is blank, some user type, or some function
+ # and we can't know the value of that, so return nil.
+ nil
+ end
+ end
+
+ def extract_default_function(default_value, default)
+ default if has_default_function?(default_value, default)
+ end
+
+ def has_default_function?(default_value, default)
+ !default_value && %r{\w+\(.*\)|\(.*\)::\w+|CURRENT_DATE|CURRENT_TIMESTAMP}.match?(default)
+ end
+
+ def load_additional_types(oids = nil)
+ initializer = OID::TypeMapInitializer.new(type_map)
+
+ if supports_ranges?
+ query = <<-SQL
+ SELECT t.oid, t.typname, t.typelem, t.typdelim, t.typinput, r.rngsubtype, t.typtype, t.typbasetype
+ FROM pg_type as t
+ LEFT JOIN pg_range as r ON oid = rngtypid
+ SQL
+ else
+ query = <<-SQL
+ SELECT t.oid, t.typname, t.typelem, t.typdelim, t.typinput, t.typtype, t.typbasetype
+ FROM pg_type as t
+ SQL
+ end
+
+ if oids
+ query += "WHERE t.oid::integer IN (%s)" % oids.join(", ")
+ else
+ query += initializer.query_conditions_for_initial_load
+ end
+
+ execute_and_clear(query, "SCHEMA", []) do |records|
+ initializer.run(records)
+ end
+ end
+
+ FEATURE_NOT_SUPPORTED = "0A000" #:nodoc:
+
+ def execute_and_clear(sql, name, binds, prepare: false)
+ if without_prepared_statement?(binds)
+ result = exec_no_cache(sql, name, [])
+ elsif !prepare
+ result = exec_no_cache(sql, name, binds)
+ else
+ result = exec_cache(sql, name, binds)
+ end
+ ret = yield result
+ result.clear
+ ret
+ end
+
+ def exec_no_cache(sql, name, binds)
+ type_casted_binds = type_casted_binds(binds)
+ log(sql, name, binds, type_casted_binds) do
+ ActiveSupport::Dependencies.interlock.permit_concurrent_loads do
+ @connection.async_exec(sql, type_casted_binds)
+ end
+ end
+ end
+
+ def exec_cache(sql, name, binds)
+ stmt_key = prepare_statement(sql)
+ type_casted_binds = type_casted_binds(binds)
+
+ log(sql, name, binds, type_casted_binds, stmt_key) do
+ ActiveSupport::Dependencies.interlock.permit_concurrent_loads do
+ @connection.exec_prepared(stmt_key, type_casted_binds)
+ end
+ end
+ rescue ActiveRecord::StatementInvalid => e
+ raise unless is_cached_plan_failure?(e)
+
+ # Nothing we can do if we are in a transaction because all commands
+ # will raise InFailedSQLTransaction
+ if in_transaction?
+ raise ActiveRecord::PreparedStatementCacheExpired.new(e.cause.message)
+ else
+ @lock.synchronize do
+ # outside of transactions we can simply flush this query and retry
+ @statements.delete sql_key(sql)
+ end
+ retry
+ end
+ end
+
+ # Annoyingly, the error code for prepared statements whose return value
+ # may have changed is FEATURE_NOT_SUPPORTED.
+ #
+ # This code covers several different error types, so we need to do
+ # additional work to classify the exception definitively as an
+ # ActiveRecord::PreparedStatementCacheExpired.
+ #
+ # Check here for more details:
+ # http://git.postgresql.org/gitweb/?p=postgresql.git;a=blob;f=src/backend/utils/cache/plancache.c#l573
+ CACHED_PLAN_HEURISTIC = "cached plan must not change result type".freeze
+ def is_cached_plan_failure?(e)
+ pgerror = e.cause
+ code = pgerror.result.result_error_field(PG::PG_DIAG_SQLSTATE)
+ code == FEATURE_NOT_SUPPORTED && pgerror.message.include?(CACHED_PLAN_HEURISTIC)
+ rescue
+ false
+ end
+
+ def in_transaction?
+ open_transactions > 0
+ end
+
+ # Returns the statement identifier for the client side cache
+ # of statements
+ def sql_key(sql)
+ "#{schema_search_path}-#{sql}"
+ end
+
+ # Prepare the statement if it hasn't been prepared, return
+ # the statement key.
+ def prepare_statement(sql)
+ @lock.synchronize do
+ sql_key = sql_key(sql)
+ unless @statements.key? sql_key
+ nextkey = @statements.next_key
+ begin
+ @connection.prepare nextkey, sql
+ rescue => e
+ raise translate_exception_class(e, sql)
+ end
+ # Clear the queue
+ @connection.get_last_result
+ @statements[sql_key] = nextkey
+ end
+ @statements[sql_key]
+ end
+ end
+
+ # Connects to a PostgreSQL server and sets up the adapter depending on the
+ # connected server's characteristics.
+ def connect
+ @connection = PG.connect(@connection_parameters)
+ configure_connection
+ rescue ::PG::Error => error
+ if error.message.include?("does not exist")
+ raise ActiveRecord::NoDatabaseError
+ else
+ raise
+ end
+ end
+
+ # Configures the encoding, verbosity, schema search path, and time zone of the connection.
+ # This is called by #connect and should not be called manually.
+ def configure_connection
+ if @config[:encoding]
+ @connection.set_client_encoding(@config[:encoding])
+ end
+ self.client_min_messages = @config[:min_messages] || "warning"
+ self.schema_search_path = @config[:schema_search_path] || @config[:schema_order]
+
+ # Use standard-conforming strings so we don't have to do the E'...' dance.
+ set_standard_conforming_strings
+
+ # If using Active Record's time zone support, configure the connection to return
+ # TIMESTAMP WITH TIME ZONE types in UTC.
+ # (SET TIME ZONE does not use an equals sign like other SET variables.)
+ if ActiveRecord::Base.default_timezone == :utc
+ execute("SET time zone 'UTC'", "SCHEMA")
+ elsif @local_tz
+ execute("SET time zone '#{@local_tz}'", "SCHEMA")
+ end
+
+ # SET statements from :variables config hash
+ # https://www.postgresql.org/docs/current/static/sql-set.html
+ variables = @config[:variables] || {}
+ variables.map do |k, v|
+ if v == ":default" || v == :default
+ # Sets the value to the global or compile default
+ execute("SET SESSION #{k} TO DEFAULT", "SCHEMA")
+ elsif !v.nil?
+ execute("SET SESSION #{k} TO #{quote(v)}", "SCHEMA")
+ end
+ end
+ end
+
+ # Returns the list of a table's column names, data types, and default values.
+ #
+ # The underlying query is roughly:
+ # SELECT column.name, column.type, default.value, column.comment
+ # FROM column LEFT JOIN default
+ # ON column.table_id = default.table_id
+ # AND column.num = default.column_num
+ # WHERE column.table_id = get_table_id('table_name')
+ # AND column.num > 0
+ # AND NOT column.is_dropped
+ # ORDER BY column.num
+ #
+ # If the table name is not prefixed with a schema, the database will
+ # take the first match from the schema search path.
+ #
+ # Query implementation notes:
+ # - format_type includes the column size constraint, e.g. varchar(50)
+ # - ::regclass casts the table name to its object identifier (oid)
+ def column_definitions(table_name)
+ query(<<-end_sql, "SCHEMA")
+ SELECT a.attname, format_type(a.atttypid, a.atttypmod),
+ pg_get_expr(d.adbin, d.adrelid), a.attnotnull, a.atttypid, a.atttypmod,
+ c.collname, col_description(a.attrelid, a.attnum) AS comment
+ FROM pg_attribute a
+ LEFT JOIN pg_attrdef d ON a.attrelid = d.adrelid AND a.attnum = d.adnum
+ LEFT JOIN pg_type t ON a.atttypid = t.oid
+ LEFT JOIN pg_collation c ON a.attcollation = c.oid AND a.attcollation <> t.typcollation
+ WHERE a.attrelid = #{quote(quote_table_name(table_name))}::regclass
+ AND a.attnum > 0 AND NOT a.attisdropped
+ ORDER BY a.attnum
+ end_sql
+ end
+
+ def extract_table_ref_from_insert_sql(sql)
+ sql[/into\s("[A-Za-z0-9_."\[\]\s]+"|[A-Za-z0-9_."\[\]]+)\s*/im]
+ $1.strip if $1
+ end
+
+ def arel_visitor
+ Arel::Visitors::PostgreSQL.new(self)
+ end
+
+ def can_perform_case_insensitive_comparison_for?(column)
+ @case_insensitive_cache ||= {}
+ @case_insensitive_cache[column.sql_type] ||= begin
+ sql = <<-end_sql
+ SELECT exists(
+ SELECT * FROM pg_proc
+ WHERE proname = 'lower'
+ AND proargtypes = ARRAY[#{quote column.sql_type}::regtype]::oidvector
+ ) OR exists(
+ SELECT * FROM pg_proc
+ INNER JOIN pg_cast
+ ON ARRAY[casttarget]::oidvector = proargtypes
+ WHERE proname = 'lower'
+ AND castsource = #{quote column.sql_type}::regtype
+ )
+ end_sql
+ execute_and_clear(sql, "SCHEMA", []) do |result|
+ result.getvalue(0, 0)
+ end
+ end
+ end
+
+ def add_pg_encoders
+ map = PG::TypeMapByClass.new
+ map[Integer] = PG::TextEncoder::Integer.new
+ map[TrueClass] = PG::TextEncoder::Boolean.new
+ map[FalseClass] = PG::TextEncoder::Boolean.new
+ @connection.type_map_for_queries = map
+ end
+
+ def add_pg_decoders
+ coders_by_name = {
+ "int2" => PG::TextDecoder::Integer,
+ "int4" => PG::TextDecoder::Integer,
+ "int8" => PG::TextDecoder::Integer,
+ "oid" => PG::TextDecoder::Integer,
+ "float4" => PG::TextDecoder::Float,
+ "float8" => PG::TextDecoder::Float,
+ "bool" => PG::TextDecoder::Boolean,
+ }
+ known_coder_types = coders_by_name.keys.map { |n| quote(n) }
+ query = <<-SQL % known_coder_types.join(", ")
+ SELECT t.oid, t.typname
+ FROM pg_type as t
+ WHERE t.typname IN (%s)
+ SQL
+ coders = execute_and_clear(query, "SCHEMA", []) do |result|
+ result
+ .map { |row| construct_coder(row, coders_by_name[row["typname"]]) }
+ .compact
+ end
+
+ map = PG::TypeMapByOid.new
+ coders.each { |coder| map.add_coder(coder) }
+ @connection.type_map_for_results = map
+ end
+
+ def construct_coder(row, coder_class)
+ return unless coder_class
+ coder_class.new(oid: row["oid"].to_i, name: row["typname"])
+ end
+
+ ActiveRecord::Type.add_modifier({ array: true }, OID::Array, adapter: :postgresql)
+ ActiveRecord::Type.add_modifier({ range: true }, OID::Range, adapter: :postgresql)
+ ActiveRecord::Type.register(:bit, OID::Bit, adapter: :postgresql)
+ ActiveRecord::Type.register(:bit_varying, OID::BitVarying, adapter: :postgresql)
+ ActiveRecord::Type.register(:binary, OID::Bytea, adapter: :postgresql)
+ ActiveRecord::Type.register(:cidr, OID::Cidr, adapter: :postgresql)
+ ActiveRecord::Type.register(:datetime, OID::DateTime, adapter: :postgresql)
+ ActiveRecord::Type.register(:decimal, OID::Decimal, adapter: :postgresql)
+ ActiveRecord::Type.register(:enum, OID::Enum, adapter: :postgresql)
+ ActiveRecord::Type.register(:hstore, OID::Hstore, adapter: :postgresql)
+ ActiveRecord::Type.register(:inet, OID::Inet, adapter: :postgresql)
+ ActiveRecord::Type.register(:jsonb, OID::Jsonb, adapter: :postgresql)
+ ActiveRecord::Type.register(:money, OID::Money, adapter: :postgresql)
+ ActiveRecord::Type.register(:point, OID::Point, adapter: :postgresql)
+ ActiveRecord::Type.register(:legacy_point, OID::LegacyPoint, adapter: :postgresql)
+ ActiveRecord::Type.register(:uuid, OID::Uuid, adapter: :postgresql)
+ ActiveRecord::Type.register(:vector, OID::Vector, adapter: :postgresql)
+ ActiveRecord::Type.register(:xml, OID::Xml, adapter: :postgresql)
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/connection_adapters/schema_cache.rb b/activerecord/lib/active_record/connection_adapters/schema_cache.rb
new file mode 100644
index 0000000000..f34b6733da
--- /dev/null
+++ b/activerecord/lib/active_record/connection_adapters/schema_cache.rb
@@ -0,0 +1,118 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ module ConnectionAdapters
+ class SchemaCache
+ attr_reader :version
+ attr_accessor :connection
+
+ def initialize(conn)
+ @connection = conn
+
+ @columns = {}
+ @columns_hash = {}
+ @primary_keys = {}
+ @data_sources = {}
+ end
+
+ def initialize_dup(other)
+ super
+ @columns = @columns.dup
+ @columns_hash = @columns_hash.dup
+ @primary_keys = @primary_keys.dup
+ @data_sources = @data_sources.dup
+ end
+
+ def encode_with(coder)
+ coder["columns"] = @columns
+ coder["columns_hash"] = @columns_hash
+ coder["primary_keys"] = @primary_keys
+ coder["data_sources"] = @data_sources
+ coder["version"] = ActiveRecord::Migrator.current_version
+ end
+
+ def init_with(coder)
+ @columns = coder["columns"]
+ @columns_hash = coder["columns_hash"]
+ @primary_keys = coder["primary_keys"]
+ @data_sources = coder["data_sources"]
+ @version = coder["version"]
+ end
+
+ def primary_keys(table_name)
+ @primary_keys[table_name] ||= data_source_exists?(table_name) ? connection.primary_key(table_name) : nil
+ end
+
+ # A cached lookup for table existence.
+ def data_source_exists?(name)
+ prepare_data_sources if @data_sources.empty?
+ return @data_sources[name] if @data_sources.key? name
+
+ @data_sources[name] = connection.data_source_exists?(name)
+ end
+
+ # Add internal cache for table with +table_name+.
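+ #
+ # For example (illustrative usage): connection.schema_cache.add("posts")
+ # warms the primary key, columns and columns_hash caches for that table.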
+ def add(table_name)
+ if data_source_exists?(table_name)
+ primary_keys(table_name)
+ columns(table_name)
+ columns_hash(table_name)
+ end
+ end
+
+ def data_sources(name)
+ @data_sources[name]
+ end
+
+ # Get the columns for a table
+ def columns(table_name)
+ @columns[table_name] ||= connection.columns(table_name)
+ end
+
+ # Get the columns for a table as a hash: the key is the column name,
+ # the value is the column object.
+ def columns_hash(table_name)
+ @columns_hash[table_name] ||= Hash[columns(table_name).map { |col|
+ [col.name, col]
+ }]
+ end
+
+ # Clears out internal caches
+ def clear!
+ @columns.clear
+ @columns_hash.clear
+ @primary_keys.clear
+ @data_sources.clear
+ @version = nil
+ end
+
+ def size
+ [@columns, @columns_hash, @primary_keys, @data_sources].map(&:size).inject :+
+ end
+
+ # Clear out internal caches for the data source +name+.
+ def clear_data_source_cache!(name)
+ @columns.delete name
+ @columns_hash.delete name
+ @primary_keys.delete name
+ @data_sources.delete name
+ end
+
+ def marshal_dump
+ # Fetching the current version during initialization would cause a stack
+ # overflow, so it is fetched lazily here instead.
+ @version = ActiveRecord::Migrator.current_version
+ [@version, @columns, @columns_hash, @primary_keys, @data_sources]
+ end
+
+ def marshal_load(array)
+ @version, @columns, @columns_hash, @primary_keys, @data_sources = array
+ end
+
+ private
+
+ def prepare_data_sources
+ connection.data_sources.each { |source| @data_sources[source] = true }
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/connection_adapters/sql_type_metadata.rb b/activerecord/lib/active_record/connection_adapters/sql_type_metadata.rb
new file mode 100644
index 0000000000..8489bcbf1d
--- /dev/null
+++ b/activerecord/lib/active_record/connection_adapters/sql_type_metadata.rb
@@ -0,0 +1,34 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ # :stopdoc:
+ module ConnectionAdapters
+ class SqlTypeMetadata
+ attr_reader :sql_type, :type, :limit, :precision, :scale
+
+ def initialize(sql_type: nil, type: nil, limit: nil, precision: nil, scale: nil)
+ @sql_type = sql_type
+ @type = type
+ @limit = limit
+ @precision = precision
+ @scale = scale
+ end
+
+ def ==(other)
+ other.is_a?(SqlTypeMetadata) &&
+ attributes_for_hash == other.attributes_for_hash
+ end
+ alias eql? ==
+
+ def hash
+ attributes_for_hash.hash
+ end
+
+ protected
+
+ def attributes_for_hash
+ [self.class, sql_type, type, limit, precision, scale]
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/connection_adapters/sqlite3/explain_pretty_printer.rb b/activerecord/lib/active_record/connection_adapters/sqlite3/explain_pretty_printer.rb
new file mode 100644
index 0000000000..832fdfe5c4
--- /dev/null
+++ b/activerecord/lib/active_record/connection_adapters/sqlite3/explain_pretty_printer.rb
@@ -0,0 +1,21 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ module ConnectionAdapters
+ module SQLite3
+ class ExplainPrettyPrinter # :nodoc:
+ # Pretty prints the result of an EXPLAIN QUERY PLAN in a way that resembles
+ # the output of the SQLite shell:
+ #
+ # 0|0|0|SEARCH TABLE users USING INTEGER PRIMARY KEY (rowid=?) (~1 rows)
+ # 0|1|1|SCAN TABLE posts (~100000 rows)
+ #
+ def pp(result)
+ result.rows.map do |row|
+ row.join("|")
+ end.join("\n") + "\n"
+ end
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/connection_adapters/sqlite3/quoting.rb b/activerecord/lib/active_record/connection_adapters/sqlite3/quoting.rb
new file mode 100644
index 0000000000..8042dbfea2
--- /dev/null
+++ b/activerecord/lib/active_record/connection_adapters/sqlite3/quoting.rb
@@ -0,0 +1,62 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ module ConnectionAdapters
+ module SQLite3
+ module Quoting # :nodoc:
+ def quote_string(s)
+ @connection.class.quote(s)
+ end
+
+ def quote_table_name_for_assignment(table, attr)
+ quote_column_name(attr)
+ end
+
+ def quote_column_name(name)
+ @quoted_column_names[name] ||= %Q("#{super.gsub('"', '""')}").freeze
+ end
+
+ def quoted_time(value)
+ quoted_date(value)
+ end
+
+ def quoted_binary(value)
+ "x'#{value.hex}'"
+ end
+
+ def quoted_true
+ ActiveRecord::ConnectionAdapters::SQLite3Adapter.represent_boolean_as_integer ? "1".freeze : "'t'".freeze
+ end
+
+ def unquoted_true
+ ActiveRecord::ConnectionAdapters::SQLite3Adapter.represent_boolean_as_integer ? 1 : "t".freeze
+ end
+
+ def quoted_false
+ ActiveRecord::ConnectionAdapters::SQLite3Adapter.represent_boolean_as_integer ? "0".freeze : "'f'".freeze
+ end
+
+ def unquoted_false
+ ActiveRecord::ConnectionAdapters::SQLite3Adapter.represent_boolean_as_integer ? 0 : "f".freeze
+ end
+
+ private
+
+ def _type_cast(value)
+ case value
+ when BigDecimal
+ value.to_f
+ when String
+ if value.encoding == Encoding::ASCII_8BIT
+ super(value.encode(Encoding::UTF_8))
+ else
+ super
+ end
+ else
+ super
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/connection_adapters/sqlite3/schema_creation.rb b/activerecord/lib/active_record/connection_adapters/sqlite3/schema_creation.rb
new file mode 100644
index 0000000000..b842561317
--- /dev/null
+++ b/activerecord/lib/active_record/connection_adapters/sqlite3/schema_creation.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ module ConnectionAdapters
+ module SQLite3
+ class SchemaCreation < AbstractAdapter::SchemaCreation # :nodoc:
+ private
+ def add_column_options!(sql, options)
+ if options[:collation]
+ sql << " COLLATE \"#{options[:collation]}\""
+ end
+ super
+ end
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/connection_adapters/sqlite3/schema_definitions.rb b/activerecord/lib/active_record/connection_adapters/sqlite3/schema_definitions.rb
new file mode 100644
index 0000000000..501f17dbad
--- /dev/null
+++ b/activerecord/lib/active_record/connection_adapters/sqlite3/schema_definitions.rb
@@ -0,0 +1,30 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ module ConnectionAdapters
+ module SQLite3
+ module ColumnMethods
+ def primary_key(name, type = :primary_key, **options)
+ if %i(integer bigint).include?(type) && (options.delete(:auto_increment) == true || !options.key?(:default))
+ type = :primary_key
+ end
+
+ super
+ end
+ end
+
+ class TableDefinition < ActiveRecord::ConnectionAdapters::TableDefinition
+ include ColumnMethods
+
+ def references(*args, **options)
+ super(*args, type: :integer, **options)
+ end
+ alias :belongs_to :references
+ end
+
+ class Table < ActiveRecord::ConnectionAdapters::Table
+ include ColumnMethods
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/connection_adapters/sqlite3/schema_dumper.rb b/activerecord/lib/active_record/connection_adapters/sqlite3/schema_dumper.rb
new file mode 100644
index 0000000000..ab057c73f1
--- /dev/null
+++ b/activerecord/lib/active_record/connection_adapters/sqlite3/schema_dumper.rb
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ module ConnectionAdapters
+ module SQLite3
+ module ColumnDumper # :nodoc:
+ private
+
+ def default_primary_key?(column)
+ schema_type(column) == :integer
+ end
+
+ def explicit_primary_key_default?(column)
+ column.bigint?
+ end
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/connection_adapters/sqlite3/schema_statements.rb b/activerecord/lib/active_record/connection_adapters/sqlite3/schema_statements.rb
new file mode 100644
index 0000000000..c155e7f1ac
--- /dev/null
+++ b/activerecord/lib/active_record/connection_adapters/sqlite3/schema_statements.rb
@@ -0,0 +1,94 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ module ConnectionAdapters
+ module SQLite3
+ module SchemaStatements # :nodoc:
+ # Returns an array of indexes for the given table.
+ def indexes(table_name, name = nil)
+ if name
+ ActiveSupport::Deprecation.warn(<<-MSG.squish)
+ Passing name to #indexes is deprecated without replacement.
+ MSG
+ end
+
+ exec_query("PRAGMA index_list(#{quote_table_name(table_name)})", "SCHEMA").map do |row|
+ index_sql = query_value(<<-SQL, "SCHEMA")
+ SELECT sql
+ FROM sqlite_master
+ WHERE name = #{quote(row['name'])} AND type = 'index'
+ UNION ALL
+ SELECT sql
+ FROM sqlite_temp_master
+ WHERE name = #{quote(row['name'])} AND type = 'index'
+ SQL
+
+ /\sWHERE\s+(?<where>.+)$/i =~ index_sql
+
+ columns = exec_query("PRAGMA index_info(#{quote(row['name'])})", "SCHEMA").map do |col|
+ col["name"]
+ end
+
+ IndexDefinition.new(
+ table_name,
+ row["name"],
+ row["unique"] != 0,
+ columns,
+ where: where
+ )
+ end
+ end
+
+ private
+ def schema_creation
+ SQLite3::SchemaCreation.new(self)
+ end
+
+ def create_table_definition(*args)
+ SQLite3::TableDefinition.new(*args)
+ end
+
+ def new_column_from_field(table_name, field)
+ default = \
+ case field["dflt_value"]
+ when /^null$/i
+ nil
+ when /^'(.*)'$/m
+ $1.gsub("''", "'")
+ when /^"(.*)"$/m
+ $1.gsub('""', '"')
+ else
+ field["dflt_value"]
+ end
+
+ type_metadata = fetch_type_metadata(field["type"])
+ Column.new(field["name"], default, type_metadata, field["notnull"].to_i == 0, table_name, nil, field["collation"])
+ end
+
+ def data_source_sql(name = nil, type: nil)
+ scope = quoted_scope(name, type: type)
+ scope[:type] ||= "'table','view'"
+
+ sql = "SELECT name FROM sqlite_master WHERE name <> 'sqlite_sequence'".dup
+ sql << " AND name = #{scope[:name]}" if scope[:name]
+ sql << " AND type IN (#{scope[:type]})"
+ sql
+ end
+
+ def quoted_scope(name = nil, type: nil)
+ type = \
+ case type
+ when "BASE TABLE"
+ "'table'"
+ when "VIEW"
+ "'view'"
+ end
+ scope = {}
+ scope[:name] = quote(name) if name
+ scope[:type] = type if type
+ scope
+ end
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/connection_adapters/sqlite3_adapter.rb b/activerecord/lib/active_record/connection_adapters/sqlite3_adapter.rb
new file mode 100644
index 0000000000..8c12cb09bd
--- /dev/null
+++ b/activerecord/lib/active_record/connection_adapters/sqlite3_adapter.rb
@@ -0,0 +1,537 @@
+# frozen_string_literal: true
+
+require_relative "abstract_adapter"
+require_relative "statement_pool"
+require_relative "sqlite3/explain_pretty_printer"
+require_relative "sqlite3/quoting"
+require_relative "sqlite3/schema_creation"
+require_relative "sqlite3/schema_definitions"
+require_relative "sqlite3/schema_dumper"
+require_relative "sqlite3/schema_statements"
+
+gem "sqlite3", "~> 1.3.6"
+require "sqlite3"
+
+module ActiveRecord
+ module ConnectionHandling # :nodoc:
+ def sqlite3_connection(config)
+ # Require database.
+ unless config[:database]
+ raise ArgumentError, "No database file specified. Missing argument: database"
+ end
+
+ # Allow database path relative to Rails.root, but only if the database
+ # path is not the special path that tells sqlite to build a database only
+ # in memory.
+ if ":memory:" != config[:database]
+ config[:database] = File.expand_path(config[:database], Rails.root) if defined?(Rails.root)
+ dirname = File.dirname(config[:database])
+ Dir.mkdir(dirname) unless File.directory?(dirname)
+ end
+
+ db = SQLite3::Database.new(
+ config[:database].to_s,
+ results_as_hash: true
+ )
+
+ db.busy_timeout(ConnectionAdapters::SQLite3Adapter.type_cast_config_to_integer(config[:timeout])) if config[:timeout]
+
+ ConnectionAdapters::SQLite3Adapter.new(db, logger, nil, config)
+ rescue Errno::ENOENT => error
+ if error.message.include?("No such file or directory")
+ raise ActiveRecord::NoDatabaseError
+ else
+ raise
+ end
+ end
+ end
+
+ module ConnectionAdapters #:nodoc:
+ # The SQLite3 adapter works with SQLite 3.6.16 or newer
+ # with the sqlite3-ruby drivers (available as gem from https://rubygems.org/gems/sqlite3).
+ #
+ # Options:
+ #
+ # * <tt>:database</tt> - Path to the database file.
+ class SQLite3Adapter < AbstractAdapter
+ ADAPTER_NAME = "SQLite".freeze
+
+ include SQLite3::Quoting
+ include SQLite3::ColumnDumper
+ include SQLite3::SchemaStatements
+
+ NATIVE_DATABASE_TYPES = {
+ primary_key: "INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL",
+ string: { name: "varchar" },
+ text: { name: "text" },
+ integer: { name: "integer" },
+ float: { name: "float" },
+ decimal: { name: "decimal" },
+ datetime: { name: "datetime" },
+ time: { name: "time" },
+ date: { name: "date" },
+ binary: { name: "blob" },
+ boolean: { name: "boolean" }
+ }
+
+ ##
+ # :singleton-method:
+ # Indicates whether boolean values are stored in sqlite3 databases as 1
+ # and 0 or 't' and 'f'. Leaving `ActiveRecord::ConnectionAdapters::SQLite3Adapter.represent_boolean_as_integer`
+ # set to false is deprecated. SQLite databases have used 't' and 'f' to
+ # serialize boolean values and must have old data converted to 1 and 0
+ # (SQLite's native boolean serialization) before setting this flag to true.
+ # Conversion can be accomplished by setting up a rake task which runs
+ #
+ # ExampleModel.where("boolean_column = 't'").update_all(boolean_column: 1)
+ # ExampleModel.where("boolean_column = 'f'").update_all(boolean_column: 0)
+ #
+ # for all models and all boolean columns, after which the flag must be set
+ # to true by adding the following to your application.rb file:
+ #
+ # Rails.application.config.active_record.sqlite3.represent_boolean_as_integer = true
+ class_attribute :represent_boolean_as_integer, default: false
+
+ class StatementPool < ConnectionAdapters::StatementPool
+ private
+
+ def dealloc(stmt)
+ stmt[:stmt].close unless stmt[:stmt].closed?
+ end
+ end
+
+ def update_table_definition(table_name, base) # :nodoc:
+ SQLite3::Table.new(table_name, base)
+ end
+
+ def initialize(connection, logger, connection_options, config)
+ super(connection, logger, config)
+
+ @active = nil
+ @statements = StatementPool.new(self.class.type_cast_config_to_integer(config[:statement_limit]))
+
+ configure_connection
+ end
+
+ def supports_ddl_transactions?
+ true
+ end
+
+ def supports_savepoints?
+ true
+ end
+
+ def supports_partial_index?
+ sqlite_version >= "3.8.0"
+ end
+
+ def requires_reloading?
+ true
+ end
+
+ def supports_foreign_keys_in_create?
+ sqlite_version >= "3.6.19"
+ end
+
+ def supports_views?
+ true
+ end
+
+ def supports_datetime_with_precision?
+ true
+ end
+
+ def supports_multi_insert?
+ sqlite_version >= "3.7.11"
+ end
+
+ def active?
+ @active != false
+ end
+
+ # Disconnects from the database if already connected. Otherwise, this
+ # method does nothing.
+ def disconnect!
+ super
+ @active = false
+ @connection.close rescue nil
+ end
+
+ # Clears the prepared statements cache.
+ def clear_cache!
+ @statements.clear
+ end
+
+ def supports_index_sort_order?
+ true
+ end
+
+ # Returns 62. SQLite supports index names up to 64
+ # characters. The rest is used by Rails internally to perform
+ # temporary rename operations
+ def allowed_index_name_length
+ index_name_length - 2
+ end
+
+ def native_database_types #:nodoc:
+ NATIVE_DATABASE_TYPES
+ end
+
+ # Returns the current database encoding format as a string, e.g. 'UTF-8'
+ def encoding
+ @connection.encoding.to_s
+ end
+
+ def supports_explain?
+ true
+ end
+
+ # REFERENTIAL INTEGRITY ====================================
+
+ def disable_referential_integrity # :nodoc:
+ old = query_value("PRAGMA foreign_keys")
+
+ begin
+ execute("PRAGMA foreign_keys = OFF")
+ yield
+ ensure
+ execute("PRAGMA foreign_keys = #{old}")
+ end
+ end
+
+ #--
+ # DATABASE STATEMENTS ======================================
+ #++
+
+ def explain(arel, binds = [])
+ sql, binds = to_sql(arel, binds)
+ sql = "EXPLAIN QUERY PLAN #{sql}"
+ SQLite3::ExplainPrettyPrinter.new.pp(exec_query(sql, "EXPLAIN", []))
+ end
+
+ def exec_query(sql, name = nil, binds = [], prepare: false)
+ type_casted_binds = type_casted_binds(binds)
+
+ log(sql, name, binds, type_casted_binds) do
+ ActiveSupport::Dependencies.interlock.permit_concurrent_loads do
+ # Don't cache statements if they are not prepared
+ unless prepare
+ stmt = @connection.prepare(sql)
+ begin
+ cols = stmt.columns
+ unless without_prepared_statement?(binds)
+ stmt.bind_params(type_casted_binds)
+ end
+ records = stmt.to_a
+ ensure
+ stmt.close
+ end
+ else
+ cache = @statements[sql] ||= {
+ stmt: @connection.prepare(sql)
+ }
+ stmt = cache[:stmt]
+ cols = cache[:cols] ||= stmt.columns
+ stmt.reset!
+ stmt.bind_params(type_casted_binds)
+ records = stmt.to_a
+ end
+
+ ActiveRecord::Result.new(cols, records)
+ end
+ end
+ end
+
+ def exec_delete(sql, name = "SQL", binds = [])
+ exec_query(sql, name, binds)
+ @connection.changes
+ end
+ alias :exec_update :exec_delete
+
+ def last_inserted_id(result)
+ @connection.last_insert_row_id
+ end
+
+ def execute(sql, name = nil) #:nodoc:
+ log(sql, name) do
+ ActiveSupport::Dependencies.interlock.permit_concurrent_loads do
+ @connection.execute(sql)
+ end
+ end
+ end
+
+ def begin_db_transaction #:nodoc:
+ log("begin transaction", nil) { @connection.transaction }
+ end
+
+ def commit_db_transaction #:nodoc:
+ log("commit transaction", nil) { @connection.commit }
+ end
+
+ def exec_rollback_db_transaction #:nodoc:
+ log("rollback transaction", nil) { @connection.rollback }
+ end
+
+ # SCHEMA STATEMENTS ========================================
+
+ def primary_keys(table_name) # :nodoc:
+ pks = table_structure(table_name).select { |f| f["pk"] > 0 }
+ pks.sort_by { |f| f["pk"] }.map { |f| f["name"] }
+ end
+
+ def remove_index(table_name, options = {}) #:nodoc:
+ index_name = index_name_for_remove(table_name, options)
+ exec_query "DROP INDEX #{quote_column_name(index_name)}"
+ end
+
+ # Renames a table.
+ #
+ # Example:
+ # rename_table('octopuses', 'octopi')
+ def rename_table(table_name, new_name)
+ exec_query "ALTER TABLE #{quote_table_name(table_name)} RENAME TO #{quote_table_name(new_name)}"
+ rename_table_indexes(table_name, new_name)
+ end
+
+ # See: http://www.sqlite.org/lang_altertable.html
+ # SQLite has an additional restriction on the ALTER TABLE statement
+ def valid_alter_table_type?(type)
+ type.to_sym != :primary_key
+ end
+
+ def add_column(table_name, column_name, type, options = {}) #:nodoc:
+ if valid_alter_table_type?(type)
+ super(table_name, column_name, type, options)
+ else
+ alter_table(table_name) do |definition|
+ definition.column(column_name, type, options)
+ end
+ end
+ end
+
+ def remove_column(table_name, column_name, type = nil, options = {}) #:nodoc:
+ alter_table(table_name) do |definition|
+ definition.remove_column column_name
+ end
+ end
+
+ def change_column_default(table_name, column_name, default_or_changes) #:nodoc:
+ default = extract_new_default_value(default_or_changes)
+
+ alter_table(table_name) do |definition|
+ definition[column_name].default = default
+ end
+ end
+
+ def change_column_null(table_name, column_name, null, default = nil) #:nodoc:
+ unless null || default.nil?
+ exec_query("UPDATE #{quote_table_name(table_name)} SET #{quote_column_name(column_name)}=#{quote(default)} WHERE #{quote_column_name(column_name)} IS NULL")
+ end
+ alter_table(table_name) do |definition|
+ definition[column_name].null = null
+ end
+ end
+
+ def change_column(table_name, column_name, type, options = {}) #:nodoc:
+ alter_table(table_name) do |definition|
+ definition[column_name].instance_eval do
+ self.type = type
+ self.limit = options[:limit] if options.include?(:limit)
+ self.default = options[:default] if options.include?(:default)
+ self.null = options[:null] if options.include?(:null)
+ self.precision = options[:precision] if options.include?(:precision)
+ self.scale = options[:scale] if options.include?(:scale)
+ self.collation = options[:collation] if options.include?(:collation)
+ end
+ end
+ end
+
+ def rename_column(table_name, column_name, new_column_name) #:nodoc:
+ column = column_for(table_name, column_name)
+ alter_table(table_name, rename: { column.name => new_column_name.to_s })
+ rename_column_indexes(table_name, column.name, new_column_name)
+ end
+
+ def add_reference(table_name, ref_name, **options) # :nodoc:
+ super(table_name, ref_name, type: :integer, **options)
+ end
+ alias :add_belongs_to :add_reference
+
+ def foreign_keys(table_name)
+ fk_info = exec_query("PRAGMA foreign_key_list(#{quote(table_name)})", "SCHEMA")
+ fk_info.map do |row|
+ options = {
+ column: row["from"],
+ primary_key: row["to"],
+ on_delete: extract_foreign_key_action(row["on_delete"]),
+ on_update: extract_foreign_key_action(row["on_update"])
+ }
+ ForeignKeyDefinition.new(table_name, row["table"], options)
+ end
+ end
+
+ def insert_fixtures(rows, table_name)
+ rows.each do |row|
+ insert_fixture(row, table_name)
+ end
+ end
+
+ private
+
+ def table_structure(table_name)
+ structure = exec_query("PRAGMA table_info(#{quote_table_name(table_name)})", "SCHEMA")
+ raise(ActiveRecord::StatementInvalid, "Could not find table '#{table_name}'") if structure.empty?
+ table_structure_with_collation(table_name, structure)
+ end
+ alias column_definitions table_structure
+
+ def alter_table(table_name, options = {})
+ altered_table_name = "a#{table_name}"
+ caller = lambda { |definition| yield definition if block_given? }
+
+ transaction do
+ move_table(table_name, altered_table_name,
+ options.merge(temporary: true))
+ move_table(altered_table_name, table_name, &caller)
+ end
+ end
+
+ def move_table(from, to, options = {}, &block)
+ copy_table(from, to, options, &block)
+ drop_table(from)
+ end
+
+ def copy_table(from, to, options = {})
+ from_primary_key = primary_key(from)
+ options[:id] = false
+ create_table(to, options) do |definition|
+ @definition = definition
+ @definition.primary_key(from_primary_key) if from_primary_key.present?
+ columns(from).each do |column|
+ column_name = options[:rename] ?
+ (options[:rename][column.name] ||
+ options[:rename][column.name.to_sym] ||
+ column.name) : column.name
+ next if column_name == from_primary_key
+
+ @definition.column(column_name, column.type,
+ limit: column.limit, default: column.default,
+ precision: column.precision, scale: column.scale,
+ null: column.null, collation: column.collation)
+ end
+ yield @definition if block_given?
+ end
+ copy_table_indexes(from, to, options[:rename] || {})
+ copy_table_contents(from, to,
+ @definition.columns.map(&:name),
+ options[:rename] || {})
+ end
+
+ def copy_table_indexes(from, to, rename = {})
+ indexes(from).each do |index|
+ name = index.name
+ if to == "a#{from}"
+ name = "t#{name}"
+ elsif from == "a#{to}"
+ name = name[1..-1]
+ end
+
+ to_column_names = columns(to).map(&:name)
+ columns = index.columns.map { |c| rename[c] || c }.select do |column|
+ to_column_names.include?(column)
+ end
+
+ unless columns.empty?
+ # index name can't be the same
+ opts = { name: name.gsub(/(^|_)(#{from})_/, "\\1#{to}_"), internal: true }
+ opts[:unique] = true if index.unique
+ add_index(to, columns, opts)
+ end
+ end
+ end
+
+ def copy_table_contents(from, to, columns, rename = {})
+ column_mappings = Hash[columns.map { |name| [name, name] }]
+ rename.each { |a| column_mappings[a.last] = a.first }
+ from_columns = columns(from).collect(&:name)
+ columns = columns.find_all { |col| from_columns.include?(column_mappings[col]) }
+ from_columns_to_copy = columns.map { |col| column_mappings[col] }
+ quoted_columns = columns.map { |col| quote_column_name(col) } * ","
+ quoted_from_columns = from_columns_to_copy.map { |col| quote_column_name(col) } * ","
+
+ exec_query("INSERT INTO #{quote_table_name(to)} (#{quoted_columns})
+ SELECT #{quoted_from_columns} FROM #{quote_table_name(from)}")
+ end
+
+ def sqlite_version
+ @sqlite_version ||= SQLite3Adapter::Version.new(query_value("SELECT sqlite_version(*)"))
+ end
+
+ def translate_exception(exception, message)
+ case exception.message
+ # SQLite 3.8.2 returns a newly formatted error message:
+ # UNIQUE constraint failed: *table_name*.*column_name*
+ # Older versions of SQLite return:
+ # column *column_name* is not unique
+ when /column(s)? .* (is|are) not unique/, /UNIQUE constraint failed: .*/
+ RecordNotUnique.new(message)
+ when /.* may not be NULL/, /NOT NULL constraint failed: .*/
+ NotNullViolation.new(message)
+ when /FOREIGN KEY constraint failed/i
+ InvalidForeignKey.new(message)
+ else
+ super
+ end
+ end
+
+ COLLATE_REGEX = /.*\"(\w+)\".*collate\s+\"(\w+)\".*/i.freeze
+
+ def table_structure_with_collation(table_name, basic_structure)
+ collation_hash = {}
+ sql = <<-SQL
+ SELECT sql FROM
+ (SELECT * FROM sqlite_master UNION ALL
+ SELECT * FROM sqlite_temp_master)
+ WHERE type = 'table' AND name = #{quote(table_name)}
+ SQL
+
+ # The result will contain a string like the following sample:
+ # CREATE TABLE "users" ("id" INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
+ # "password_digest" varchar COLLATE "NOCASE");
+ result = exec_query(sql, "SCHEMA").first
+
+ if result
+ # Splitting on the left parenthesis and taking the last part returns all
+ # columns separated by commas.
+ columns_string = result["sql"].split("(").last
+
+ columns_string.split(",").each do |column_string|
+ # This regex will match the column name and collation type and will save
+ # the values in $1 and $2 respectively.
+ collation_hash[$1] = $2 if COLLATE_REGEX =~ column_string
+ end
+
+ basic_structure.map! do |column|
+ column_name = column["name"]
+
+ if collation_hash.has_key? column_name
+ column["collation"] = collation_hash[column_name]
+ end
+
+ column
+ end
+ else
+ basic_structure.to_hash
+ end
+ end
+
+ def arel_visitor
+ Arel::Visitors::SQLite.new(self)
+ end
+
+ def configure_connection
+ execute("PRAGMA foreign_keys = ON", "SCHEMA")
+ end
+ end
+ ActiveSupport.run_load_hooks(:active_record_sqlite3adapter, SQLite3Adapter)
+ end
+end
diff --git a/activerecord/lib/active_record/connection_adapters/statement_pool.rb b/activerecord/lib/active_record/connection_adapters/statement_pool.rb
new file mode 100644
index 0000000000..46bd831da7
--- /dev/null
+++ b/activerecord/lib/active_record/connection_adapters/statement_pool.rb
@@ -0,0 +1,61 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ module ConnectionAdapters
+ class StatementPool # :nodoc:
+ include Enumerable
+
+ DEFAULT_STATEMENT_LIMIT = 1000
+
+ def initialize(statement_limit = nil)
+ @cache = Hash.new { |h, pid| h[pid] = {} }
+ @statement_limit = statement_limit || DEFAULT_STATEMENT_LIMIT
+ end
+
+ def each(&block)
+ cache.each(&block)
+ end
+
+ def key?(key)
+ cache.key?(key)
+ end
+
+ def [](key)
+ cache[key]
+ end
+
+ def length
+ cache.length
+ end
+
+ def []=(sql, stmt)
+ while @statement_limit <= cache.size
+ dealloc(cache.shift.last)
+ end
+ cache[sql] = stmt
+ end
+
+ def clear
+ cache.each_value do |stmt|
+ dealloc stmt
+ end
+ cache.clear
+ end
+
+ def delete(key)
+ dealloc cache[key]
+ cache.delete(key)
+ end
+
+ private
+
+ def cache
+ @cache[Process.pid]
+ end
+
+ def dealloc(stmt)
+ raise NotImplementedError
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/connection_handling.rb b/activerecord/lib/active_record/connection_handling.rb
new file mode 100644
index 0000000000..9a47edfba4
--- /dev/null
+++ b/activerecord/lib/active_record/connection_handling.rb
@@ -0,0 +1,145 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ module ConnectionHandling
+ RAILS_ENV = -> { (Rails.env if defined?(Rails.env)) || ENV["RAILS_ENV"].presence || ENV["RACK_ENV"].presence }
+ DEFAULT_ENV = -> { RAILS_ENV.call || "default_env" }
+
+ # Establishes the connection to the database. Accepts a hash as input where
+ # the <tt>:adapter</tt> key must be specified with the name of a database adapter (in lower-case).
+ # Example for regular databases (MySQL, PostgreSQL, etc.):
+ #
+ # ActiveRecord::Base.establish_connection(
+ # adapter: "mysql2",
+ # host: "localhost",
+ # username: "myuser",
+ # password: "mypass",
+ # database: "somedatabase"
+ # )
+ #
+ # Example for SQLite database:
+ #
+ # ActiveRecord::Base.establish_connection(
+ # adapter: "sqlite3",
+ # database: "path/to/dbfile"
+ # )
+ #
+ # Also accepts keys as strings (for parsing from YAML for example):
+ #
+ # ActiveRecord::Base.establish_connection(
+ # "adapter" => "sqlite3",
+ # "database" => "path/to/dbfile"
+ # )
+ #
+ # Or a URL:
+ #
+ # ActiveRecord::Base.establish_connection(
+ # "postgres://myuser:mypass@localhost/somedatabase"
+ # )
+ #
+ # In case {ActiveRecord::Base.configurations}[rdoc-ref:Core.configurations]
+ # is set (Rails automatically loads the contents of config/database.yml into it),
+ # a symbol can also be given as argument, representing a key in the
+ # configuration hash:
+ #
+ # ActiveRecord::Base.establish_connection(:production)
+ #
+ # The exceptions AdapterNotSpecified, AdapterNotFound and +ArgumentError+
+ # may be raised on an error.
+ def establish_connection(config = nil)
+ raise "Anonymous class is not allowed." unless name
+
+ config ||= DEFAULT_ENV.call.to_sym
+ spec_name = self == Base ? "primary" : name
+ self.connection_specification_name = spec_name
+
+ resolver = ConnectionAdapters::ConnectionSpecification::Resolver.new(Base.configurations)
+ spec = resolver.resolve(config).symbolize_keys
+ spec[:name] = spec_name
+
+ connection_handler.establish_connection(spec)
+ end
+
+ class MergeAndResolveDefaultUrlConfig # :nodoc:
+ def initialize(raw_configurations)
+ @raw_config = raw_configurations.dup
+ @env = DEFAULT_ENV.call.to_s
+ end
+
+ # Returns fully resolved connection hashes.
+ # Merges connection information from `ENV['DATABASE_URL']` if available.
+ def resolve
+ ConnectionAdapters::ConnectionSpecification::Resolver.new(config).resolve_all
+ end
+
+ private
+ def config
+ @raw_config.dup.tap do |cfg|
+ if url = ENV["DATABASE_URL"]
+ cfg[@env] ||= {}
+ cfg[@env]["url"] ||= url
+ end
+ end
+ end
+ end
+
+ # Returns the connection currently associated with the class. This can
+ # also be used to "borrow" the connection to do database work unrelated
+ # to any of the specific Active Records.
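+ #
+ # For example (illustrative): ActiveRecord::Base.connection.execute("SELECT 1")
+ # runs raw SQL on whatever connection the class is currently using.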
+ def connection
+ retrieve_connection
+ end
+
+ attr_writer :connection_specification_name
+
+ # Return the specification name from the current class or its parent.
+ def connection_specification_name
+ if !defined?(@connection_specification_name) || @connection_specification_name.nil?
+ return self == Base ? "primary" : superclass.connection_specification_name
+ end
+ @connection_specification_name
+ end
+
+ # Returns the configuration of the associated connection as a hash:
+ #
+ # ActiveRecord::Base.connection_config
+ # # => {pool: 5, timeout: 5000, database: "db/development.sqlite3", adapter: "sqlite3"}
+ #
+ # Please use only for reading.
+ def connection_config
+ connection_pool.spec.config
+ end
+
+ def connection_pool
+ connection_handler.retrieve_connection_pool(connection_specification_name) || raise(ConnectionNotEstablished)
+ end
+
+ def retrieve_connection
+ connection_handler.retrieve_connection(connection_specification_name)
+ end
+
+ # Returns +true+ if Active Record is connected.
+ def connected?
+ connection_handler.connected?(connection_specification_name)
+ end
+
+ def remove_connection(name = nil)
+ name ||= @connection_specification_name if defined?(@connection_specification_name)
+ # if removing a connection that has a pool, we reset the
+ # connection_specification_name so it will use the parent
+ # pool.
+ if connection_handler.retrieve_connection_pool(name)
+ self.connection_specification_name = nil
+ end
+
+ connection_handler.remove_connection(name)
+ end
+
+ def clear_cache! # :nodoc:
+ connection.schema_cache.clear!
+ end
+
+ delegate :clear_active_connections!, :clear_reloadable_connections!,
+ :clear_all_connections!, to: :connection_handler
+ end
+end
diff --git a/activerecord/lib/active_record/core.rb b/activerecord/lib/active_record/core.rb
new file mode 100644
index 0000000000..8b97dbe5bf
--- /dev/null
+++ b/activerecord/lib/active_record/core.rb
@@ -0,0 +1,562 @@
+# frozen_string_literal: true
+
+require "active_support/core_ext/hash/indifferent_access"
+require "active_support/core_ext/string/filters"
+
+module ActiveRecord
+ module Core
+ extend ActiveSupport::Concern
+
+ included do
+ ##
+ # :singleton-method:
+ #
+ # Accepts a logger conforming to the interface of Log4r which is then
+ # passed on to any new database connections made and which can be
+ # retrieved on both a class and instance level by calling +logger+.
+ mattr_accessor :logger, instance_writer: false
+
+ ##
+ # Contains the database configuration - as is typically stored in config/database.yml -
+ # as a Hash.
+ #
+ # For example, the following database.yml...
+ #
+ # development:
+ # adapter: sqlite3
+ # database: db/development.sqlite3
+ #
+ # production:
+ # adapter: sqlite3
+ # database: db/production.sqlite3
+ #
+ # ...would result in ActiveRecord::Base.configurations looking like this:
+ #
+ # {
+ # 'development' => {
+ # 'adapter' => 'sqlite3',
+ # 'database' => 'db/development.sqlite3'
+ # },
+ # 'production' => {
+ # 'adapter' => 'sqlite3',
+ # 'database' => 'db/production.sqlite3'
+ # }
+ # }
+ def self.configurations=(config)
+ @@configurations = ActiveRecord::ConnectionHandling::MergeAndResolveDefaultUrlConfig.new(config).resolve
+ end
+ self.configurations = {}
+
+ # Returns fully resolved configurations hash
+ def self.configurations
+ @@configurations
+ end
+
+ ##
+ # :singleton-method:
+ # Determines whether to use Time.utc (using :utc) or Time.local (using :local) when pulling
+ # dates and times from the database. This is set to :utc by default.
+ mattr_accessor :default_timezone, instance_writer: false, default: :utc
+
+ ##
+ # :singleton-method:
+ # Specifies the format to use when dumping the database schema with Rails'
+ # Rakefile. If :sql, the schema is dumped as (potentially database-
+ # specific) SQL statements. If :ruby, the schema is dumped as an
+ # ActiveRecord::Schema file which can be loaded into any database that
+ # supports migrations. Use :ruby if you want to have different database
+ # adapters for, e.g., your development and test environments.
+ mattr_accessor :schema_format, instance_writer: false, default: :ruby
+
+ ##
+ # :singleton-method:
+ # Specifies if an error should be raised if the query has an order being
+ # ignored when doing batch queries. Useful in applications where the
+ # scope being ignored is error-worthy, rather than a warning.
+ mattr_accessor :error_on_ignored_order, instance_writer: false, default: false
+
+ def self.error_on_ignored_order_or_limit
+ ActiveSupport::Deprecation.warn(<<-MSG.squish)
+ The flag error_on_ignored_order_or_limit is deprecated. Limits are
+ now supported. Please use error_on_ignored_order instead.
+ MSG
+ error_on_ignored_order
+ end
+
+ def error_on_ignored_order_or_limit
+ self.class.error_on_ignored_order_or_limit
+ end
+
+ def self.error_on_ignored_order_or_limit=(value)
+ ActiveSupport::Deprecation.warn(<<-MSG.squish)
+ The flag error_on_ignored_order_or_limit is deprecated. Limits are
+ now supported. Please use error_on_ignored_order= instead.
+ MSG
+ self.error_on_ignored_order = value
+ end
+
+ ##
+ # :singleton-method:
+ # Specify whether or not to use timestamps for migration versions
+ mattr_accessor :timestamped_migrations, instance_writer: false, default: true
+
+ ##
+ # :singleton-method:
+ # Specify whether schema dump should happen at the end of the
+ # db:migrate rake task. This is true by default, which is useful for the
+ # development environment. This should ideally be false in the production
+ # environment where dumping schema is rarely needed.
+ mattr_accessor :dump_schema_after_migration, instance_writer: false, default: true
+
+ ##
+ # :singleton-method:
+ # Specifies which database schemas to dump when calling db:structure:dump.
+ # If the value is :schema_search_path (the default), any schemas listed in
+ # schema_search_path are dumped. Use :all to dump all schemas regardless
+ # of schema_search_path, or a string of comma separated schemas for a
+ # custom list.
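+ #
+ # For example (illustrative): ActiveRecord::Base.dump_schemas = "public,audit"
+ # would dump only those two schemas.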
+ mattr_accessor :dump_schemas, instance_writer: false, default: :schema_search_path
+
+ ##
+ # :singleton-method:
+ # Specify a threshold for the size of query result sets. If the number of
+ # records in the set exceeds the threshold, a warning is logged. This can
+ # be used to identify queries which load thousands of records and
+ # potentially cause memory bloat.
+ mattr_accessor :warn_on_records_fetched_greater_than, instance_writer: false
+
+ mattr_accessor :maintain_test_schema, instance_accessor: false
+
+ mattr_accessor :belongs_to_required_by_default, instance_accessor: false
+
+ class_attribute :default_connection_handler, instance_writer: false
+
+ def self.connection_handler
+ ActiveRecord::RuntimeRegistry.connection_handler || default_connection_handler
+ end
+
+ def self.connection_handler=(handler)
+ ActiveRecord::RuntimeRegistry.connection_handler = handler
+ end
+
+ self.default_connection_handler = ConnectionAdapters::ConnectionHandler.new
+ end
+
+ module ClassMethods
+ def allocate
+ define_attribute_methods
+ super
+ end
+
+ def initialize_find_by_cache # :nodoc:
+ @find_by_statement_cache = { true => {}.extend(Mutex_m), false => {}.extend(Mutex_m) }
+ end
+
+ def inherited(child_class) # :nodoc:
+ # initialize cache at class definition for thread safety
+ child_class.initialize_find_by_cache
+ super
+ end
+
+ def find(*ids) # :nodoc:
+ # We don't have cache keys for this stuff yet
+ return super unless ids.length == 1
+ return super if block_given? ||
+ primary_key.nil? ||
+ scope_attributes? ||
+ columns_hash.include?(inheritance_column)
+
+ id = ids.first
+
+ return super if id.kind_of?(Array) ||
+ id.is_a?(ActiveRecord::Base)
+
+ key = primary_key
+
+ statement = cached_find_by_statement(key) { |params|
+ where(key => params.bind).limit(1)
+ }
+
+ record = statement.execute([id], self, connection).first
+ unless record
+ raise RecordNotFound.new("Couldn't find #{name} with '#{primary_key}'=#{id}",
+ name, primary_key, id)
+ end
+ record
+ rescue ::RangeError
+ raise RecordNotFound.new("Couldn't find #{name} with an out of range value for '#{primary_key}'",
+ name, primary_key)
+ end
+
+ def find_by(*args) # :nodoc:
+ return super if scope_attributes? || reflect_on_all_aggregations.any?
+
+ hash = args.first
+
+ return super if !(Hash === hash) || hash.values.any? { |v|
+ v.nil? || Array === v || Hash === v || Relation === v || Base === v
+ }
+
+ # We can't cache Post.find_by(author: david) ...yet
+ return super unless hash.keys.all? { |k| columns_hash.has_key?(k.to_s) }
+
+ keys = hash.keys
+
+ statement = cached_find_by_statement(keys) { |params|
+ wheres = keys.each_with_object({}) { |param, o|
+ o[param] = params.bind
+ }
+ where(wheres).limit(1)
+ }
+ begin
+ statement.execute(hash.values, self, connection).first
+ rescue TypeError
+ raise ActiveRecord::StatementInvalid
+ rescue ::RangeError
+ nil
+ end
+ end
+
+ def find_by!(*args) # :nodoc:
+ find_by(*args) || raise(RecordNotFound.new("Couldn't find #{name}", name))
+ end
+
+ def initialize_generated_modules # :nodoc:
+ generated_association_methods
+ end
+
+ def generated_association_methods
+ @generated_association_methods ||= begin
+ mod = const_set(:GeneratedAssociationMethods, Module.new)
+ private_constant :GeneratedAssociationMethods
+ include mod
+
+ mod
+ end
+ end
+
+ # Returns a string like 'Post(id:integer, title:string, body:text)'
+ def inspect
+ if self == Base
+ super
+ elsif abstract_class?
+ "#{super}(abstract)"
+ elsif !connected?
+ "#{super} (call '#{super}.connection' to establish a connection)"
+ elsif table_exists?
+ attr_list = attribute_types.map { |name, type| "#{name}: #{type.type}" } * ", "
+ "#{super}(#{attr_list})"
+ else
+ "#{super}(Table doesn't exist)"
+ end
+ end
+
+ # Overwrite the default class equality method to provide support for association proxies.
+ def ===(object)
+ object.is_a?(self)
+ end
+
+ # Returns an instance of <tt>Arel::Table</tt> loaded with the current table name.
+ #
+ # class Post < ActiveRecord::Base
+ # scope :published_and_commented, -> { published.and(arel_table[:comments_count].gt(0)) }
+ # end
+ def arel_table # :nodoc:
+ @arel_table ||= Arel::Table.new(table_name, type_caster: type_caster)
+ end
+
+ def arel_attribute(name, table = arel_table) # :nodoc:
+ name = attribute_alias(name) if attribute_alias?(name)
+ table[name]
+ end
+
+ def predicate_builder # :nodoc:
+ @predicate_builder ||= PredicateBuilder.new(table_metadata)
+ end
+
+ def type_caster # :nodoc:
+ TypeCaster::Map.new(self)
+ end
+
+ private
+
+ def cached_find_by_statement(key, &block)
+ cache = @find_by_statement_cache[connection.prepared_statements]
+ cache[key] || cache.synchronize {
+ cache[key] ||= StatementCache.create(connection, &block)
+ }
+ end
+
+ def relation
+ relation = Relation.create(self, arel_table, predicate_builder)
+
+ if finder_needs_type_condition? && !ignore_default_scope?
+ relation.where(type_condition).create_with(inheritance_column.to_s => sti_name)
+ else
+ relation
+ end
+ end
+
+ def table_metadata
+ TableMetadata.new(self, arel_table)
+ end
+ end
+
+ # New objects can be instantiated as either empty (pass no construction parameter) or pre-set with
+ # attributes but not yet saved (pass a hash with key names matching the associated table column names).
+ # In both instances, valid attribute keys are determined by the column names of the associated table --
+ # hence you can't have attributes that aren't part of the table columns.
+ #
+ # ==== Example:
+ # # Instantiates a single new object
+ # User.new(first_name: 'Jamie')
+ def initialize(attributes = nil)
+ self.class.define_attribute_methods
+ @attributes = self.class._default_attributes.deep_dup
+
+ init_internals
+ initialize_internals_callback
+
+ assign_attributes(attributes) if attributes
+
+ yield self if block_given?
+ _run_initialize_callbacks
+ end
+
+ # Initialize an empty model object from +coder+. +coder+ should be
+ # the result of previously encoding an Active Record model, using
+ # #encode_with.
+ #
+ # class Post < ActiveRecord::Base
+ # end
+ #
+ # old_post = Post.new(title: "hello world")
+ # coder = {}
+ # old_post.encode_with(coder)
+ #
+ # post = Post.allocate
+ # post.init_with(coder)
+ # post.title # => 'hello world'
+ def init_with(coder)
+ coder = LegacyYamlAdapter.convert(self.class, coder)
+ @attributes = self.class.yaml_encoder.decode(coder)
+
+ init_internals
+
+ @new_record = coder["new_record"]
+
+ self.class.define_attribute_methods
+
+ yield self if block_given?
+
+ _run_find_callbacks
+ _run_initialize_callbacks
+
+ self
+ end
+
+ ##
+ # :method: clone
+ # Identical to Ruby's clone method. This is a "shallow" copy. Be warned that your attributes are not copied.
+ # That means that modifying attributes of the clone will modify the original, since they will both point to the
+ # same attributes hash. If you need a copy of your attributes hash, please use the #dup method.
+ #
+ # user = User.first
+ # new_user = user.clone
+ # user.name # => "Bob"
+ # new_user.name = "Joe"
+ # user.name # => "Joe"
+ #
+ # user.object_id == new_user.object_id # => false
+ # user.name.object_id == new_user.name.object_id # => true
+ #
+ # user.name.object_id == user.dup.name.object_id # => false
+
+ ##
+ # :method: dup
+ # Duped objects have no id assigned and are treated as new records. Note
+ # that this is a "shallow" copy as it copies the object's attributes
+ # only, not its associations. The extent of a "deep" copy is application
+ # specific and is therefore left to the application to implement according
+ # to its need.
+ # The dup method does not preserve the timestamps (created|updated)_(at|on).
+
+ ##
+ def initialize_dup(other) # :nodoc:
+ @attributes = @attributes.deep_dup
+ @attributes.reset(self.class.primary_key)
+
+ _run_initialize_callbacks
+
+ @new_record = true
+ @destroyed = false
+
+ super
+ end
+
+ # Populate +coder+ with attributes about this record that should be
+ # serialized. The structure of +coder+ defined in this method is
+ # guaranteed to match the structure of +coder+ passed to the #init_with
+ # method.
+ #
+ # Example:
+ #
+ # class Post < ActiveRecord::Base
+ # end
+ # coder = {}
+ # Post.new.encode_with(coder)
+ # coder # => {"attributes" => {"id" => nil, ... }}
+ def encode_with(coder)
+ self.class.yaml_encoder.encode(@attributes, coder)
+ coder["new_record"] = new_record?
+ coder["active_record_yaml_version"] = 2
+ end
+
+ # Returns true if +comparison_object+ is the same exact object, or +comparison_object+
+ # is of the same type and +self+ has an ID and it is equal to +comparison_object.id+.
+ #
+ # Note that new records are different from any other record by definition, unless the
+ # other record is the receiver itself. Besides, if you fetch existing records with
+ # +select+ and leave the ID out, you're on your own; this predicate will return false.
+ #
+ # Note also that destroying a record preserves its ID in the model instance, so deleted
+ # models are still comparable.
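+ #
+ # For example (illustrative):
+ #
+ #   Post.find(1) == Post.find(1)   # => true
+ #   Post.new == Post.new           # => false (new records are only equal to themselves)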
+ def ==(comparison_object)
+ super ||
+ comparison_object.instance_of?(self.class) &&
+ !id.nil? &&
+ comparison_object.id == id
+ end
+ alias :eql? :==
+
+ # Delegates to id in order to allow two records of the same type and id to work with something like:
+ # [ Person.find(1), Person.find(2), Person.find(3) ] & [ Person.find(1), Person.find(4) ] # => [ Person.find(1) ]
+ def hash
+ if id
+ self.class.hash ^ id.hash
+ else
+ super
+ end
+ end
+
+ # Clone and freeze the attributes hash such that associations are still
+ # accessible, even on destroyed records, but cloned models will not be
+ # frozen.
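+ #
+ # A brief sketch (hypothetical +Post+ model):
+ #
+ # post = Post.first
+ # post.freeze
+ # post.frozen? # => true
+ # post.title = "new title" # raises an error, since the attributes hash is frozen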
+ def freeze
+ @attributes = @attributes.clone.freeze
+ self
+ end
+
+ # Returns +true+ if the attributes hash has been frozen.
+ def frozen?
+ @attributes.frozen?
+ end
+
+ # Allows records of the same type to be sorted, by comparing their keys (see #to_key).
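+ #
+ # For example (hypothetical +Person+ records):
+ #
+ # [Person.find(3), Person.find(1), Person.find(2)].sort.map(&:id) # => [1, 2, 3]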
+ def <=>(other_object)
+ if other_object.is_a?(self.class)
+ to_key <=> other_object.to_key
+ else
+ super
+ end
+ end
+
+ # Returns +true+ if the record is read only. Records loaded through joins with piggy-back
+ # attributes will be marked as read only since they cannot be saved.
+ def readonly?
+ @readonly
+ end
+
+ # Marks this record as read only.
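+ #
+ # For example (hypothetical +Post+ model):
+ #
+ # post = Post.first
+ # post.readonly!
+ # post.save # => raises ActiveRecord::ReadOnlyRecord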
+ def readonly!
+ @readonly = true
+ end
+
+ def connection_handler
+ self.class.connection_handler
+ end
+
+ # Returns the contents of the record as a nicely formatted string.
+ def inspect
+ # We check defined?(@attributes) not to issue warnings if the object is
+ # allocated but not initialized.
+ inspection = if defined?(@attributes) && @attributes
+ self.class.attribute_names.collect do |name|
+ if has_attribute?(name)
+ "#{name}: #{attribute_for_inspect(name)}"
+ end
+ end.compact.join(", ")
+ else
+ "not initialized"
+ end
+
+ "#<#{self.class} #{inspection}>"
+ end
+
+ # Takes a PP and prettily prints this record to it, allowing you to get a nice result from <tt>pp record</tt>
+ # when pp is required.
+ def pretty_print(pp)
+ return super if custom_inspect_method_defined?
+ pp.object_address_group(self) do
+ if defined?(@attributes) && @attributes
+ column_names = self.class.column_names.select { |name| has_attribute?(name) || new_record? }
+ pp.seplist(column_names, proc { pp.text "," }) do |column_name|
+ column_value = read_attribute(column_name)
+ pp.breakable " "
+ pp.group(1) do
+ pp.text column_name
+ pp.text ":"
+ pp.breakable
+ pp.pp column_value
+ end
+ end
+ else
+ pp.breakable " "
+ pp.text "not initialized"
+ end
+ end
+ end
+
+ # Returns a hash of the given methods with their names as keys and returned values as values.
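+ #
+ # For instance (hypothetical +Person+ record and attributes):
+ #
+ # person.slice(:name, :email)
+ # # => { "name" => "David", "email" => "david@example.com" }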
+ def slice(*methods)
+ Hash[methods.flatten.map! { |method| [method, public_send(method)] }].with_indifferent_access
+ end
+
+ private
+
+ # +Array#flatten+ will call +#to_ary+ (recursively) on each of the elements of
+ # the array, rescuing from the possible +NoMethodError+. If those elements are
+ # +ActiveRecord::Base+'s, then this triggers the various +method_missing+'s that we have,
+ # which significantly impacts performance.
+ #
+ # So we can avoid the +method_missing+ hit by explicitly defining +#to_ary+ as +nil+ here.
+ #
+ # See also http://tenderlovemaking.com/2011/06/28/til-its-ok-to-return-nil-from-to_ary.html
+ def to_ary
+ nil
+ end
+
+ def init_internals
+ @readonly = false
+ @destroyed = false
+ @marked_for_destruction = false
+ @destroyed_by_association = nil
+ @new_record = true
+ @_start_transaction_state = {}
+ @transaction_state = nil
+ end
+
+ def initialize_internals_callback
+ end
+
+ def thaw
+ if frozen?
+ @attributes = @attributes.dup
+ end
+ end
+
+ def custom_inspect_method_defined?
+ self.class.instance_method(:inspect).owner != ActiveRecord::Base.instance_method(:inspect).owner
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/counter_cache.rb b/activerecord/lib/active_record/counter_cache.rb
new file mode 100644
index 0000000000..5005d58f1c
--- /dev/null
+++ b/activerecord/lib/active_record/counter_cache.rb
@@ -0,0 +1,214 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ # = Active Record Counter Cache
+ module CounterCache
+ extend ActiveSupport::Concern
+
+ module ClassMethods
+ # Resets one or more counter caches to their correct value using an SQL
+ # count query. This is useful when adding new counter caches, or if the
+ # counter has been corrupted or modified directly by SQL.
+ #
+ # ==== Parameters
+ #
+ # * +id+ - The id of the object you wish to reset a counter on.
+ # * +counters+ - One or more association counters to reset. Association name or counter name can be given.
+ # * <tt>:touch</tt> - Touch timestamp columns when updating.
+ # Pass +true+ to touch +updated_at+ and/or +updated_on+. Pass a symbol to
+ # touch that column or an array of symbols to touch just those ones.
+ #
+ # ==== Examples
+ #
+ # # For the Post with id #1, reset the comments_count
+ # Post.reset_counters(1, :comments)
+ #
+ # # Like above, but also touch the +updated_at+ and/or +updated_on+
+ # # attributes.
+ # Post.reset_counters(1, :comments, touch: true)
+ def reset_counters(id, *counters, touch: nil)
+ object = find(id)
+
+ counters.each do |counter_association|
+ has_many_association = _reflect_on_association(counter_association)
+ unless has_many_association
+ has_many = reflect_on_all_associations(:has_many)
+ has_many_association = has_many.find { |association| association.counter_cache_column && association.counter_cache_column.to_sym == counter_association.to_sym }
+ counter_association = has_many_association.plural_name if has_many_association
+ end
+ raise ArgumentError, "'#{name}' has no association called '#{counter_association}'" unless has_many_association
+
+ if has_many_association.is_a? ActiveRecord::Reflection::ThroughReflection
+ has_many_association = has_many_association.through_reflection
+ end
+
+ foreign_key = has_many_association.foreign_key.to_s
+ child_class = has_many_association.klass
+ reflection = child_class._reflections.values.find { |e| e.belongs_to? && e.foreign_key.to_s == foreign_key && e.options[:counter_cache].present? }
+ counter_name = reflection.counter_cache_column
+
+ updates = { counter_name.to_sym => object.send(counter_association).count(:all) }
+ updates.merge!(touch_updates(touch)) if touch
+
+ unscoped.where(primary_key => object.id).update_all(updates)
+ end
+
+ return true
+ end
+
+ # A generic "counter updater" implementation, intended primarily to be
+ # used by #increment_counter and #decrement_counter, but which may also
+ # be useful on its own. It simply does a direct SQL update for the record
+ # with the given ID, altering the given hash of counters by the amount
+ # given by the corresponding value.
+ #
+ # ==== Parameters
+ #
+ # * +id+ - The id of the object you wish to update a counter on or an array of ids.
+ # * +counters+ - A Hash containing the names of the fields
+ # to update as keys and the amount to update the field by as values.
+ # * <tt>:touch</tt> option - Touch timestamp columns when updating.
+ # Pass +true+ to touch +updated_at+ and/or +updated_on+. Pass a symbol to
+ # touch that column or an array of symbols to touch just those ones.
+ #
+ # ==== Examples
+ #
+ # # For the Post with id of 5, decrement the comment_count by 1, and
+ # # increment the action_count by 1
+ # Post.update_counters 5, comment_count: -1, action_count: 1
+ # # Executes the following SQL:
+ # # UPDATE posts
+ # # SET comment_count = COALESCE(comment_count, 0) - 1,
+ # # action_count = COALESCE(action_count, 0) + 1
+ # # WHERE id = 5
+ #
+ # # For the Posts with id of 10 and 15, increment the comment_count by 1
+ # Post.update_counters [10, 15], comment_count: 1
+ # # Executes the following SQL:
+ # # UPDATE posts
+ # # SET comment_count = COALESCE(comment_count, 0) + 1
+ # # WHERE id IN (10, 15)
+ #
+ # # For the Posts with id of 10 and 15, increment the comment_count by 1
+ # # and update the updated_at value for each counter.
+ # Post.update_counters [10, 15], comment_count: 1, touch: true
+ # # Executes the following SQL:
+ # # UPDATE posts
+ # # SET comment_count = COALESCE(comment_count, 0) + 1,
+ # # `updated_at` = '2016-10-13T09:59:23-05:00'
+ # # WHERE id IN (10, 15)
+ def update_counters(id, counters)
+ touch = counters.delete(:touch)
+
+ updates = counters.map do |counter_name, value|
+ operator = value < 0 ? "-" : "+"
+ quoted_column = connection.quote_column_name(counter_name)
+ "#{quoted_column} = COALESCE(#{quoted_column}, 0) #{operator} #{value.abs}"
+ end
+
+ if touch
+ touch_updates = touch_updates(touch)
+ updates << sanitize_sql_for_assignment(touch_updates) unless touch_updates.empty?
+ end
+
+ unscoped.where(primary_key => id).update_all updates.join(", ")
+ end
+
+ # Increment a numeric field by one, via a direct SQL update.
+ #
+ # This method is used primarily for maintaining counter_cache columns that are
+ # used to store aggregate values. For example, a +DiscussionBoard+ may cache
+ # posts_count and comments_count to avoid running an SQL query to calculate the
+ # number of posts and comments there are, each time it is displayed.
+ #
+ # ==== Parameters
+ #
+ # * +counter_name+ - The name of the field that should be incremented.
+ # * +id+ - The id of the object that should be incremented or an array of ids.
+ # * <tt>:touch</tt> - Touch timestamp columns when updating.
+ # Pass +true+ to touch +updated_at+ and/or +updated_on+. Pass a symbol to
+ # touch that column or an array of symbols to touch just those ones.
+ #
+ # ==== Examples
+ #
+ # # Increment the posts_count column for the record with an id of 5
+ # DiscussionBoard.increment_counter(:posts_count, 5)
+ #
+ # # Increment the posts_count column for the record with an id of 5
+ # # and update the updated_at value.
+ # DiscussionBoard.increment_counter(:posts_count, 5, touch: true)
+ def increment_counter(counter_name, id, touch: nil)
+ update_counters(id, counter_name => 1, touch: touch)
+ end
+
+ # Decrement a numeric field by one, via a direct SQL update.
+ #
+ # This works the same as #increment_counter but reduces the column value by
+ # 1 instead of increasing it.
+ #
+ # ==== Parameters
+ #
+ # * +counter_name+ - The name of the field that should be decremented.
+ # * +id+ - The id of the object that should be decremented or an array of ids.
+ # * <tt>:touch</tt> - Touch timestamp columns when updating.
+ # Pass +true+ to touch +updated_at+ and/or +updated_on+. Pass a symbol to
+ # touch that column or an array of symbols to touch just those ones.
+ #
+ # ==== Examples
+ #
+ # # Decrement the posts_count column for the record with an id of 5
+ # DiscussionBoard.decrement_counter(:posts_count, 5)
+ #
+ # # Decrement the posts_count column for the record with an id of 5
+ # # and update the updated_at value.
+ # DiscussionBoard.decrement_counter(:posts_count, 5, touch: true)
+ def decrement_counter(counter_name, id, touch: nil)
+ update_counters(id, counter_name => -1, touch: touch)
+ end
+
+ private
+ def touch_updates(touch)
+ touch = timestamp_attributes_for_update_in_model if touch == true
+ touch_time = current_time_from_proper_timezone
+ Array(touch).map { |column| [ column, touch_time ] }.to_h
+ end
+ end
+
+ private
+
+ def _create_record(*)
+ id = super
+
+ each_counter_cached_associations do |association|
+ if send(association.reflection.name)
+ association.increment_counters
+ end
+ end
+
+ id
+ end
+
+ def destroy_row
+ affected_rows = super
+
+ if affected_rows > 0
+ each_counter_cached_associations do |association|
+ foreign_key = association.reflection.foreign_key.to_sym
+ unless destroyed_by_association && destroyed_by_association.foreign_key.to_sym == foreign_key
+ if send(association.reflection.name)
+ association.decrement_counters
+ end
+ end
+ end
+ end
+
+ affected_rows
+ end
+
+ def each_counter_cached_associations
+ _reflections.each do |name, reflection|
+ yield association(name.to_sym) if reflection.belongs_to? && reflection.counter_cache_column
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/define_callbacks.rb b/activerecord/lib/active_record/define_callbacks.rb
new file mode 100644
index 0000000000..2c8783dcc9
--- /dev/null
+++ b/activerecord/lib/active_record/define_callbacks.rb
@@ -0,0 +1,22 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ # This module exists because `ActiveRecord::AttributeMethods::Dirty` needs to
+ # define callbacks, but continue to have its version of `save` be the super
+ # method of `ActiveRecord::Callbacks`. This will be removed when the removal
+ # of deprecated code removes this need.
+ module DefineCallbacks
+ extend ActiveSupport::Concern
+
+ module ClassMethods # :nodoc:
+ include ActiveModel::Callbacks
+ end
+
+ included do
+ include ActiveModel::Validations::Callbacks
+
+ define_model_callbacks :initialize, :find, :touch, only: :after
+ define_model_callbacks :save, :create, :update, :destroy
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/dynamic_matchers.rb b/activerecord/lib/active_record/dynamic_matchers.rb
new file mode 100644
index 0000000000..3bb8c6f4e3
--- /dev/null
+++ b/activerecord/lib/active_record/dynamic_matchers.rb
@@ -0,0 +1,122 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ module DynamicMatchers #:nodoc:
+ private
+ def respond_to_missing?(name, _)
+ if self == Base
+ super
+ else
+ match = Method.match(self, name)
+ match && match.valid? || super
+ end
+ end
+
+ def method_missing(name, *arguments, &block)
+ match = Method.match(self, name)
+
+ if match && match.valid?
+ match.define
+ send(name, *arguments, &block)
+ else
+ super
+ end
+ end
+
+ class Method
+ @matchers = []
+
+ class << self
+ attr_reader :matchers
+
+ def match(model, name)
+ klass = matchers.find { |k| k.pattern.match?(name) }
+ klass.new(model, name) if klass
+ end
+
+ def pattern
+ @pattern ||= /\A#{prefix}_([_a-zA-Z]\w*)#{suffix}\Z/
+ end
+
+ def prefix
+ raise NotImplementedError
+ end
+
+ def suffix
+ ""
+ end
+ end
+
+ attr_reader :model, :name, :attribute_names
+
+ def initialize(model, name)
+ @model = model
+ @name = name.to_s
+ @attribute_names = @name.match(self.class.pattern)[1].split("_and_")
+ @attribute_names.map! { |n| @model.attribute_aliases[n] || n }
+ end
+
+ def valid?
+ attribute_names.all? { |name| model.columns_hash[name] || model.reflect_on_aggregation(name.to_sym) }
+ end
+
+ def define
+ model.class_eval <<-CODE, __FILE__, __LINE__ + 1
+ def self.#{name}(#{signature})
+ #{body}
+ end
+ CODE
+ end
+
+ private
+
+ def body
+ "#{finder}(#{attributes_hash})"
+ end
+
+ # The parameters in the signature may have reserved Ruby words; in order
+ # to prevent errors, we start each param name with `_`.
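+ #
+ # For example, a hypothetical `find_by_begin_and_end` matcher would generate
+ # (roughly) the following class method:
+ #
+ # def self.find_by_begin_and_end(_begin, _end)
+ # find_by({:begin => _begin,:end => _end})
+ # end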
+ def signature
+ attribute_names.map { |name| "_#{name}" }.join(", ")
+ end
+
+ # Given that the parameters start with `_`, the finder needs to use the
+ # same parameter name.
+ def attributes_hash
+ "{" + attribute_names.map { |name| ":#{name} => _#{name}" }.join(",") + "}"
+ end
+
+ def finder
+ raise NotImplementedError
+ end
+ end
+
+ class FindBy < Method
+ Method.matchers << self
+
+ def self.prefix
+ "find_by"
+ end
+
+ def finder
+ "find_by"
+ end
+ end
+
+ class FindByBang < Method
+ Method.matchers << self
+
+ def self.prefix
+ "find_by"
+ end
+
+ def self.suffix
+ "!"
+ end
+
+ def finder
+ "find_by!"
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/enum.rb b/activerecord/lib/active_record/enum.rb
new file mode 100644
index 0000000000..f373b98035
--- /dev/null
+++ b/activerecord/lib/active_record/enum.rb
@@ -0,0 +1,241 @@
+# frozen_string_literal: true
+
+require "active_support/core_ext/object/deep_dup"
+
+module ActiveRecord
+ # Declare an enum attribute where the values map to integers in the database,
+ # but can be queried by name. Example:
+ #
+ # class Conversation < ActiveRecord::Base
+ # enum status: [ :active, :archived ]
+ # end
+ #
+ # # conversation.update! status: 0
+ # conversation.active!
+ # conversation.active? # => true
+ # conversation.status # => "active"
+ #
+ # # conversation.update! status: 1
+ # conversation.archived!
+ # conversation.archived? # => true
+ # conversation.status # => "archived"
+ #
+ # # conversation.status = 1
+ # conversation.status = "archived"
+ #
+ # conversation.status = nil
+ # conversation.status.nil? # => true
+ # conversation.status # => nil
+ #
+ # Scopes based on the allowed values of the enum field will be provided
+ # as well. With the above example:
+ #
+ # Conversation.active
+ # Conversation.archived
+ #
+ # Of course, you can also query them directly if the scopes don't fit your
+ # needs:
+ #
+ # Conversation.where(status: [:active, :archived])
+ # Conversation.where.not(status: :active)
+ #
+ # You can set the default value from the database declaration, like:
+ #
+ # create_table :conversations do |t|
+ # t.column :status, :integer, default: 0
+ # end
+ #
+ # Good practice is to let the first declared status be the default.
+ #
+ # Finally, it's also possible to explicitly map the relation between attribute and
+ # database integer with a hash:
+ #
+ # class Conversation < ActiveRecord::Base
+ # enum status: { active: 0, archived: 1 }
+ # end
+ #
+ # Note that when an array is used, the implicit mapping from the values to database
+ # integers is derived from the order the values appear in the array. In the example,
+ # <tt>:active</tt> is mapped to +0+ as it's the first element, and <tt>:archived</tt>
+ # is mapped to +1+. In general, the +i+-th element is mapped to <tt>i-1</tt> in the
+ # database.
+ #
+ # Therefore, once a value is added to the enum array, its position in the array must
+ # be maintained, and new values should only be added to the end of the array. To
+ # remove unused values, the explicit hash syntax should be used.
+ #
+ # In rare circumstances you might need to access the mapping directly.
+ # The mappings are exposed through a class method with the pluralized attribute
+ # name, which returns the mapping in a +HashWithIndifferentAccess+:
+ #
+ # Conversation.statuses[:active] # => 0
+ # Conversation.statuses["archived"] # => 1
+ #
+ # Use that class method when you need to know the ordinal value of an enum.
+ # For example, you can use that when manually building SQL strings:
+ #
+ # Conversation.where("status <> ?", Conversation.statuses[:archived])
+ #
+ # You can use the +:_prefix+ or +:_suffix+ options when you need to define
+ # multiple enums with same values. If the passed value is +true+, the methods
+ # are prefixed/suffixed with the name of the enum. It is also possible to
+ # supply a custom value:
+ #
+ # class Conversation < ActiveRecord::Base
+ # enum status: [:active, :archived], _suffix: true
+ # enum comments_status: [:active, :inactive], _prefix: :comments
+ # end
+ #
+ # With the above example, the bang and predicate methods along with the
+ # associated scopes are now prefixed and/or suffixed accordingly:
+ #
+ # conversation.active_status!
+ # conversation.archived_status? # => false
+ #
+ # conversation.comments_inactive!
+ # conversation.comments_active? # => false
+
+ module Enum
+ def self.extended(base) # :nodoc:
+ base.class_attribute(:defined_enums, instance_writer: false, default: {})
+ end
+
+ def inherited(base) # :nodoc:
+ base.defined_enums = defined_enums.deep_dup
+ super
+ end
+
+ class EnumType < Type::Value # :nodoc:
+ delegate :type, to: :subtype
+
+ def initialize(name, mapping, subtype)
+ @name = name
+ @mapping = mapping
+ @subtype = subtype
+ end
+
+ def cast(value)
+ return if value.blank?
+
+ if mapping.has_key?(value)
+ value.to_s
+ elsif mapping.has_value?(value)
+ mapping.key(value)
+ else
+ assert_valid_value(value)
+ end
+ end
+
+ def deserialize(value)
+ return if value.nil?
+ mapping.key(subtype.deserialize(value))
+ end
+
+ def serialize(value)
+ mapping.fetch(value, value)
+ end
+
+ def assert_valid_value(value)
+ unless value.blank? || mapping.has_key?(value) || mapping.has_value?(value)
+ raise ArgumentError, "'#{value}' is not a valid #{name}"
+ end
+ end
+
+ # TODO Change this to private once we've dropped Ruby 2.2 support.
+ # Workaround for Ruby 2.2 "private attribute?" warning.
+ protected
+
+ attr_reader :name, :mapping, :subtype
+ end
+
+ def enum(definitions)
+ klass = self
+ enum_prefix = definitions.delete(:_prefix)
+ enum_suffix = definitions.delete(:_suffix)
+ definitions.each do |name, values|
+ # statuses = { }
+ enum_values = ActiveSupport::HashWithIndifferentAccess.new
+ name = name.to_s
+
+ # def self.statuses() statuses end
+ detect_enum_conflict!(name, name.pluralize, true)
+ singleton_class.send(:define_method, name.pluralize) { enum_values }
+ defined_enums[name] = enum_values
+
+ detect_enum_conflict!(name, name)
+ detect_enum_conflict!(name, "#{name}=")
+
+ attr = attribute_alias?(name) ? attribute_alias(name) : name
+ decorate_attribute_type(attr, :enum) do |subtype|
+ EnumType.new(attr, enum_values, subtype)
+ end
+
+ _enum_methods_module.module_eval do
+ pairs = values.respond_to?(:each_pair) ? values.each_pair : values.each_with_index
+ pairs.each do |label, value|
+ if enum_prefix == true
+ prefix = "#{name}_"
+ elsif enum_prefix
+ prefix = "#{enum_prefix}_"
+ end
+ if enum_suffix == true
+ suffix = "_#{name}"
+ elsif enum_suffix
+ suffix = "_#{enum_suffix}"
+ end
+
+ value_method_name = "#{prefix}#{label}#{suffix}"
+ enum_values[label] = value
+ label = label.to_s
+
+ # def active?() status == "active" end
+ klass.send(:detect_enum_conflict!, name, "#{value_method_name}?")
+ define_method("#{value_method_name}?") { self[attr] == label }
+
+ # def active!() update!(status: 0) end
+ klass.send(:detect_enum_conflict!, name, "#{value_method_name}!")
+ define_method("#{value_method_name}!") { update!(attr => value) }
+
+ # scope :active, -> { where(status: 0) }
+ klass.send(:detect_enum_conflict!, name, value_method_name, true)
+ klass.scope value_method_name, -> { where(attr => value) }
+ end
+ end
+ end
+ end
+
+ private
+ def _enum_methods_module
+ @_enum_methods_module ||= begin
+ mod = Module.new
+ include mod
+ mod
+ end
+ end
+
+ ENUM_CONFLICT_MESSAGE = \
+ "You tried to define an enum named \"%{enum}\" on the model \"%{klass}\", but " \
+ "this will generate a %{type} method \"%{method}\", which is already defined " \
+ "by %{source}."
+
+ def detect_enum_conflict!(enum_name, method_name, klass_method = false)
+ if klass_method && dangerous_class_method?(method_name)
+ raise_conflict_error(enum_name, method_name, type: "class")
+ elsif !klass_method && dangerous_attribute_method?(method_name)
+ raise_conflict_error(enum_name, method_name)
+ elsif !klass_method && method_defined_within?(method_name, _enum_methods_module, Module)
+ raise_conflict_error(enum_name, method_name, source: "another enum")
+ end
+ end
+
+ def raise_conflict_error(enum_name, method_name, type: "instance", source: "Active Record")
+ raise ArgumentError, ENUM_CONFLICT_MESSAGE % {
+ enum: enum_name,
+ klass: name,
+ type: type,
+ method: method_name,
+ source: source
+ }
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/errors.rb b/activerecord/lib/active_record/errors.rb
new file mode 100644
index 0000000000..933589d4b1
--- /dev/null
+++ b/activerecord/lib/active_record/errors.rb
@@ -0,0 +1,337 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ # = Active Record Errors
+ #
+ # Generic Active Record exception class.
+ class ActiveRecordError < StandardError
+ end
+
+ # Raised when the single-table inheritance mechanism fails to locate the subclass
+ # (for example due to improper usage of the column that
+ # {ActiveRecord::Base.inheritance_column}[rdoc-ref:ModelSchema::ClassMethods#inheritance_column]
+ # points to).
+ class SubclassNotFound < ActiveRecordError
+ end
+
+ # Raised when an object assigned to an association has an incorrect type.
+ #
+ # class Ticket < ActiveRecord::Base
+ # has_many :patches
+ # end
+ #
+ # class Patch < ActiveRecord::Base
+ # belongs_to :ticket
+ # end
+ #
+ # # Comments are not patches, this assignment raises AssociationTypeMismatch.
+ # @ticket.patches << Comment.new(content: "Please attach tests to your patch.")
+ class AssociationTypeMismatch < ActiveRecordError
+ end
+
+ # Raised when an unserialized object's type does not match the type specified for the serializable field.
+ class SerializationTypeMismatch < ActiveRecordError
+ end
+
+ # Raised when the adapter is not specified on the connection (or when the
+ # configuration file +config/database.yml+ is missing the adapter field).
+ class AdapterNotSpecified < ActiveRecordError
+ end
+
+ # Raised when Active Record cannot find the database adapter specified in
+ # +config/database.yml+ or programmatically.
+ class AdapterNotFound < ActiveRecordError
+ end
+
+ # Raised when the connection to the database could not be established (for example when
+ # {ActiveRecord::Base.connection=}[rdoc-ref:ConnectionHandling#connection]
+ # is given a +nil+ object).
+ class ConnectionNotEstablished < ActiveRecordError
+ end
+
+ # Raised when Active Record cannot find a record by given id or set of ids.
+ class RecordNotFound < ActiveRecordError
+ attr_reader :model, :primary_key, :id
+
+ def initialize(message = nil, model = nil, primary_key = nil, id = nil)
+ @primary_key = primary_key
+ @model = model
+ @id = id
+
+ super(message)
+ end
+ end
+
+ # Raised by {ActiveRecord::Base#save!}[rdoc-ref:Persistence#save!] and
+ # {ActiveRecord::Base.create!}[rdoc-ref:Persistence::ClassMethods#create!]
+ # methods when a record is invalid and cannot be saved.
+ class RecordNotSaved < ActiveRecordError
+ attr_reader :record
+
+ def initialize(message = nil, record = nil)
+ @record = record
+ super(message)
+ end
+ end
+
+ # Raised by {ActiveRecord::Base#destroy!}[rdoc-ref:Persistence#destroy!]
+ # when a call to {#destroy}[rdoc-ref:Persistence#destroy!]
+ # would return false.
+ #
+ # begin
+ # complex_operation_that_internally_calls_destroy!
+ # rescue ActiveRecord::RecordNotDestroyed => invalid
+ # puts invalid.record.errors
+ # end
+ #
+ class RecordNotDestroyed < ActiveRecordError
+ attr_reader :record
+
+ def initialize(message = nil, record = nil)
+ @record = record
+ super(message)
+ end
+ end
+
+ # Superclass for all database execution errors.
+ #
+ # Wraps the underlying database error as +cause+.
+ class StatementInvalid < ActiveRecordError
+ def initialize(message = nil)
+ super(message || $!.try(:message))
+ end
+ end
+
+ # Defunct wrapper class kept for compatibility.
+ # StatementInvalid wraps the original exception now.
+ class WrappedDatabaseException < StatementInvalid
+ end
+
+ # Raised when a record cannot be inserted or updated because it would violate a uniqueness constraint.
+ class RecordNotUnique < WrappedDatabaseException
+ end
+
+ # Raised when a record cannot be inserted or updated because it references a non-existent record.
+ class InvalidForeignKey < WrappedDatabaseException
+ end
+
+ # Raised when a foreign key constraint cannot be added because the column type does not match the referenced column type.
+ class MismatchedForeignKey < StatementInvalid
+ def initialize(adapter = nil, message: nil, table: nil, foreign_key: nil, target_table: nil, primary_key: nil)
+ @adapter = adapter
+ if table
+ msg = <<-EOM.strip_heredoc
+ Column `#{foreign_key}` on table `#{table}` has a type of `#{column_type(table, foreign_key)}`.
+ This does not match column `#{primary_key}` on `#{target_table}`, which has type `#{column_type(target_table, primary_key)}`.
+ To resolve this issue, change the type of the `#{foreign_key}` column on `#{table}` to be :integer. (For example `t.integer #{foreign_key}`).
+ EOM
+ else
+ msg = <<-EOM
+ There is a mismatch between the foreign key and primary key column types.
+ Verify that the foreign key column type and the primary key of the associated table match types.
+ EOM
+ end
+ if message
+ msg << "\nOriginal message: #{message}"
+ end
+ super(msg)
+ end
+
+ private
+ def column_type(table, column)
+ @adapter.columns(table).detect { |c| c.name == column }.sql_type
+ end
+ end
+
+ # Raised when a record cannot be inserted or updated because it would violate a not null constraint.
+ class NotNullViolation < StatementInvalid
+ end
+
+ # Raised when a record cannot be inserted or updated because a value is too long for the column type.
+ class ValueTooLong < StatementInvalid
+ end
+
+ # Raised when a value is out of range for the column type.
+ class RangeError < StatementInvalid
+ end
+
+ # Raised when the number of bind variables in the statement given to the +:condition+ key
+ # (for example, when using the {ActiveRecord::Base.find}[rdoc-ref:FinderMethods#find] method)
+ # does not match the number of expected values supplied.
+ #
+ # For example, when there are two placeholders with only one value supplied:
+ #
+ # Location.where("lat = ? AND lng = ?", 53.7362)
+ class PreparedStatementInvalid < ActiveRecordError
+ end
+
+ # Raised when a given database does not exist.
+ class NoDatabaseError < StatementInvalid
+ end
+
+ # Raised when Postgres returns 'cached plan must not change result type' and
+ # we cannot retry gracefully (e.g. inside a transaction)
+ class PreparedStatementCacheExpired < StatementInvalid
+ end
+
+ # Raised on an attempt to save a stale record. A record is stale when it has been saved by another
+ # query after it was instantiated; for example, when two users edit the same wiki page and one starts
+ # editing and saves the page before the other.
+ #
+ # Read more about optimistic locking in ActiveRecord::Locking module
+ # documentation.
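+ #
+ # A rough sketch (hypothetical +Person+ model with a +lock_version+ column for optimistic locking):
+ #
+ # p1 = Person.find(1)
+ # p2 = Person.find(1)
+ #
+ # p1.update(first_name: "Michael")
+ # p2.update(first_name: "should fail") # => raises ActiveRecord::StaleObjectError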
+ class StaleObjectError < ActiveRecordError
+ attr_reader :record, :attempted_action
+
+ def initialize(record = nil, attempted_action = nil)
+ if record && attempted_action
+ @record = record
+ @attempted_action = attempted_action
+ super("Attempted to #{attempted_action} a stale object: #{record.class.name}.")
+ else
+ super("Stale object error.")
+ end
+ end
+ end
+
+ # Raised when an association is configured improperly, or when the user tries to use
+ # offset and limit together with
+ # {ActiveRecord::Base.has_many}[rdoc-ref:Associations::ClassMethods#has_many] or
+ # {ActiveRecord::Base.has_and_belongs_to_many}[rdoc-ref:Associations::ClassMethods#has_and_belongs_to_many]
+ # associations.
+ class ConfigurationError < ActiveRecordError
+ end
+
+ # Raised on an attempt to update a record that is instantiated as read only.
+ class ReadOnlyRecord < ActiveRecordError
+ end
+
+ # {ActiveRecord::Base.transaction}[rdoc-ref:Transactions::ClassMethods#transaction]
+ # uses this exception to distinguish a deliberate rollback from other exceptional situations.
+ # Normally, raising an exception will cause the
+ # {.transaction}[rdoc-ref:Transactions::ClassMethods#transaction] method to roll back
+ # the database transaction *and* pass on the exception. But if you raise an
+ # ActiveRecord::Rollback exception, then the database transaction will be rolled back,
+ # without passing on the exception.
+ #
+ # For example, you could do this in your controller to roll back a transaction:
+ #
+ # class BooksController < ActionController::Base
+ # def create
+ # Book.transaction do
+ # book = Book.new(params[:book])
+ # book.save!
+ # if today_is_friday?
+ # # The system must fail on Friday so that our support department
+ # # won't be out of a job. We silently roll back this transaction
+ # # without telling the user.
+ # raise ActiveRecord::Rollback, "Call tech support!"
+ # end
+ # end
+ # # ActiveRecord::Rollback is the only exception that won't be passed on
+ # # by ActiveRecord::Base.transaction, so this line will still be reached
+ # # even on Friday.
+ # redirect_to root_url
+ # end
+ # end
+ class Rollback < ActiveRecordError
+ end
+
+ # Raised when an attribute has a name reserved by Active Record (when the attribute
+ # has the name of one of the Active Record instance methods).
+ class DangerousAttributeError < ActiveRecordError
+ end
+
+ # Raised when unknown attributes are supplied via mass assignment.
+ UnknownAttributeError = ActiveModel::UnknownAttributeError
+
+ # Raised when an error occurs while doing a mass assignment to an attribute through the
+ # {ActiveRecord::Base#attributes=}[rdoc-ref:AttributeAssignment#attributes=] method.
+ # The exception has an +attribute+ property that is the name of the offending attribute.
+ class AttributeAssignmentError < ActiveRecordError
+ attr_reader :exception, :attribute
+
+ def initialize(message = nil, exception = nil, attribute = nil)
+ super(message)
+ @exception = exception
+ @attribute = attribute
+ end
+ end
+
+ # Raised when there are multiple errors while doing a mass assignment through the
+ # {ActiveRecord::Base#attributes=}[rdoc-ref:AttributeAssignment#attributes=]
+ # method. The exception has an +errors+ property that contains an array of AttributeAssignmentError
+ # objects, each corresponding to the error while assigning to an attribute.
+ class MultiparameterAssignmentErrors < ActiveRecordError
+ attr_reader :errors
+
+ def initialize(errors = nil)
+ @errors = errors
+ end
+ end
+
+ # Raised when a primary key is needed, but not specified in the schema or model.
+ class UnknownPrimaryKey < ActiveRecordError
+ attr_reader :model
+
+ def initialize(model = nil, description = nil)
+ if model
+ message = "Unknown primary key for table #{model.table_name} in model #{model}."
+ message += "\n#{description}" if description
+ @model = model
+ super(message)
+ else
+ super("Unknown primary key.")
+ end
+ end
+ end
+
+ # Raised when a relation cannot be mutated because it's already loaded.
+ #
+ # class Task < ActiveRecord::Base
+ # end
+ #
+ # relation = Task.all
+ # relation.loaded? # => true
+ #
+ # # Methods which try to mutate a loaded relation fail.
+ # relation.where!(title: 'TODO') # => ActiveRecord::ImmutableRelation
+ # relation.limit!(5) # => ActiveRecord::ImmutableRelation
+ class ImmutableRelation < ActiveRecordError
+ end
+
+ # TransactionIsolationError will be raised under the following conditions:
+ #
+ # * The adapter does not support setting the isolation level
+ # * You are joining an existing open transaction
+ # * You are creating a nested (savepoint) transaction
+ #
+ # The mysql2 and postgresql adapters support setting the transaction isolation level.
+ class TransactionIsolationError < ActiveRecordError
+ end
+
+ # TransactionRollbackError will be raised when a transaction is rolled
+ # back by the database due to a serialization failure or a deadlock.
+ #
+ # See the following:
+ #
+ # * https://www.postgresql.org/docs/current/static/transaction-iso.html
+ # * https://dev.mysql.com/doc/refman/5.7/en/error-messages-server.html#error_er_lock_deadlock
+ class TransactionRollbackError < StatementInvalid
+ end
+
+ # SerializationFailure will be raised when a transaction is rolled
+ # back by the database due to a serialization failure.
+ class SerializationFailure < TransactionRollbackError
+ end
+
+ # Deadlocked will be raised when a transaction is rolled
+ # back by the database when a deadlock is encountered.
+ class Deadlocked < TransactionRollbackError
+ end
+
+ # IrreversibleOrderError is raised when a relation's order is too complex for
+ # +reverse_order+ to automatically reverse.
+ class IrreversibleOrderError < ActiveRecordError
+ end
+end
diff --git a/activerecord/lib/active_record/explain.rb b/activerecord/lib/active_record/explain.rb
new file mode 100644
index 0000000000..8bb54a24b7
--- /dev/null
+++ b/activerecord/lib/active_record/explain.rb
@@ -0,0 +1,50 @@
+# frozen_string_literal: true
+
+require_relative "explain_registry"
+
+module ActiveRecord
+ module Explain
+ # Executes the block with the collect flag enabled. Queries are collected
+ # asynchronously by the subscriber and returned.
+ def collecting_queries_for_explain # :nodoc:
+ ExplainRegistry.collect = true
+ yield
+ ExplainRegistry.queries
+ ensure
+ ExplainRegistry.reset
+ end
+
+ # Makes the adapter execute EXPLAIN for the tuples of queries and bindings.
+ # Returns a formatted string ready to be logged.
+ def exec_explain(queries) # :nodoc:
+ str = queries.map do |sql, binds|
+ msg = "EXPLAIN for: #{sql}".dup
+ unless binds.empty?
+ msg << " "
+ msg << binds.map { |attr| render_bind(attr) }.inspect
+ end
+ msg << "\n"
+ msg << connection.explain(sql, binds)
+ end.join("\n")
+
+ # Overriding inspect to be more human readable, especially in the console.
+ def str.inspect
+ self
+ end
+
+ str
+ end
+
+ private
+
+ def render_bind(attr)
+ value = if attr.type.binary? && attr.value
+ "<#{attr.value_for_database.to_s.bytesize} bytes of binary data>"
+ else
+ connection.type_cast(attr.value_for_database)
+ end
+
+ [attr.name, value]
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/explain_registry.rb b/activerecord/lib/active_record/explain_registry.rb
new file mode 100644
index 0000000000..7fd078941a
--- /dev/null
+++ b/activerecord/lib/active_record/explain_registry.rb
@@ -0,0 +1,32 @@
+# frozen_string_literal: true
+
+require "active_support/per_thread_registry"
+
+module ActiveRecord
+ # This is a thread locals registry for EXPLAIN. For example
+ #
+ # ActiveRecord::ExplainRegistry.queries
+ #
+ # returns the collected queries local to the current thread.
+ #
+ # See the documentation of ActiveSupport::PerThreadRegistry
+ # for further details.
+ class ExplainRegistry # :nodoc:
+ extend ActiveSupport::PerThreadRegistry
+
+ attr_accessor :queries, :collect
+
+ def initialize
+ reset
+ end
+
+ def collect?
+ @collect
+ end
+
+ def reset
+ @collect = false
+ @queries = []
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/explain_subscriber.rb b/activerecord/lib/active_record/explain_subscriber.rb
new file mode 100644
index 0000000000..9252fa3fed
--- /dev/null
+++ b/activerecord/lib/active_record/explain_subscriber.rb
@@ -0,0 +1,34 @@
+# frozen_string_literal: true
+
+require "active_support/notifications"
+require_relative "explain_registry"
+
+module ActiveRecord
+ class ExplainSubscriber # :nodoc:
+ def start(name, id, payload)
+ # unused
+ end
+
+ def finish(name, id, payload)
+ if ExplainRegistry.collect? && !ignore_payload?(payload)
+ ExplainRegistry.queries << payload.values_at(:sql, :binds)
+ end
+ end
+
+ # SCHEMA queries cannot be EXPLAINed, nor do we want to run EXPLAIN on
+ # our own EXPLAINs, no matter how loopingly beautiful that would be.
+ #
+ # On the other hand, we want to monitor the performance of our real database
+ # queries, not the performance of the access to the query cache.
+ IGNORED_PAYLOADS = %w(SCHEMA EXPLAIN)
+ EXPLAINED_SQLS = /\A\s*(with|select|update|delete|insert)\b/i
+ def ignore_payload?(payload)
+ payload[:exception] ||
+ payload[:cached] ||
+ IGNORED_PAYLOADS.include?(payload[:name]) ||
+ payload[:sql] !~ EXPLAINED_SQLS
+ end
+
+ ActiveSupport::Notifications.subscribe("sql.active_record", new)
+ end
+end
diff --git a/activerecord/lib/active_record/fixture_set/file.rb b/activerecord/lib/active_record/fixture_set/file.rb
new file mode 100644
index 0000000000..f1ea0e022f
--- /dev/null
+++ b/activerecord/lib/active_record/fixture_set/file.rb
@@ -0,0 +1,82 @@
+# frozen_string_literal: true
+
+require "erb"
+require "yaml"
+
+module ActiveRecord
+ class FixtureSet
+ class File # :nodoc:
+ include Enumerable
+
+ ##
+ # Open a fixture file named +file+. When called with a block, the block
+ # is called with the filehandle and the filehandle is automatically closed
+ # when the block finishes.
+ def self.open(file)
+ x = new file
+ block_given? ? yield(x) : x
+ end
+
+ def initialize(file)
+ @file = file
+ end
+
+ def each(&block)
+ rows.each(&block)
+ end
+
+ def model_class
+ config_row["model_class"]
+ end
+
+ private
+ def rows
+ @rows ||= raw_rows.reject { |fixture_name, _| fixture_name == "_fixture" }
+ end
+
+ def config_row
+ @config_row ||= begin
+ row = raw_rows.find { |fixture_name, _| fixture_name == "_fixture" }
+ if row
+ row.last
+ else
+ { "model_class" => nil }
+ end
+ end
+ end
+
+ def raw_rows
+ @raw_rows ||= begin
+ data = YAML.load(render(IO.read(@file)))
+ data ? validate(data).to_a : []
+ rescue ArgumentError, Psych::SyntaxError => error
+ raise Fixture::FormatError, "a YAML error occurred parsing #{@file}. Please note that YAML must be consistently indented using spaces. Tabs are not allowed. Please have a look at http://www.yaml.org/faq.html\nThe exact error was:\n #{error.class}: #{error}", error.backtrace
+ end
+ end
+
+ def prepare_erb(content)
+ erb = ERB.new(content)
+ erb.filename = @file
+ erb
+ end
+
+ def render(content)
+ context = ActiveRecord::FixtureSet::RenderContext.create_subclass.new
+ prepare_erb(content).result(context.get_binding)
+ end
+
+ # Validate our unmarshalled data.
+ def validate(data)
+ unless Hash === data || YAML::Omap === data
+ raise Fixture::FormatError, "fixture is not a hash: #{@file}"
+ end
+
+ invalid = data.reject { |_, row| Hash === row }
+ if invalid.any?
+ raise Fixture::FormatError, "fixture key is not a hash: #{@file}, keys: #{invalid.keys.inspect}"
+ end
+ data
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/fixtures.rb b/activerecord/lib/active_record/fixtures.rb
new file mode 100644
index 0000000000..ef302fc0a0
--- /dev/null
+++ b/activerecord/lib/active_record/fixtures.rb
@@ -0,0 +1,1070 @@
+# frozen_string_literal: true
+
+require "erb"
+require "yaml"
+require "zlib"
+require "set"
+require "active_support/dependencies"
+require "active_support/core_ext/digest/uuid"
+require_relative "fixture_set/file"
+require_relative "errors"
+
+module ActiveRecord
+ class FixtureClassNotFound < ActiveRecord::ActiveRecordError #:nodoc:
+ end
+
+ # \Fixtures are a way of organizing data that you want to test against; in short, sample data.
+ #
+ # They are stored in YAML files, one file per model, which are placed in the directory
+ # appointed by <tt>ActiveSupport::TestCase.fixture_path=(path)</tt> (this is automatically
+ # configured for Rails, so you can just put your files in <tt><your-rails-app>/test/fixtures/</tt>).
+ # The fixture file ends with the +.yml+ file extension, for example:
+ # <tt><your-rails-app>/test/fixtures/web_sites.yml</tt>.
+ #
+ # The format of a fixture file looks like this:
+ #
+ # rubyonrails:
+ # id: 1
+ # name: Ruby on Rails
+ # url: http://www.rubyonrails.org
+ #
+ # google:
+ # id: 2
+ # name: Google
+ # url: http://www.google.com
+ #
+ # This fixture file includes two fixtures. Each YAML fixture (i.e. record) is given a name and
+ # is followed by an indented list of key/value pairs in the "key: value" format. Records are
+ # separated by a blank line for your viewing pleasure.
+ #
+ # Note: Fixtures are unordered. If you want ordered fixtures, use the omap YAML type.
+ # See http://yaml.org/type/omap.html
+ # for the specification. You will need ordered fixtures when you have foreign key constraints
+ # on keys in the same table. This is commonly needed for tree structures. Example:
+ #
+ # --- !omap
+ # - parent:
+ # id: 1
+ # parent_id: NULL
+ # title: Parent
+ # - child:
+ # id: 2
+ # parent_id: 1
+ # title: Child
+ #
+ # = Using Fixtures in Test Cases
+ #
+ # Since fixtures are a testing construct, we use them in our unit and functional tests. There
+ # are two ways to use the fixtures, but first let's take a look at a sample unit test:
+ #
+ # require 'test_helper'
+ #
+ # class WebSiteTest < ActiveSupport::TestCase
+ # test "web_site_count" do
+ # assert_equal 2, WebSite.count
+ # end
+ # end
+ #
+ # By default, +test_helper.rb+ will load all of your fixtures into your test
+ # database, so this test will succeed.
+ #
+ # The testing environment will automatically load all the fixtures into the database before each
+ # test. To ensure consistent data, the environment deletes the fixtures before running the load.
+ #
+ # In addition to being available in the database, the fixture's data may also be accessed by
+ # using a special dynamic method, which has the same name as the model.
+ #
+ # Passing in a fixture name to this dynamic method returns the fixture matching this name:
+ #
+ # test "find one" do
+ # assert_equal "Ruby on Rails", web_sites(:rubyonrails).name
+ # end
+ #
+ # Passing in multiple fixture names returns all fixtures matching these names:
+ #
+ # test "find all by name" do
+ # assert_equal 2, web_sites(:rubyonrails, :google).length
+ # end
+ #
+ # Passing in no arguments returns all fixtures:
+ #
+ # test "find all" do
+ # assert_equal 2, web_sites.length
+ # end
+ #
+ # Passing in any fixture name that does not exist will raise <tt>StandardError</tt>:
+ #
+ # test "find by name that does not exist" do
+ # assert_raise(StandardError) { web_sites(:reddit) }
+ # end
+ #
+ # Alternatively, you may enable auto-instantiation of the fixture data. For instance, take the
+ # following tests:
+ #
+ # test "find_alt_method_1" do
+ # assert_equal "Ruby on Rails", @web_sites['rubyonrails']['name']
+ # end
+ #
+ # test "find_alt_method_2" do
+ # assert_equal "Ruby on Rails", @rubyonrails.name
+ # end
+ #
+ # In order to use these methods to access fixtured data within your test cases, you must specify one of the
+ # following in your ActiveSupport::TestCase-derived class:
+ #
+ # - to fully enable instantiated fixtures (enable alternate methods #1 and #2 above)
+ # self.use_instantiated_fixtures = true
+ #
+ # - create only the hash for the fixtures, do not 'find' each instance (enable alternate method #1 only)
+ # self.use_instantiated_fixtures = :no_instances
+ #
+ # Using either of these alternate methods incurs a performance hit, as the fixtured data must be fully
+ # traversed in the database to create the fixture hash and/or instance variables. This is expensive for
+ # large sets of fixtured data.
+ #
+ # = Dynamic fixtures with ERB
+ #
+ # Sometimes you don't care about the content of the fixtures as much as you care about the volume.
+ # In these cases, you can mix ERB in with your YAML fixtures to create a bunch of fixtures for load
+ # testing, like:
+ #
+ # <% 1.upto(1000) do |i| %>
+ # fix_<%= i %>:
+ # id: <%= i %>
+ # name: guy_<%= i %>
+ # <% end %>
+ #
+ # This will create 1000 very simple fixtures.
+ #
+ # Using ERB, you can also inject dynamic values into your fixtures with inserts like
+ # <tt><%= Date.today.strftime("%Y-%m-%d") %></tt>.
+ # This is, however, a feature to be used with some caution. The point of fixtures is that they're
+ # stable units of predictable sample data. If you feel that you need to inject dynamic values, then
+ # perhaps you should reexamine whether your application is properly testable. Hence, dynamic values
+ # in fixtures are to be considered a code smell.
+ #
+ # Helper methods defined in a fixture will not be available in other fixtures, to prevent
+ # unwanted inter-test dependencies. Methods used by multiple fixtures should be defined in a module
+ # that is included in ActiveRecord::FixtureSet.context_class.
+ #
+ # - define a helper method in `test_helper.rb`
+ # module FixtureFileHelpers
+ # def file_sha(path)
+ # Digest::SHA2.hexdigest(File.read(Rails.root.join('test/fixtures', path)))
+ # end
+ # end
+ # ActiveRecord::FixtureSet.context_class.include FixtureFileHelpers
+ #
+ # - use the helper method in a fixture
+ # photo:
+ # name: kitten.png
+ # sha: <%= file_sha 'files/kitten.png' %>
+ #
+ # = Transactional Tests
+ #
+ # Test cases can use begin+rollback to isolate their changes to the database instead of having to
+ # delete+insert for every test case.
+ #
+ # class FooTest < ActiveSupport::TestCase
+ # self.use_transactional_tests = true
+ #
+ # test "godzilla" do
+ # assert !Foo.all.empty?
+ # Foo.destroy_all
+ # assert Foo.all.empty?
+ # end
+ #
+ # test "godzilla aftermath" do
+ # assert !Foo.all.empty?
+ # end
+ # end
+ #
+ # If you preload your test database with all fixture data (probably in the rake task) and use
+ # transactional tests, then you may omit all fixtures declarations in your test cases since
+ # all the data's already there and every case rolls back its changes.
+ #
+ # In order to use instantiated fixtures with preloaded data, set +self.pre_loaded_fixtures+ to
+ # true. This will provide access to fixture data for every table that has been loaded through
+ # fixtures (depending on the value of +use_instantiated_fixtures+).
+ #
+ # When *not* to use transactional tests:
+ #
+ # 1. You're testing whether a transaction works correctly. Nested transactions don't commit until
+ # all parent transactions commit, particularly, the fixtures transaction which is begun in setup
+ # and rolled back in teardown. Thus, you won't be able to verify
+ # the results of your transaction until Active Record supports nested transactions or savepoints (in progress).
+ # 2. Your database does not support transactions. Every Active Record database supports transactions except MySQL MyISAM.
+ # Use InnoDB, MaxDB, or NDB instead.
+ #
+ # = Advanced Fixtures
+ #
+ # Fixtures that don't specify an ID get some extra features:
+ #
+ # * Stable, autogenerated IDs
+ # * Label references for associations (belongs_to, has_one, has_many)
+ # * HABTM associations as inline lists
+ #
+ # There are some more advanced features available even if the id is specified:
+ #
+ # * Autofilled timestamp columns
+ # * Fixture label interpolation
+ # * Support for YAML defaults
+ #
+ # == Stable, Autogenerated IDs
+ #
+ # Here, have a monkey fixture:
+ #
+ # george:
+ # id: 1
+ # name: George the Monkey
+ #
+ # reginald:
+ # id: 2
+ # name: Reginald the Pirate
+ #
+ # Each of these fixtures has two unique identifiers: one for the database
+ # and one for the humans. Why don't we generate the primary key instead?
+ # Hashing each fixture's label yields a consistent ID:
+ #
+ # george: # generated id: 503576764
+ # name: George the Monkey
+ #
+ # reginald: # generated id: 324201669
+ # name: Reginald the Pirate
+ #
+ # Active Record looks at the fixture's model class, discovers the correct
+ # primary key, and generates it right before inserting the fixture
+ # into the database.
+ #
+ # The generated ID for a given label is constant, so we can discover
+ # any fixture's ID without loading anything, as long as we know the label.
+ #
+ # == Label references for associations (belongs_to, has_one, has_many)
+ #
+ # Specifying foreign keys in fixtures can be very fragile, not to
+ # mention difficult to read. Since Active Record can figure out the ID of
+ # any fixture from its label, you can specify FK's by label instead of ID.
+ #
+ # === belongs_to
+ #
+ # Let's break out some more monkeys and pirates.
+ #
+ # ### in pirates.yml
+ #
+ # reginald:
+ # id: 1
+ # name: Reginald the Pirate
+ # monkey_id: 1
+ #
+ # ### in monkeys.yml
+ #
+ # george:
+ # id: 1
+ # name: George the Monkey
+ # pirate_id: 1
+ #
+ # Add a few more monkeys and pirates and break this into multiple files,
+ # and it gets pretty hard to keep track of what's going on. Let's
+ # use labels instead of IDs:
+ #
+ # ### in pirates.yml
+ #
+ # reginald:
+ # name: Reginald the Pirate
+ # monkey: george
+ #
+ # ### in monkeys.yml
+ #
+ # george:
+ # name: George the Monkey
+ # pirate: reginald
+ #
+ # Pow! All is made clear. Active Record reflects on the fixture's model class,
+ # finds all the +belongs_to+ associations, and allows you to specify
+ # a target *label* for the *association* (monkey: george) rather than
+ # a target *id* for the *FK* (<tt>monkey_id: 1</tt>).
+ #
+ # ==== Polymorphic belongs_to
+ #
+ # Supporting polymorphic relationships is a little bit more complicated, since
+ # Active Record needs to know what type your association is pointing at. Something
+ # like this should look familiar:
+ #
+ # ### in fruit.rb
+ #
+ # belongs_to :eater, polymorphic: true
+ #
+ # ### in fruits.yml
+ #
+ # apple:
+ # id: 1
+ # name: apple
+ # eater_id: 1
+ # eater_type: Monkey
+ #
+ # Can we do better? You bet!
+ #
+ # apple:
+ # eater: george (Monkey)
+ #
+ # Just provide the polymorphic target type and Active Record will take care of the rest.
+ #
+ # === has_and_belongs_to_many
+ #
+ # Time to give our monkey some fruit.
+ #
+ # ### in monkeys.yml
+ #
+ # george:
+ # id: 1
+ # name: George the Monkey
+ #
+ # ### in fruits.yml
+ #
+ # apple:
+ # id: 1
+ # name: apple
+ #
+ # orange:
+ # id: 2
+ # name: orange
+ #
+ # grape:
+ # id: 3
+ # name: grape
+ #
+ # ### in fruits_monkeys.yml
+ #
+ # apple_george:
+ # fruit_id: 1
+ # monkey_id: 1
+ #
+ # orange_george:
+ # fruit_id: 2
+ # monkey_id: 1
+ #
+ # grape_george:
+ # fruit_id: 3
+ # monkey_id: 1
+ #
+ # Let's make the HABTM fixture go away.
+ #
+ # ### in monkeys.yml
+ #
+ # george:
+ # id: 1
+ # name: George the Monkey
+ # fruits: apple, orange, grape
+ #
+ # ### in fruits.yml
+ #
+ # apple:
+ # name: apple
+ #
+ # orange:
+ # name: orange
+ #
+ # grape:
+ # name: grape
+ #
+ # Zap! No more fruits_monkeys.yml file. We've specified the list of fruits
+ # on George's fixture, but we could've just as easily specified a list
+ # of monkeys on each fruit. As with +belongs_to+, Active Record reflects on
+ # the fixture's model class and discovers the +has_and_belongs_to_many+
+ # associations.
+ #
+ # == Autofilled Timestamp Columns
+ #
+ # If your table/model specifies any of Active Record's
+ # standard timestamp columns (+created_at+, +created_on+, +updated_at+, +updated_on+),
+ # they will automatically be set to <tt>Time.now</tt>.
+ #
+ # If you've set specific values, they'll be left alone.
+ #
+ # == Fixture label interpolation
+ #
+ # The label of the current fixture is always available as a column value:
+ #
+ # geeksomnia:
+ # name: Geeksomnia's Account
+ # subdomain: $LABEL
+ # email: $LABEL@email.com
+ #
+ # Also, sometimes (like when porting older join table fixtures) you'll need
+ # to be able to get a hold of the identifier for a given label. ERB
+ # to the rescue:
+ #
+ # george_reginald:
+ # monkey_id: <%= ActiveRecord::FixtureSet.identify(:reginald) %>
+ # pirate_id: <%= ActiveRecord::FixtureSet.identify(:george) %>
+ #
+ # == Support for YAML defaults
+ #
+ # You can set and reuse defaults in your fixtures YAML file.
+ # This is the same technique used in the +database.yml+ file to specify
+ # defaults:
+ #
+ # DEFAULTS: &DEFAULTS
+ # created_on: <%= 3.weeks.ago.to_s(:db) %>
+ #
+ # first:
+ # name: Smurf
+ # <<: *DEFAULTS
+ #
+ # second:
+ # name: Fraggle
+ # <<: *DEFAULTS
+ #
+ # Any fixture labeled "DEFAULTS" is safely ignored.
+ #
+ # == Configure the fixture model class
+ #
+ # It's possible to set the fixture's model class directly in the YAML file.
+ # This is helpful when fixtures are loaded outside tests and
+ # +set_fixture_class+ is not available (e.g.
+ # when running <tt>rails db:fixtures:load</tt>).
+ #
+ # _fixture:
+ # model_class: User
+ # david:
+ # name: David
+ #
+ # Any fixtures labeled "_fixture" are safely ignored.
+ class FixtureSet
+ #--
+ # An instance of FixtureSet is normally stored in a single YAML file and
+ # possibly in a folder with the same name.
+ #++
+
+ MAX_ID = 2**30 - 1
+
+ @@all_cached_fixtures = Hash.new { |h, k| h[k] = {} }
+
+ def self.default_fixture_model_name(fixture_set_name, config = ActiveRecord::Base) # :nodoc:
+ config.pluralize_table_names ?
+ fixture_set_name.singularize.camelize :
+ fixture_set_name.camelize
+ end
+
+ def self.default_fixture_table_name(fixture_set_name, config = ActiveRecord::Base) # :nodoc:
+ "#{ config.table_name_prefix }"\
+ "#{ fixture_set_name.tr('/', '_') }"\
+ "#{ config.table_name_suffix }".to_sym
+ end
+
+ def self.reset_cache
+ @@all_cached_fixtures.clear
+ end
+
+ def self.cache_for_connection(connection)
+ @@all_cached_fixtures[connection]
+ end
+
+ def self.fixture_is_cached?(connection, table_name)
+ cache_for_connection(connection)[table_name]
+ end
+
+ def self.cached_fixtures(connection, keys_to_fetch = nil)
+ if keys_to_fetch
+ cache_for_connection(connection).values_at(*keys_to_fetch)
+ else
+ cache_for_connection(connection).values
+ end
+ end
+
+ def self.cache_fixtures(connection, fixtures_map)
+ cache_for_connection(connection).update(fixtures_map)
+ end
+
+ def self.instantiate_fixtures(object, fixture_set, load_instances = true)
+ if load_instances
+ fixture_set.each do |fixture_name, fixture|
+ begin
+ object.instance_variable_set "@#{fixture_name}", fixture.find
+ rescue FixtureClassNotFound
+ nil
+ end
+ end
+ end
+ end
+
+ def self.instantiate_all_loaded_fixtures(object, load_instances = true)
+ all_loaded_fixtures.each_value do |fixture_set|
+ instantiate_fixtures(object, fixture_set, load_instances)
+ end
+ end
+
+ cattr_accessor :all_loaded_fixtures, default: {}
+
+ class ClassCache
+ def initialize(class_names, config)
+ @class_names = class_names.stringify_keys
+ @config = config
+
+ # Remove string values that aren't constants or subclasses of AR
+ @class_names.delete_if { |klass_name, klass| !insert_class(@class_names, klass_name, klass) }
+ end
+
+ def [](fs_name)
+ @class_names.fetch(fs_name) {
+ klass = default_fixture_model(fs_name, @config).safe_constantize
+ insert_class(@class_names, fs_name, klass)
+ }
+ end
+
+ private
+
+ def insert_class(class_names, name, klass)
+ # We only want to deal with AR objects.
+ if klass && klass < ActiveRecord::Base
+ class_names[name] = klass
+ else
+ class_names[name] = nil
+ end
+ end
+
+ def default_fixture_model(fs_name, config)
+ ActiveRecord::FixtureSet.default_fixture_model_name(fs_name, config)
+ end
+ end
+
+ def self.create_fixtures(fixtures_directory, fixture_set_names, class_names = {}, config = ActiveRecord::Base)
+ fixture_set_names = Array(fixture_set_names).map(&:to_s)
+ class_names = ClassCache.new class_names, config
+
+ # FIXME: Apparently JK uses this.
+ connection = block_given? ? yield : ActiveRecord::Base.connection
+
+ files_to_read = fixture_set_names.reject { |fs_name|
+ fixture_is_cached?(connection, fs_name)
+ }
+
+ unless files_to_read.empty?
+ connection.disable_referential_integrity do
+ fixtures_map = {}
+
+ fixture_sets = files_to_read.map do |fs_name|
+ klass = class_names[fs_name]
+ conn = klass ? klass.connection : connection
+ fixtures_map[fs_name] = new( # ActiveRecord::FixtureSet.new
+ conn,
+ fs_name,
+ klass,
+ ::File.join(fixtures_directory, fs_name))
+ end
+
+ update_all_loaded_fixtures fixtures_map
+
+ connection.transaction(requires_new: true) do
+ deleted_tables = Hash.new { |h, k| h[k] = Set.new }
+ fixture_sets.each do |fs|
+ conn = fs.model_class.respond_to?(:connection) ? fs.model_class.connection : connection
+ table_rows = fs.table_rows
+
+ table_rows.each_key do |table|
+ unless deleted_tables[conn].include? table
+ conn.delete "DELETE FROM #{conn.quote_table_name(table)}", "Fixture Delete"
+ end
+ deleted_tables[conn] << table
+ end
+
+ table_rows.each do |fixture_set_name, rows|
+ conn.insert_fixtures(rows, fixture_set_name)
+ end
+
+ # Cap primary key sequences to max(pk).
+ if conn.respond_to?(:reset_pk_sequence!)
+ conn.reset_pk_sequence!(fs.table_name)
+ end
+ end
+ end
+
+ cache_fixtures(connection, fixtures_map)
+ end
+ end
+ cached_fixtures(connection, fixture_set_names)
+ end
+
+ # Returns a consistent, platform-independent identifier for +label+.
+ # Integer identifiers are values less than 2^30. UUIDs are RFC 4122 version 5 SHA-1 hashes.
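+  #
+  # For example (an illustrative sketch; the <tt>:george</tt> label comes from the
+  # fixture examples above), the same label always maps to the same identifier:
+  #
+  #   ActiveRecord::FixtureSet.identify(:george)        # => the same Integer on every run
+  #   ActiveRecord::FixtureSet.identify(:george, :uuid) # => the same RFC 4122 version 5 UUID string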
+ def self.identify(label, column_type = :integer)
+ if column_type == :uuid
+ Digest::UUID.uuid_v5(Digest::UUID::OID_NAMESPACE, label.to_s)
+ else
+ Zlib.crc32(label.to_s) % MAX_ID
+ end
+ end
+
+ # Superclass for the evaluation contexts used by ERB fixtures.
+ def self.context_class
+ @context_class ||= Class.new
+ end
+
+ def self.update_all_loaded_fixtures(fixtures_map) # :nodoc:
+ all_loaded_fixtures.update(fixtures_map)
+ end
+
+ attr_reader :table_name, :name, :fixtures, :model_class, :config
+
+ def initialize(connection, name, class_name, path, config = ActiveRecord::Base)
+ @name = name
+ @path = path
+ @config = config
+
+ self.model_class = class_name
+
+ @fixtures = read_fixture_files(path)
+
+ @connection = connection
+
+ @table_name = (model_class.respond_to?(:table_name) ?
+ model_class.table_name :
+ self.class.default_fixture_table_name(name, config))
+ end
+
+ def [](x)
+ fixtures[x]
+ end
+
+ def []=(k, v)
+ fixtures[k] = v
+ end
+
+ def each(&block)
+ fixtures.each(&block)
+ end
+
+ def size
+ fixtures.size
+ end
+
+ # Returns a hash of rows to be inserted. The key is the table, the value is
+  # a list of rows to insert into that table.
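+  #
+  # For illustration with hypothetical data, a "pirates" fixture set would
+  # produce something of the shape below (rows for the join tables of
+  # <tt>has_many :through</tt> associations are added under their own keys):
+  #
+  #   { "pirates" => [{ "name" => "Redbeard", "id" => 1234 }] }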
+ def table_rows
+ now = config.default_timezone == :utc ? Time.now.utc : Time.now
+
+ # allow a standard key to be used for doing defaults in YAML
+ fixtures.delete("DEFAULTS")
+
+ # track any join tables we need to insert later
+ rows = Hash.new { |h, table| h[table] = [] }
+
+ rows[table_name] = fixtures.map do |label, fixture|
+ row = fixture.to_hash
+
+ if model_class
+ # fill in timestamp columns if they aren't specified and the model is set to record_timestamps
+ if model_class.record_timestamps
+ timestamp_column_names.each do |c_name|
+ row[c_name] = now unless row.key?(c_name)
+ end
+ end
+
+ # interpolate the fixture label
+ row.each do |key, value|
+ row[key] = value.gsub("$LABEL", label.to_s) if value.is_a?(String)
+ end
+
+ # generate a primary key if necessary
+ if has_primary_key_column? && !row.include?(primary_key_name)
+ row[primary_key_name] = ActiveRecord::FixtureSet.identify(label, primary_key_type)
+ end
+
+ # Resolve enums
+ model_class.defined_enums.each do |name, values|
+ if row.include?(name)
+ row[name] = values.fetch(row[name], row[name])
+ end
+ end
+
+ # If STI is used, find the correct subclass for association reflection
+ reflection_class =
+ if row.include?(inheritance_column_name)
+ row[inheritance_column_name].constantize rescue model_class
+ else
+ model_class
+ end
+
+ reflection_class._reflections.each_value do |association|
+ case association.macro
+ when :belongs_to
+ # Do not replace association name with association foreign key if they are named the same
+ fk_name = (association.options[:foreign_key] || "#{association.name}_id").to_s
+
+ if association.name.to_s != fk_name && value = row.delete(association.name.to_s)
+ if association.polymorphic? && value.sub!(/\s*\(([^\)]*)\)\s*$/, "")
+ # support polymorphic belongs_to as "label (Type)"
+ row[association.foreign_type] = $1
+ end
+
+ fk_type = reflection_class.type_for_attribute(fk_name).type
+ row[fk_name] = ActiveRecord::FixtureSet.identify(value, fk_type)
+ end
+ when :has_many
+ if association.options[:through]
+ add_join_records(rows, row, HasManyThroughProxy.new(association))
+ end
+ end
+ end
+ end
+
+ row
+ end
+ rows
+ end
+
+ class ReflectionProxy # :nodoc:
+ def initialize(association)
+ @association = association
+ end
+
+ def join_table
+ @association.join_table
+ end
+
+ def name
+ @association.name
+ end
+
+ def primary_key_type
+ @association.klass.type_for_attribute(@association.klass.primary_key).type
+ end
+ end
+
+ class HasManyThroughProxy < ReflectionProxy # :nodoc:
+ def rhs_key
+ @association.foreign_key
+ end
+
+ def lhs_key
+ @association.through_reflection.foreign_key
+ end
+
+ def join_table
+ @association.through_reflection.table_name
+ end
+ end
+
+ private
+ def primary_key_name
+ @primary_key_name ||= model_class && model_class.primary_key
+ end
+
+ def primary_key_type
+ @primary_key_type ||= model_class && model_class.type_for_attribute(model_class.primary_key).type
+ end
+
+ def add_join_records(rows, row, association)
+ # This is the case when the join table has no fixtures file
+ if (targets = row.delete(association.name.to_s))
+ table_name = association.join_table
+ column_type = association.primary_key_type
+ lhs_key = association.lhs_key
+ rhs_key = association.rhs_key
+
+ targets = targets.is_a?(Array) ? targets : targets.split(/\s*,\s*/)
+ rows[table_name].concat targets.map { |target|
+ { lhs_key => row[primary_key_name],
+ rhs_key => ActiveRecord::FixtureSet.identify(target, column_type) }
+ }
+ end
+ end
+
+ def has_primary_key_column?
+ @has_primary_key_column ||= primary_key_name &&
+ model_class.columns.any? { |c| c.name == primary_key_name }
+ end
+
+ def timestamp_column_names
+ @timestamp_column_names ||=
+ %w(created_at created_on updated_at updated_on) & column_names
+ end
+
+ def inheritance_column_name
+ @inheritance_column_name ||= model_class && model_class.inheritance_column
+ end
+
+ def column_names
+ @column_names ||= @connection.columns(@table_name).collect(&:name)
+ end
+
+ def model_class=(class_name)
+ if class_name.is_a?(Class) # TODO: Should be an AR::Base type class, or any?
+ @model_class = class_name
+ else
+ @model_class = class_name.safe_constantize if class_name
+ end
+ end
+
+ # Loads the fixtures from the YAML file at +path+.
+  # If the file sets +model_class+ and this instance's +model_class+ is not
+  # already set, the value from the file is used.
+ def read_fixture_files(path)
+ yaml_files = Dir["#{path}/{**,*}/*.yml"].select { |f|
+ ::File.file?(f)
+ } + [yaml_file_path(path)]
+
+ yaml_files.each_with_object({}) do |file, fixtures|
+ FixtureSet::File.open(file) do |fh|
+ self.model_class ||= fh.model_class if fh.model_class
+ fh.each do |fixture_name, row|
+ fixtures[fixture_name] = ActiveRecord::Fixture.new(row, model_class)
+ end
+ end
+ end
+ end
+
+ def yaml_file_path(path)
+ "#{path}.yml"
+ end
+ end
+
+ class Fixture #:nodoc:
+ include Enumerable
+
+ class FixtureError < StandardError #:nodoc:
+ end
+
+ class FormatError < FixtureError #:nodoc:
+ end
+
+ attr_reader :model_class, :fixture
+
+ def initialize(fixture, model_class)
+ @fixture = fixture
+ @model_class = model_class
+ end
+
+ def class_name
+ model_class.name if model_class
+ end
+
+ def each
+ fixture.each { |item| yield item }
+ end
+
+ def [](key)
+ fixture[key]
+ end
+
+ alias :to_hash :fixture
+
+ def find
+ if model_class
+ model_class.unscoped do
+ model_class.find(fixture[model_class.primary_key])
+ end
+ else
+ raise FixtureClassNotFound, "No class attached to find."
+ end
+ end
+ end
+end
+
+module ActiveRecord
+ module TestFixtures
+ extend ActiveSupport::Concern
+
+ def before_setup # :nodoc:
+ setup_fixtures
+ super
+ end
+
+ def after_teardown # :nodoc:
+ super
+ teardown_fixtures
+ end
+
+ included do
+ class_attribute :fixture_path, instance_writer: false
+ class_attribute :fixture_table_names, default: []
+ class_attribute :fixture_class_names, default: {}
+ class_attribute :use_transactional_tests, default: true
+ class_attribute :use_instantiated_fixtures, default: false # true, false, or :no_instances
+ class_attribute :pre_loaded_fixtures, default: false
+ class_attribute :config, default: ActiveRecord::Base
+ end
+
+ module ClassMethods
+ # Sets the model class for a fixture when the class name cannot be inferred from the fixture name.
+ #
+ # Examples:
+ #
+ # set_fixture_class some_fixture: SomeModel,
+ # 'namespaced/fixture' => Another::Model
+ #
+  # The keys must be the fixture names, which coincide with the short paths to the fixture files.
+ def set_fixture_class(class_names = {})
+ self.fixture_class_names = fixture_class_names.merge(class_names.stringify_keys)
+ end
+
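+  # Declares which fixture sets this test case uses and defines an accessor
+  # method for each one. Passing +:all+ loads every fixture file found under
+  # +fixture_path+. For example:
+  #
+  #   fixtures :pirates
+  #   fixtures :all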
+ def fixtures(*fixture_set_names)
+ if fixture_set_names.first == :all
+ fixture_set_names = Dir["#{fixture_path}/{**,*}/*.{yml}"].uniq
+ fixture_set_names.map! { |f| f[(fixture_path.to_s.size + 1)..-5] }
+ else
+ fixture_set_names = fixture_set_names.flatten.map(&:to_s)
+ end
+
+ self.fixture_table_names |= fixture_set_names
+ setup_fixture_accessors(fixture_set_names)
+ end
+
+ def setup_fixture_accessors(fixture_set_names = nil)
+ fixture_set_names = Array(fixture_set_names || fixture_table_names)
+ methods = Module.new do
+ fixture_set_names.each do |fs_name|
+ fs_name = fs_name.to_s
+ accessor_name = fs_name.tr("/", "_").to_sym
+
+ define_method(accessor_name) do |*fixture_names|
+ force_reload = fixture_names.pop if fixture_names.last == true || fixture_names.last == :reload
+ return_single_record = fixture_names.size == 1
+ fixture_names = @loaded_fixtures[fs_name].fixtures.keys if fixture_names.empty?
+
+ @fixture_cache[fs_name] ||= {}
+
+ instances = fixture_names.map do |f_name|
+ f_name = f_name.to_s if f_name.is_a?(Symbol)
+ @fixture_cache[fs_name].delete(f_name) if force_reload
+
+ if @loaded_fixtures[fs_name][f_name]
+ @fixture_cache[fs_name][f_name] ||= @loaded_fixtures[fs_name][f_name].find
+ else
+ raise StandardError, "No fixture named '#{f_name}' found for fixture set '#{fs_name}'"
+ end
+ end
+
+ return_single_record ? instances.first : instances
+ end
+ private accessor_name
+ end
+ end
+ include methods
+ end
+
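+  # Marks the given test methods as managing their own transactions, so the
+  # automatic wrapping transaction is skipped for them (see #run_in_transaction?).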
+ def uses_transaction(*methods)
+ @uses_transaction = [] unless defined?(@uses_transaction)
+ @uses_transaction.concat methods.map(&:to_s)
+ end
+
+ def uses_transaction?(method)
+ @uses_transaction = [] unless defined?(@uses_transaction)
+ @uses_transaction.include?(method.to_s)
+ end
+ end
+
+ def run_in_transaction?
+ use_transactional_tests &&
+ !self.class.uses_transaction?(method_name)
+ end
+
+ def setup_fixtures(config = ActiveRecord::Base)
+ if pre_loaded_fixtures && !use_transactional_tests
+ raise RuntimeError, "pre_loaded_fixtures requires use_transactional_tests"
+ end
+
+ @fixture_cache = {}
+ @fixture_connections = []
+ @@already_loaded_fixtures ||= {}
+ @connection_subscriber = nil
+
+ # Load fixtures once and begin transaction.
+ if run_in_transaction?
+ if @@already_loaded_fixtures[self.class]
+ @loaded_fixtures = @@already_loaded_fixtures[self.class]
+ else
+ @loaded_fixtures = load_fixtures(config)
+ @@already_loaded_fixtures[self.class] = @loaded_fixtures
+ end
+
+ # Begin transactions for connections already established
+ @fixture_connections = enlist_fixture_connections
+ @fixture_connections.each do |connection|
+ connection.begin_transaction joinable: false
+ connection.pool.lock_thread = true
+ end
+
+ # When connections are established in the future, begin a transaction too
+ @connection_subscriber = ActiveSupport::Notifications.subscribe("!connection.active_record") do |_, _, _, _, payload|
+ spec_name = payload[:spec_name] if payload.key?(:spec_name)
+
+ if spec_name
+ begin
+ connection = ActiveRecord::Base.connection_handler.retrieve_connection(spec_name)
+ rescue ConnectionNotEstablished
+ connection = nil
+ end
+
+ if connection && !@fixture_connections.include?(connection)
+ connection.begin_transaction joinable: false
+ connection.pool.lock_thread = true
+ @fixture_connections << connection
+ end
+ end
+ end
+
+ # Load fixtures for every test.
+ else
+ ActiveRecord::FixtureSet.reset_cache
+ @@already_loaded_fixtures[self.class] = nil
+ @loaded_fixtures = load_fixtures(config)
+ end
+
+ # Instantiate fixtures for every test if requested.
+ instantiate_fixtures if use_instantiated_fixtures
+ end
+
+ def teardown_fixtures
+ # Rollback changes if a transaction is active.
+ if run_in_transaction?
+ ActiveSupport::Notifications.unsubscribe(@connection_subscriber) if @connection_subscriber
+ @fixture_connections.each do |connection|
+ connection.rollback_transaction if connection.transaction_open?
+ connection.pool.lock_thread = false
+ end
+ @fixture_connections.clear
+ else
+ ActiveRecord::FixtureSet.reset_cache
+ end
+
+ ActiveRecord::Base.clear_active_connections!
+ end
+
+ def enlist_fixture_connections
+ ActiveRecord::Base.connection_handler.connection_pool_list.map(&:connection)
+ end
+
+ private
+ def load_fixtures(config)
+ fixtures = ActiveRecord::FixtureSet.create_fixtures(fixture_path, fixture_table_names, fixture_class_names, config)
+ Hash[fixtures.map { |f| [f.name, f] }]
+ end
+
+ def instantiate_fixtures
+ if pre_loaded_fixtures
+ raise RuntimeError, "Load fixtures before instantiating them." if ActiveRecord::FixtureSet.all_loaded_fixtures.empty?
+ ActiveRecord::FixtureSet.instantiate_all_loaded_fixtures(self, load_instances?)
+ else
+ raise RuntimeError, "Load fixtures before instantiating them." if @loaded_fixtures.nil?
+ @loaded_fixtures.each_value do |fixture_set|
+ ActiveRecord::FixtureSet.instantiate_fixtures(self, fixture_set, load_instances?)
+ end
+ end
+ end
+
+ def load_instances?
+ use_instantiated_fixtures != :no_instances
+ end
+ end
+end
+
+class ActiveRecord::FixtureSet::RenderContext # :nodoc:
+ def self.create_subclass
+ Class.new ActiveRecord::FixtureSet.context_class do
+ def get_binding
+ binding()
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/gem_version.rb b/activerecord/lib/active_record/gem_version.rb
new file mode 100644
index 0000000000..7ccb57b305
--- /dev/null
+++ b/activerecord/lib/active_record/gem_version.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ # Returns the version of the currently loaded Active Record as a <tt>Gem::Version</tt>
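+  #
+  # With the constants defined below, this evaluates to:
+  #
+  #   ActiveRecord.gem_version # => #<Gem::Version "5.2.0.alpha">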
+ def self.gem_version
+ Gem::Version.new VERSION::STRING
+ end
+
+ module VERSION
+ MAJOR = 5
+ MINOR = 2
+ TINY = 0
+ PRE = "alpha"
+
+ STRING = [MAJOR, MINOR, TINY, PRE].compact.join(".")
+ end
+end
diff --git a/activerecord/lib/active_record/inheritance.rb b/activerecord/lib/active_record/inheritance.rb
new file mode 100644
index 0000000000..e3deaafeec
--- /dev/null
+++ b/activerecord/lib/active_record/inheritance.rb
@@ -0,0 +1,254 @@
+# frozen_string_literal: true
+
+require "active_support/core_ext/hash/indifferent_access"
+
+module ActiveRecord
+ # == Single table inheritance
+ #
+ # Active Record allows inheritance by storing the name of the class in a column that by
+ # default is named "type" (can be changed by overwriting <tt>Base.inheritance_column</tt>).
+  # This means that an inheritance hierarchy can look like this:
+ #
+ # class Company < ActiveRecord::Base; end
+ # class Firm < Company; end
+ # class Client < Company; end
+ # class PriorityClient < Client; end
+ #
+ # When you do <tt>Firm.create(name: "37signals")</tt>, this record will be saved in
+ # the companies table with type = "Firm". You can then fetch this row again using
+ # <tt>Company.where(name: '37signals').first</tt> and it will return a Firm object.
+ #
+  # Be aware that, because the type column is an attribute on the record, every new
+  # subclass will instantly be marked as dirty and the type column will be included
+  # in the list of changed attributes on the record. This is different from
+  # non-Single Table Inheritance (STI) classes:
+ #
+ # Company.new.changed? # => false
+ # Firm.new.changed? # => true
+ # Firm.new.changes # => {"type"=>["","Firm"]}
+ #
+ # If you don't have a type column defined in your table, single-table inheritance won't
+ # be triggered. In that case, it'll work just like normal subclasses with no special magic
+ # for differentiating between them or reloading the right type with find.
+ #
+ # Note, all the attributes for all the cases are kept in the same table. Read more:
+ # http://www.martinfowler.com/eaaCatalog/singleTableInheritance.html
+ #
+ module Inheritance
+ extend ActiveSupport::Concern
+
+ included do
+ # Determines whether to store the full constant name including namespace when using STI.
+ # This is true, by default.
+ class_attribute :store_full_sti_class, instance_writer: false, default: true
+ end
+
+ module ClassMethods
+ # Determines if one of the attributes passed in is the inheritance column,
+ # and if the inheritance column is attr accessible, it initializes an
+ # instance of the given subclass instead of the base class.
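+  #
+  # For example, with the Company/Firm hierarchy shown above:
+  #
+  #   Company.new(type: "Firm") # => builds a Firm instance rather than a Company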
+ def new(*args, &block)
+ if abstract_class? || self == Base
+ raise NotImplementedError, "#{self} is an abstract class and cannot be instantiated."
+ end
+
+ attrs = args.first
+ if has_attribute?(inheritance_column)
+ subclass = subclass_from_attributes(attrs)
+
+ if subclass.nil? && base_class == self
+ subclass = subclass_from_attributes(column_defaults)
+ end
+ end
+
+ if subclass && subclass != self
+ subclass.new(*args, &block)
+ else
+ super
+ end
+ end
+
+ # Returns +true+ if this does not need STI type condition. Returns
+ # +false+ if STI type condition needs to be applied.
+ def descends_from_active_record?
+ if self == Base
+ false
+ elsif superclass.abstract_class?
+ superclass.descends_from_active_record?
+ else
+ superclass == Base || !columns_hash.include?(inheritance_column)
+ end
+ end
+
+ def finder_needs_type_condition? #:nodoc:
+ # This is like this because benchmarking justifies the strange :false stuff
+ :true == (@finder_needs_type_condition ||= descends_from_active_record? ? :false : :true)
+ end
+
+ # Returns the class descending directly from ActiveRecord::Base, or
+ # an abstract class, if any, in the inheritance hierarchy.
+ #
+ # If A extends ActiveRecord::Base, A.base_class will return A. If B descends from A
+ # through some arbitrarily deep hierarchy, B.base_class will return A.
+ #
+  # If B < A and C < B, and A is an abstract_class, then both B.base_class
+  # and C.base_class return B.
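+  #
+  # A short sketch of that rule:
+  #
+  #   class A < ActiveRecord::Base
+  #     self.abstract_class = true
+  #   end
+  #   class B < A; end
+  #   class C < B; end
+  #
+  #   B.base_class # => B
+  #   C.base_class # => B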
+ def base_class
+ unless self < Base
+ raise ActiveRecordError, "#{name} doesn't belong in a hierarchy descending from ActiveRecord"
+ end
+
+ if superclass == Base || superclass.abstract_class?
+ self
+ else
+ superclass.base_class
+ end
+ end
+
+ # Set this to true if this is an abstract class (see <tt>abstract_class?</tt>).
+ # If you are using inheritance with ActiveRecord and don't want child classes
+ # to utilize the implied STI table name of the parent class, this will need to be true.
+ # For example, given the following:
+ #
+ # class SuperClass < ActiveRecord::Base
+ # self.abstract_class = true
+ # end
+ # class Child < SuperClass
+ # self.table_name = 'the_table_i_really_want'
+ # end
+ #
+  # <tt>self.abstract_class = true</tt> is required to make <tt>Child.find</tt>, <tt>Child.create</tt>, and
+  # other querying methods use <tt>the_table_i_really_want</tt> instead of a table called <tt>super_classes</tt>.
+ #
+ attr_accessor :abstract_class
+
+ # Returns whether this class is an abstract class or not.
+ def abstract_class?
+ defined?(@abstract_class) && @abstract_class == true
+ end
+
+ def sti_name
+ store_full_sti_class ? name : name.demodulize
+ end
+
+ def inherited(subclass)
+ subclass.instance_variable_set(:@_type_candidates_cache, Concurrent::Map.new)
+ super
+ end
+
+ protected
+
+ # Returns the class type of the record using the current module as a prefix. So descendants of
+ # MyApp::Business::Account would appear as MyApp::Business::AccountSubclass.
+ def compute_type(type_name)
+ if type_name.start_with?("::".freeze)
+ # If the type is prefixed with a scope operator then we assume that
+ # the type_name is an absolute reference.
+ ActiveSupport::Dependencies.constantize(type_name)
+ else
+ type_candidate = @_type_candidates_cache[type_name]
+ if type_candidate && type_constant = ActiveSupport::Dependencies.safe_constantize(type_candidate)
+ return type_constant
+ end
+
+ # Build a list of candidates to search for
+ candidates = []
+ name.scan(/::|$/) { candidates.unshift "#{$`}::#{type_name}" }
+ candidates << type_name
+
+ candidates.each do |candidate|
+ constant = ActiveSupport::Dependencies.safe_constantize(candidate)
+ if candidate == constant.to_s
+ @_type_candidates_cache[type_name] = candidate
+ return constant
+ end
+ end
+
+ raise NameError.new("uninitialized constant #{candidates.first}", candidates.first)
+ end
+ end
+
+ private
+
+ # Called by +instantiate+ to decide which class to use for a new
+ # record instance. For single-table inheritance, we check the record
+ # for a +type+ column and return the corresponding class.
+ def discriminate_class_for_record(record)
+ if using_single_table_inheritance?(record)
+ find_sti_class(record[inheritance_column])
+ else
+ super
+ end
+ end
+
+ def using_single_table_inheritance?(record)
+ record[inheritance_column].present? && has_attribute?(inheritance_column)
+ end
+
+ def find_sti_class(type_name)
+ type_name = base_class.type_for_attribute(inheritance_column).cast(type_name)
+ subclass = begin
+ if store_full_sti_class
+ ActiveSupport::Dependencies.constantize(type_name)
+ else
+ compute_type(type_name)
+ end
+ rescue NameError
+ raise SubclassNotFound,
+ "The single-table inheritance mechanism failed to locate the subclass: '#{type_name}'. " \
+ "This error is raised because the column '#{inheritance_column}' is reserved for storing the class in case of inheritance. " \
+ "Please rename this column if you didn't intend it to be used for storing the inheritance class " \
+ "or overwrite #{name}.inheritance_column to use another column for that information."
+ end
+ unless subclass == self || descendants.include?(subclass)
+ raise SubclassNotFound, "Invalid single-table inheritance type: #{subclass.name} is not a subclass of #{name}"
+ end
+ subclass
+ end
+
+ def type_condition(table = arel_table)
+ sti_column = arel_attribute(inheritance_column, table)
+ sti_names = ([self] + descendants).map(&:sti_name)
+
+ sti_column.in(sti_names)
+ end
+
+ # Detect the subclass from the inheritance column of attrs. If the inheritance column value
+ # is not self or a valid subclass, raises ActiveRecord::SubclassNotFound
+ def subclass_from_attributes(attrs)
+ attrs = attrs.to_h if attrs.respond_to?(:permitted?)
+ if attrs.is_a?(Hash)
+ subclass_name = attrs[inheritance_column] || attrs[inheritance_column.to_sym]
+
+ if subclass_name.present?
+ find_sti_class(subclass_name)
+ end
+ end
+ end
+ end
+
+ def initialize_dup(other)
+ super
+ ensure_proper_type
+ end
+
+ private
+
+ def initialize_internals_callback
+ super
+ ensure_proper_type
+ end
+
+ # Sets the attribute used for single table inheritance to this class name if this is not the
+ # ActiveRecord::Base descendant.
+ # Considering the hierarchy Reply < Message < ActiveRecord::Base, this makes it possible to
+ # do Reply.new without having to set <tt>Reply[Reply.inheritance_column] = "Reply"</tt> yourself.
+ # No such attribute would be set for objects of the Message class in that example.
+ def ensure_proper_type
+ klass = self.class
+ if klass.finder_needs_type_condition?
+ _write_attribute(klass.inheritance_column, klass.sti_name)
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/integration.rb b/activerecord/lib/active_record/integration.rb
new file mode 100644
index 0000000000..6cf26a9792
--- /dev/null
+++ b/activerecord/lib/active_record/integration.rb
@@ -0,0 +1,155 @@
+# frozen_string_literal: true
+
+require "active_support/core_ext/string/filters"
+
+module ActiveRecord
+ module Integration
+ extend ActiveSupport::Concern
+
+ included do
+ ##
+ # :singleton-method:
+ # Indicates the format used to generate the timestamp in the cache key, if
+ # versioning is off. Accepts any of the symbols in <tt>Time::DATE_FORMATS</tt>.
+ #
+ # This is +:usec+, by default.
+ class_attribute :cache_timestamp_format, instance_writer: false, default: :usec
+
+ ##
+ # :singleton-method:
+ # Indicates whether to use a stable #cache_key method that is accompanied
+ # by a changing version in the #cache_version method.
+ #
+ # This is +false+, by default until Rails 6.0.
+ class_attribute :cache_versioning, instance_writer: false, default: false
+ end
+
+ # Returns a +String+, which Action Pack uses for constructing a URL to this
+ # object. The default implementation returns this record's id as a +String+,
+  # or +nil+ if this record is unsaved.
+ #
+ # For example, suppose that you have a User model, and that you have a
+ # <tt>resources :users</tt> route. Normally, +user_path+ will
+ # construct a path with the user object's 'id' in it:
+ #
+ # user = User.find_by(name: 'Phusion')
+ # user_path(user) # => "/users/1"
+ #
+ # You can override +to_param+ in your model to make +user_path+ construct
+ # a path using the user's name instead of the user's id:
+ #
+ # class User < ActiveRecord::Base
+ # def to_param # overridden
+ # name
+ # end
+ # end
+ #
+ # user = User.find_by(name: 'Phusion')
+ # user_path(user) # => "/users/Phusion"
+ def to_param
+ # We can't use alias_method here, because method 'id' optimizes itself on the fly.
+ id && id.to_s # Be sure to stringify the id for routes
+ end
+
+ # Returns a stable cache key that can be used to identify this record.
+ #
+ # Product.new.cache_key # => "products/new"
+ # Product.find(5).cache_key # => "products/5"
+ #
+ # If ActiveRecord::Base.cache_versioning is turned off, as it was in Rails 5.1 and earlier,
+ # the cache key will also include a version.
+ #
+ # Product.cache_versioning = false
+ # Person.find(5).cache_key # => "people/5-20071224150000" (updated_at available)
+ def cache_key(*timestamp_names)
+ if new_record?
+ "#{model_name.cache_key}/new"
+ else
+ if cache_version && timestamp_names.none?
+ "#{model_name.cache_key}/#{id}"
+ else
+ timestamp = if timestamp_names.any?
+ ActiveSupport::Deprecation.warn(<<-MSG.squish)
+ Specifying a timestamp name for #cache_key has been deprecated in favor of
+ the explicit #cache_version method that can be overwritten.
+ MSG
+
+ max_updated_column_timestamp(timestamp_names)
+ else
+ max_updated_column_timestamp
+ end
+
+ if timestamp
+ timestamp = timestamp.utc.to_s(cache_timestamp_format)
+ "#{model_name.cache_key}/#{id}-#{timestamp}"
+ else
+ "#{model_name.cache_key}/#{id}"
+ end
+ end
+ end
+ end
+
+ # Returns a cache version that can be used together with the cache key to form
+ # a recyclable caching scheme. By default, the #updated_at column is used for the
+ # cache_version, but this method can be overwritten to return something else.
+ #
+ # Note, this method will return nil if ActiveRecord::Base.cache_versioning is set to
+ # +false+ (which it is by default until Rails 6.0).
+ def cache_version
+ if cache_versioning && timestamp = try(:updated_at)
+ timestamp.utc.to_s(:usec)
+ end
+ end
+
+ # Returns a cache key along with the version.
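+  #
+  # An illustrative sketch (the exact suffix depends on the record's +updated_at+
+  # and requires +cache_versioning+ to be enabled):
+  #
+  #   person = Person.find(5)
+  #   person.cache_key_with_version # => e.g. "people/5-20071224150000000000"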
+ def cache_key_with_version
+ if version = cache_version
+ "#{cache_key}-#{version}"
+ else
+ cache_key
+ end
+ end
+
+ module ClassMethods
+ # Defines your model's +to_param+ method to generate "pretty" URLs
+ # using +method_name+, which can be any attribute or method that
+ # responds to +to_s+.
+ #
+ # class User < ActiveRecord::Base
+ # to_param :name
+ # end
+ #
+ # user = User.find_by(name: 'Fancy Pants')
+ # user.id # => 123
+ # user_path(user) # => "/users/123-fancy-pants"
+ #
+ # Values longer than 20 characters will be truncated. The value
+ # is truncated word by word.
+ #
+ # user = User.find_by(name: 'David Heinemeier Hansson')
+ # user.id # => 125
+ # user_path(user) # => "/users/125-david-heinemeier"
+ #
+ # Because the generated param begins with the record's +id+, it is
+ # suitable for passing to +find+. In a controller, for example:
+ #
+ # params[:id] # => "123-fancy-pants"
+ # User.find(params[:id]).id # => 123
+ def to_param(method_name = nil)
+ if method_name.nil?
+ super()
+ else
+ define_method :to_param do
+ if (default = super()) &&
+ (result = send(method_name).to_s).present? &&
+ (param = result.squish.parameterize.truncate(20, separator: /-/, omission: "")).present?
+ "#{default}-#{param}"
+ else
+ default
+ end
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/internal_metadata.rb b/activerecord/lib/active_record/internal_metadata.rb
new file mode 100644
index 0000000000..14795cc815
--- /dev/null
+++ b/activerecord/lib/active_record/internal_metadata.rb
@@ -0,0 +1,45 @@
+# frozen_string_literal: true
+
+require_relative "scoping/default"
+require_relative "scoping/named"
+
+module ActiveRecord
+ # This class is used to create a table that keeps track of values and keys such
+ # as which environment migrations were run in.
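+  #
+  # An illustrative sketch of the key/value API defined below:
+  #
+  #   ActiveRecord::InternalMetadata["environment"] = "development"
+  #   ActiveRecord::InternalMetadata["environment"] # => "development"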
+ class InternalMetadata < ActiveRecord::Base # :nodoc:
+ class << self
+ def primary_key
+ "key"
+ end
+
+ def table_name
+ "#{table_name_prefix}#{ActiveRecord::Base.internal_metadata_table_name}#{table_name_suffix}"
+ end
+
+ def []=(key, value)
+ find_or_initialize_by(key: key).update_attributes!(value: value)
+ end
+
+ def [](key)
+ where(key: key).pluck(:value).first
+ end
+
+ def table_exists?
+ connection.table_exists?(table_name)
+ end
+
+ # Creates an internal metadata table with columns +key+ and +value+
+ def create_table
+ unless table_exists?
+ key_options = connection.internal_string_options_for_primary_key
+
+ connection.create_table(table_name, id: false) do |t|
+ t.string :key, key_options
+ t.string :value
+ t.timestamps
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/legacy_yaml_adapter.rb b/activerecord/lib/active_record/legacy_yaml_adapter.rb
new file mode 100644
index 0000000000..23644aab8f
--- /dev/null
+++ b/activerecord/lib/active_record/legacy_yaml_adapter.rb
@@ -0,0 +1,48 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ module LegacyYamlAdapter
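+    # Converts records that were serialized to YAML by older versions of Rails into
+    # the current attribute format: payloads already tagged with an
+    # "active_record_yaml_version" of 1 or 2 pass through unchanged, 4.2-era
+    # attribute sets get their types refreshed from the model, and 4.1-era
+    # attribute hashes are rebuilt from the raw database values.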
+ def self.convert(klass, coder)
+ return coder unless coder.is_a?(Psych::Coder)
+
+ case coder["active_record_yaml_version"]
+ when 1, 2 then coder
+ else
+ if coder["attributes"].is_a?(AttributeSet)
+ Rails420.convert(klass, coder)
+ else
+ Rails41.convert(klass, coder)
+ end
+ end
+ end
+
+ module Rails420
+ def self.convert(klass, coder)
+ attribute_set = coder["attributes"]
+
+ klass.attribute_names.each do |attr_name|
+ attribute = attribute_set[attr_name]
+ if attribute.type.is_a?(Delegator)
+ type_from_klass = klass.type_for_attribute(attr_name)
+ attribute_set[attr_name] = attribute.with_type(type_from_klass)
+ end
+ end
+
+ coder
+ end
+ end
+
+ module Rails41
+ def self.convert(klass, coder)
+ attributes = klass.attributes_builder
+ .build_from_database(coder["attributes"])
+ new_record = coder["attributes"][klass.primary_key].blank?
+
+ {
+ "attributes" => attributes,
+ "new_record" => new_record,
+ }
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/locale/en.yml b/activerecord/lib/active_record/locale/en.yml
new file mode 100644
index 0000000000..0b35027b2b
--- /dev/null
+++ b/activerecord/lib/active_record/locale/en.yml
@@ -0,0 +1,48 @@
+en:
+  # Attribute names common to most models
+ #attributes:
+ #created_at: "Created at"
+ #updated_at: "Updated at"
+
+ # Default error messages
+ errors:
+ messages:
+ required: "must exist"
+ taken: "has already been taken"
+
+ # Active Record models configuration
+ activerecord:
+ errors:
+ messages:
+ record_invalid: "Validation failed: %{errors}"
+ restrict_dependent_destroy:
+ has_one: "Cannot delete record because a dependent %{record} exists"
+ has_many: "Cannot delete record because dependent %{record} exist"
+ # Append your own errors here or at the model/attributes scope.
+
+      # You can define your own errors for models or model attributes.
+ # The values :model, :attribute and :value are always available for interpolation.
+ #
+ # For example,
+ # models:
+ # user:
+ # blank: "This is a custom blank message for %{model}: %{attribute}"
+ # attributes:
+ # login:
+ # blank: "This is a custom blank message for User login"
+      # This will define a custom blank validation message for the User model and
+      # a custom blank validation message for the login attribute of the User model.
+ #models:
+
+ # Translate model names. Used in Model.human_name().
+ #models:
+ # For example,
+ # user: "Dude"
+ # will translate User model name to "Dude"
+
+ # Translate model attribute names. Used in Model.human_attribute_name(attribute).
+ #attributes:
+ # For example,
+ # user:
+ # login: "Handle"
+ # will translate User attribute "login" as "Handle"
diff --git a/activerecord/lib/active_record/locking/optimistic.rb b/activerecord/lib/active_record/locking/optimistic.rb
new file mode 100644
index 0000000000..e1e24e2814
--- /dev/null
+++ b/activerecord/lib/active_record/locking/optimistic.rb
@@ -0,0 +1,212 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ module Locking
+ # == What is Optimistic Locking
+ #
+ # Optimistic locking allows multiple users to access the same record for edits, and assumes a minimum of
+    # conflicts with the data. It does this by checking whether another process has made changes to a record since
+    # it was opened; if so, an <tt>ActiveRecord::StaleObjectError</tt> exception is raised
+    # and the update is ignored.
+ #
+ # Check out <tt>ActiveRecord::Locking::Pessimistic</tt> for an alternative.
+ #
+ # == Usage
+ #
+ # Active Record supports optimistic locking if the +lock_version+ field is present. Each update to the
+ # record increments the +lock_version+ column and the locking facilities ensure that records instantiated twice
+ # will let the last one saved raise a +StaleObjectError+ if the first was also updated. Example:
+ #
+ # p1 = Person.find(1)
+ # p2 = Person.find(1)
+ #
+ # p1.first_name = "Michael"
+ # p1.save
+ #
+ # p2.first_name = "should fail"
+ # p2.save # Raises an ActiveRecord::StaleObjectError
+ #
+ # Optimistic locking will also check for stale data when objects are destroyed. Example:
+ #
+ # p1 = Person.find(1)
+ # p2 = Person.find(1)
+ #
+ # p1.first_name = "Michael"
+ # p1.save
+ #
+ # p2.destroy # Raises an ActiveRecord::StaleObjectError
+ #
+ # You're then responsible for dealing with the conflict by rescuing the exception and either rolling back, merging,
+    # or otherwise applying the business logic needed to resolve the conflict.
+ #
+ # This locking mechanism will function inside a single Ruby process. To make it work across all
+ # web requests, the recommended approach is to add +lock_version+ as a hidden field to your form.
+ #
+ # This behavior can be turned off by setting <tt>ActiveRecord::Base.lock_optimistically = false</tt>.
+ # To override the name of the +lock_version+ column, set the <tt>locking_column</tt> class attribute:
+ #
+ # class Person < ActiveRecord::Base
+ # self.locking_column = :lock_person
+ # end
+ #
+ module Optimistic
+ extend ActiveSupport::Concern
+
+ included do
+ class_attribute :lock_optimistically, instance_writer: false, default: true
+ end
+
+ def locking_enabled? #:nodoc:
+ self.class.locking_enabled?
+ end
+
+ private
+
+ def increment_lock
+ lock_col = self.class.locking_column
+ previous_lock_value = send(lock_col)
+ send("#{lock_col}=", previous_lock_value + 1)
+ end
+
+ def _create_record(attribute_names = self.attribute_names, *)
+ if locking_enabled?
+ # We always want to persist the locking version, even if we don't detect
+ # a change from the default, since the database might have no default
+ attribute_names |= [self.class.locking_column]
+ end
+ super
+ end
+
+ def _update_record(attribute_names = self.attribute_names)
+ return super unless locking_enabled?
+ return 0 if attribute_names.empty?
+
+ begin
+ lock_col = self.class.locking_column
+
+ previous_lock_value = read_attribute_before_type_cast(lock_col)
+
+ increment_lock
+
+ attribute_names.push(lock_col)
+
+ relation = self.class.unscoped
+
+ affected_rows = relation.where(
+ self.class.primary_key => id,
+ lock_col => previous_lock_value
+ ).update_all(
+ attributes_for_update(attribute_names).map do |name|
+ [name, _read_attribute(name)]
+ end.to_h
+ )
+
+ unless affected_rows == 1
+ raise ActiveRecord::StaleObjectError.new(self, "update")
+ end
+
+ affected_rows
+
+ # If something went wrong, revert the locking_column value.
+ rescue Exception
+ send("#{lock_col}=", previous_lock_value.to_i)
+
+ raise
+ end
+ end
+
+ def destroy_row
+ affected_rows = super
+
+ if locking_enabled? && affected_rows != 1
+ raise ActiveRecord::StaleObjectError.new(self, "destroy")
+ end
+
+ affected_rows
+ end
+
+ def relation_for_destroy
+ relation = super
+
+ if locking_enabled?
+ locking_column = self.class.locking_column
+ relation = relation.where(locking_column => read_attribute_before_type_cast(locking_column))
+ end
+
+ relation
+ end
+
+ module ClassMethods
+ DEFAULT_LOCKING_COLUMN = "lock_version"
+
+ # Returns true if the +lock_optimistically+ flag is set to true
+ # (which it is, by default) and the table includes the
+ # +locking_column+ column (defaults to +lock_version+).
+ def locking_enabled?
+ lock_optimistically && columns_hash[locking_column]
+ end
+
+ # Set the column to use for optimistic locking. Defaults to +lock_version+.
+ def locking_column=(value)
+ reload_schema_from_cache
+ @locking_column = value.to_s
+ end
+
+ # The version column used for optimistic locking. Defaults to +lock_version+.
+ def locking_column
+ @locking_column = DEFAULT_LOCKING_COLUMN unless defined?(@locking_column)
+ @locking_column
+ end
+
+ # Reset the column used for optimistic locking back to the +lock_version+ default.
+ def reset_locking_column
+ self.locking_column = DEFAULT_LOCKING_COLUMN
+ end
+
+ # Make sure the lock version column gets updated when counters are
+ # updated.
+ def update_counters(id, counters)
+ counters = counters.merge(locking_column => 1) if locking_enabled?
+ super
+ end
+
+ private
+
+ # We need to apply this decorator here, rather than on module inclusion. The closure
+ # created by the matcher would otherwise evaluate for `ActiveRecord::Base`, not the
+        # subclass being decorated. As such, changes to `lock_optimistically` or
+        # `locking_column` would not be picked up.
+ def inherited(subclass)
+ subclass.class_eval do
+ is_lock_column = ->(name, _) { lock_optimistically && name == locking_column }
+ decorate_matching_attribute_types(is_lock_column, :_optimistic_locking) do |type|
+ LockingType.new(type)
+ end
+ end
+ super
+ end
+ end
+ end
+
+  # In deserialize and serialize we change `nil` to 0, so that passing
+  # `nil` values to `lock_version` does not result in an `ActiveRecord::StaleObjectError`
+  # when updating the record.
+ class LockingType < DelegateClass(Type::Value) # :nodoc:
+ def deserialize(value)
+ super.to_i
+ end
+
+ def serialize(value)
+ super.to_i
+ end
+
+ def init_with(coder)
+ __setobj__(coder["subtype"])
+ end
+
+ def encode_with(coder)
+ coder["subtype"] = __getobj__
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/locking/pessimistic.rb b/activerecord/lib/active_record/locking/pessimistic.rb
new file mode 100644
index 0000000000..e939a24ad5
--- /dev/null
+++ b/activerecord/lib/active_record/locking/pessimistic.rb
@@ -0,0 +1,88 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ module Locking
+ # Locking::Pessimistic provides support for row-level locking using
+ # SELECT ... FOR UPDATE and other lock types.
+ #
+ # Chain <tt>ActiveRecord::Base#find</tt> to <tt>ActiveRecord::QueryMethods#lock</tt> to obtain an exclusive
+ # lock on the selected rows:
+ # # select * from accounts where id=1 for update
+ # Account.lock.find(1)
+ #
+ # Call <tt>lock('some locking clause')</tt> to use a database-specific locking clause
+ # of your own such as 'LOCK IN SHARE MODE' or 'FOR UPDATE NOWAIT'. Example:
+ #
+ # Account.transaction do
+ # # select * from accounts where name = 'shugo' limit 1 for update
+ # shugo = Account.where("name = 'shugo'").lock(true).first
+ # yuko = Account.where("name = 'yuko'").lock(true).first
+ # shugo.balance -= 100
+ # shugo.save!
+ # yuko.balance += 100
+ # yuko.save!
+ # end
+ #
+    # You can also use the <tt>ActiveRecord::Base#lock!</tt> method to lock one record by id.
+ # This may be better if you don't need to lock every row. Example:
+ #
+ # Account.transaction do
+ # # select * from accounts where ...
+ # accounts = Account.where(...)
+ # account1 = accounts.detect { |account| ... }
+ # account2 = accounts.detect { |account| ... }
+ # # select * from accounts where id=? for update
+ # account1.lock!
+ # account2.lock!
+ # account1.balance -= 100
+ # account1.save!
+ # account2.balance += 100
+ # account2.save!
+ # end
+ #
+ # You can start a transaction and acquire the lock in one go by calling
+ # <tt>with_lock</tt> with a block. The block is called from within
+    # a transaction, and the object is already locked. Example:
+ #
+ # account = Account.first
+ # account.with_lock do
+ # # This block is called within a transaction,
+ # # account is already locked.
+ # account.balance -= 100
+ # account.save!
+ # end
+ #
+ # Database-specific information on row locking:
+ # MySQL: http://dev.mysql.com/doc/refman/5.7/en/innodb-locking-reads.html
+ # PostgreSQL: https://www.postgresql.org/docs/current/interactive/sql-select.html#SQL-FOR-UPDATE-SHARE
+ module Pessimistic
+ # Obtain a row lock on this record. Reloads the record to obtain the requested
+      # lock. Pass an SQL locking clause to append to the end of the SELECT statement
+ # or pass true for "FOR UPDATE" (the default, an exclusive row lock). Returns
+ # the locked record.
+ def lock!(lock = true)
+ if persisted?
+ if changed?
+ ActiveSupport::Deprecation.warn(<<-MSG.squish)
+ Locking a record with unpersisted changes is deprecated and will raise an
+ exception in Rails 5.2. Use `save` to persist the changes, or `reload` to
+ discard them explicitly.
+ MSG
+ end
+ reload(lock: lock)
+ end
+ self
+ end
+
+ # Wraps the passed block in a transaction, locking the object
+ # before yielding. You can pass the SQL locking clause
+      # as an argument (see <tt>lock!</tt>).
+ def with_lock(lock = true)
+ transaction do
+ lock!(lock)
+ yield
+ end
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/log_subscriber.rb b/activerecord/lib/active_record/log_subscriber.rb
new file mode 100644
index 0000000000..405f3a30c6
--- /dev/null
+++ b/activerecord/lib/active_record/log_subscriber.rb
@@ -0,0 +1,96 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ class LogSubscriber < ActiveSupport::LogSubscriber
+ IGNORE_PAYLOAD_NAMES = ["SCHEMA", "EXPLAIN"]
+
+ def self.runtime=(value)
+ ActiveRecord::RuntimeRegistry.sql_runtime = value
+ end
+
+ def self.runtime
+ ActiveRecord::RuntimeRegistry.sql_runtime ||= 0
+ end
+
+ def self.reset_runtime
+ rt, self.runtime = runtime, 0
+ rt
+ end
+
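+    # Logs each SQL statement at debug level with its name, duration, and bound
+    # parameters, colorized by statement type, and accumulates the duration into
+    # the per-request SQL runtime. "SCHEMA" and "EXPLAIN" payloads are skipped.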
+ def sql(event)
+ self.class.runtime += event.duration
+ return unless logger.debug?
+
+ payload = event.payload
+
+ return if IGNORE_PAYLOAD_NAMES.include?(payload[:name])
+
+ name = "#{payload[:name]} (#{event.duration.round(1)}ms)"
+ name = "CACHE #{name}" if payload[:cached]
+ sql = payload[:sql]
+ binds = nil
+
+ unless (payload[:binds] || []).empty?
+ casted_params = type_casted_binds(payload[:type_casted_binds])
+ binds = " " + payload[:binds].zip(casted_params).map { |attr, value|
+ render_bind(attr, value)
+ }.inspect
+ end
+
+ name = colorize_payload_name(name, payload[:name])
+ sql = color(sql, sql_color(sql), true)
+
+ debug " #{name} #{sql}#{binds}"
+ end
+
+ private
+ def type_casted_binds(casted_binds)
+ casted_binds.respond_to?(:call) ? casted_binds.call : casted_binds
+ end
+
+ def render_bind(attr, value)
+ if attr.is_a?(Array)
+ attr = attr.first
+ elsif attr.type.binary? && attr.value
+ value = "<#{attr.value_for_database.to_s.bytesize} bytes of binary data>"
+ end
+
+ [attr && attr.name, value]
+ end
+
+ def colorize_payload_name(name, payload_name)
+ if payload_name.blank? || payload_name == "SQL" # SQL vs Model Load/Exists
+ color(name, MAGENTA, true)
+ else
+ color(name, CYAN, true)
+ end
+ end
+
+ def sql_color(sql)
+ case sql
+ when /\A\s*rollback/mi
+ RED
+ when /select .*for update/mi, /\A\s*lock/mi
+ WHITE
+ when /\A\s*select/i
+ BLUE
+ when /\A\s*insert/i
+ GREEN
+ when /\A\s*update/i
+ YELLOW
+ when /\A\s*delete/i
+ RED
+ when /transaction\s*\Z/i
+ CYAN
+ else
+ MAGENTA
+ end
+ end
+
+ def logger
+ ActiveRecord::Base.logger
+ end
+ end
+end
+
+ActiveRecord::LogSubscriber.attach_to :active_record
diff --git a/activerecord/lib/active_record/migration.rb b/activerecord/lib/active_record/migration.rb
new file mode 100644
index 0000000000..52ca4671c2
--- /dev/null
+++ b/activerecord/lib/active_record/migration.rb
@@ -0,0 +1,1340 @@
+# frozen_string_literal: true
+
+require "set"
+require "zlib"
+require "active_support/core_ext/module/attribute_accessors"
+
+module ActiveRecord
+ class MigrationError < ActiveRecordError#:nodoc:
+ def initialize(message = nil)
+ message = "\n\n#{message}\n\n" if message
+ super
+ end
+ end
+
+ # Exception that can be raised to stop migrations from being rolled back.
+  # For example, the following migration is not reversible.
+ # Rolling back this migration will raise an ActiveRecord::IrreversibleMigration error.
+ #
+ # class IrreversibleMigrationExample < ActiveRecord::Migration[5.0]
+ # def change
+ # create_table :distributors do |t|
+ # t.string :zipcode
+ # end
+ #
+ # execute <<-SQL
+ # ALTER TABLE distributors
+ # ADD CONSTRAINT zipchk
+ # CHECK (char_length(zipcode) = 5) NO INHERIT;
+ # SQL
+ # end
+ # end
+ #
+ # There are two ways to mitigate this problem.
+ #
+ # 1. Define <tt>#up</tt> and <tt>#down</tt> methods instead of <tt>#change</tt>:
+ #
+ # class ReversibleMigrationExample < ActiveRecord::Migration[5.0]
+ # def up
+ # create_table :distributors do |t|
+ # t.string :zipcode
+ # end
+ #
+ # execute <<-SQL
+ # ALTER TABLE distributors
+ # ADD CONSTRAINT zipchk
+ # CHECK (char_length(zipcode) = 5) NO INHERIT;
+ # SQL
+ # end
+ #
+ # def down
+ # execute <<-SQL
+ # ALTER TABLE distributors
+ # DROP CONSTRAINT zipchk
+ # SQL
+ #
+ # drop_table :distributors
+ # end
+ # end
+ #
+  # 2. Use the #reversible method in the <tt>#change</tt> method:
+ #
+ # class ReversibleMigrationExample < ActiveRecord::Migration[5.0]
+ # def change
+ # create_table :distributors do |t|
+ # t.string :zipcode
+ # end
+ #
+ # reversible do |dir|
+ # dir.up do
+ # execute <<-SQL
+ # ALTER TABLE distributors
+ # ADD CONSTRAINT zipchk
+ # CHECK (char_length(zipcode) = 5) NO INHERIT;
+ # SQL
+ # end
+ #
+ # dir.down do
+ # execute <<-SQL
+ # ALTER TABLE distributors
+ # DROP CONSTRAINT zipchk
+ # SQL
+ # end
+ # end
+ # end
+ # end
+ class IrreversibleMigration < MigrationError
+ end
+
+ class DuplicateMigrationVersionError < MigrationError#:nodoc:
+ def initialize(version = nil)
+ if version
+ super("Multiple migrations have the version number #{version}.")
+ else
+ super("Duplicate migration version error.")
+ end
+ end
+ end
+
+ class DuplicateMigrationNameError < MigrationError#:nodoc:
+ def initialize(name = nil)
+ if name
+ super("Multiple migrations have the name #{name}.")
+ else
+ super("Duplicate migration name.")
+ end
+ end
+ end
+
+ class UnknownMigrationVersionError < MigrationError #:nodoc:
+ def initialize(version = nil)
+ if version
+ super("No migration with version number #{version}.")
+ else
+ super("Unknown migration version.")
+ end
+ end
+ end
+
+ class IllegalMigrationNameError < MigrationError#:nodoc:
+ def initialize(name = nil)
+ if name
+ super("Illegal name for migration file: #{name}\n\t(only lower case letters, numbers, and '_' allowed).")
+ else
+ super("Illegal name for migration.")
+ end
+ end
+ end
+
+ class PendingMigrationError < MigrationError#:nodoc:
+ def initialize(message = nil)
+ if !message && defined?(Rails.env)
+ super("Migrations are pending. To resolve this issue, run:\n\n bin/rails db:migrate RAILS_ENV=#{::Rails.env}")
+ elsif !message
+ super("Migrations are pending. To resolve this issue, run:\n\n bin/rails db:migrate")
+ else
+ super
+ end
+ end
+ end
+
+ class ConcurrentMigrationError < MigrationError #:nodoc:
+ DEFAULT_MESSAGE = "Cannot run migrations because another migration process is currently running.".freeze
+
+ def initialize(message = DEFAULT_MESSAGE)
+ super
+ end
+ end
+
+ class NoEnvironmentInSchemaError < MigrationError #:nodoc:
+ def initialize
+ msg = "Environment data not found in the schema. To resolve this issue, run: \n\n bin/rails db:environment:set"
+ if defined?(Rails.env)
+ super("#{msg} RAILS_ENV=#{::Rails.env}")
+ else
+ super(msg)
+ end
+ end
+ end
+
+ class ProtectedEnvironmentError < ActiveRecordError #:nodoc:
+ def initialize(env = "production")
+ msg = "You are attempting to run a destructive action against your '#{env}' database.\n".dup
+ msg << "If you are sure you want to continue, run the same command with the environment variable:\n"
+ msg << "DISABLE_DATABASE_ENVIRONMENT_CHECK=1"
+ super(msg)
+ end
+ end
+
+ class EnvironmentMismatchError < ActiveRecordError
+ def initialize(current: nil, stored: nil)
+ msg = "You are attempting to modify a database that was last run in `#{ stored }` environment.\n".dup
+ msg << "You are running in `#{ current }` environment. "
+ msg << "If you are sure you want to continue, first set the environment using:\n\n"
+ msg << " bin/rails db:environment:set"
+ if defined?(Rails.env)
+ super("#{msg} RAILS_ENV=#{::Rails.env}\n\n")
+ else
+ super("#{msg}\n\n")
+ end
+ end
+ end
+
+ # = Active Record Migrations
+ #
+ # Migrations can manage the evolution of a schema used by several physical
+ # databases. It's a solution to the common problem of adding a field to make
+ # a new feature work in your local database, but being unsure of how to
+ # push that change to other developers and to the production server. With
+ # migrations, you can describe the transformations in self-contained classes
+ # that can be checked into version control systems and executed against
+ # another database that might be one, two, or five versions behind.
+ #
+ # Example of a simple migration:
+ #
+ # class AddSsl < ActiveRecord::Migration[5.0]
+ # def up
+ # add_column :accounts, :ssl_enabled, :boolean, default: true
+ # end
+ #
+ # def down
+ # remove_column :accounts, :ssl_enabled
+ # end
+ # end
+ #
+ # This migration will add a boolean flag to the accounts table and remove it
+ # if you're backing out of the migration. It shows how all migrations have
+  # two methods +up+ and +down+ that describe the transformations
+  # required to implement or remove the migration. These methods can consist
+  # of migration-specific methods like +add_column+ and +remove_column+,
+ # but may also contain regular Ruby code for generating data needed for the
+ # transformations.
+ #
+ # Example of a more complex migration that also needs to initialize data:
+ #
+ # class AddSystemSettings < ActiveRecord::Migration[5.0]
+ # def up
+ # create_table :system_settings do |t|
+ # t.string :name
+ # t.string :label
+ # t.text :value
+ # t.string :type
+ # t.integer :position
+ # end
+ #
+ # SystemSetting.create name: 'notice',
+ # label: 'Use notice?',
+ # value: 1
+ # end
+ #
+ # def down
+ # drop_table :system_settings
+ # end
+ # end
+ #
+ # This migration first adds the +system_settings+ table, then creates the very
+ # first row in it using the Active Record model that relies on the table. It
+ # also uses the more advanced +create_table+ syntax where you can specify a
+ # complete table schema in one block call.
+ #
+ # == Available transformations
+ #
+ # === Creation
+ #
+ # * <tt>create_join_table(table_1, table_2, options)</tt>: Creates a join
+  #   table named after the first two arguments in lexical order. See
+ # ActiveRecord::ConnectionAdapters::SchemaStatements#create_join_table for
+ # details.
+ # * <tt>create_table(name, options)</tt>: Creates a table called +name+ and
+ # makes the table object available to a block that can then add columns to it,
+ # following the same format as +add_column+. See example above. The options hash
+ # is for fragments like "DEFAULT CHARSET=UTF-8" that are appended to the create
+ # table definition.
+ # * <tt>add_column(table_name, column_name, type, options)</tt>: Adds a new column
+ # to the table called +table_name+
+ # named +column_name+ specified to be one of the following types:
+ # <tt>:string</tt>, <tt>:text</tt>, <tt>:integer</tt>, <tt>:float</tt>,
+ # <tt>:decimal</tt>, <tt>:datetime</tt>, <tt>:timestamp</tt>, <tt>:time</tt>,
+ # <tt>:date</tt>, <tt>:binary</tt>, <tt>:boolean</tt>. A default value can be
+ # specified by passing an +options+ hash like <tt>{ default: 11 }</tt>.
+ # Other options include <tt>:limit</tt> and <tt>:null</tt> (e.g.
+ # <tt>{ limit: 50, null: false }</tt>) -- see
+ # ActiveRecord::ConnectionAdapters::TableDefinition#column for details.
+ # * <tt>add_foreign_key(from_table, to_table, options)</tt>: Adds a new
+ # foreign key. +from_table+ is the table with the key column, +to_table+ contains
+ # the referenced primary key.
+ # * <tt>add_index(table_name, column_names, options)</tt>: Adds a new index
+ # with the name of the column. Other options include
+ # <tt>:name</tt>, <tt>:unique</tt> (e.g.
+ # <tt>{ name: 'users_name_index', unique: true }</tt>) and <tt>:order</tt>
+ # (e.g. <tt>{ order: { name: :desc } }</tt>).
+ # * <tt>add_reference(:table_name, :reference_name)</tt>: Adds a new column
+  #   +reference_name_id+, an integer by default. See
+ # ActiveRecord::ConnectionAdapters::SchemaStatements#add_reference for details.
+ # * <tt>add_timestamps(table_name, options)</tt>: Adds timestamps (+created_at+
+ # and +updated_at+) columns to +table_name+.
+ #
+ # === Modification
+ #
+ # * <tt>change_column(table_name, column_name, type, options)</tt>: Changes
+ # the column to a different type using the same parameters as add_column.
+ # * <tt>change_column_default(table_name, column_name, default_or_changes)</tt>:
+ # Sets a default value for +column_name+ defined by +default_or_changes+ on
+ # +table_name+. Passing a hash containing <tt>:from</tt> and <tt>:to</tt>
+ # as +default_or_changes+ will make this change reversible in the migration.
+ # * <tt>change_column_null(table_name, column_name, null, default = nil)</tt>:
+ # Sets or removes a +NOT NULL+ constraint on +column_name+. The +null+ flag
+ # indicates whether the value can be +NULL+. See
+ # ActiveRecord::ConnectionAdapters::SchemaStatements#change_column_null for
+ # details.
+  # * <tt>change_table(name, options)</tt>: Allows column alterations to be made to
+  #   the table called +name+. It makes the table object available to a block that
+  #   can then add or remove columns, indexes, or foreign keys.
+ # * <tt>rename_column(table_name, column_name, new_column_name)</tt>: Renames
+ # a column but keeps the type and content.
+ # * <tt>rename_index(table_name, old_name, new_name)</tt>: Renames an index.
+ # * <tt>rename_table(old_name, new_name)</tt>: Renames the table called +old_name+
+ # to +new_name+.
+ #
+ # === Deletion
+ #
+ # * <tt>drop_table(name)</tt>: Drops the table called +name+.
+ # * <tt>drop_join_table(table_1, table_2, options)</tt>: Drops the join table
+ # specified by the given arguments.
+ # * <tt>remove_column(table_name, column_name, type, options)</tt>: Removes the column
+ # named +column_name+ from the table called +table_name+.
+ # * <tt>remove_columns(table_name, *column_names)</tt>: Removes the given
+ # columns from the table definition.
+ # * <tt>remove_foreign_key(from_table, options_or_to_table)</tt>: Removes the
+  #   given foreign key from the table called +from_table+.
+ # * <tt>remove_index(table_name, column: column_names)</tt>: Removes the index
+ # specified by +column_names+.
+ # * <tt>remove_index(table_name, name: index_name)</tt>: Removes the index
+ # specified by +index_name+.
+ # * <tt>remove_reference(table_name, ref_name, options)</tt>: Removes the
+ # reference(s) on +table_name+ specified by +ref_name+.
+ # * <tt>remove_timestamps(table_name, options)</tt>: Removes the timestamp
+ # columns (+created_at+ and +updated_at+) from the table definition.
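+ #
+ # As an illustrative sketch (the table and column names are hypothetical),
+ # a single migration can combine several of the methods above:
+ #
+ #   class ReorganizeProducts < ActiveRecord::Migration[5.0]
+ #     def change
+ #       add_column :products, :sku, :string, limit: 50
+ #       add_index :products, :sku, unique: true
+ #       rename_column :products, :title, :name
+ #       remove_column :products, :legacy_code, :string
+ #     end
+ #   end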
+ #
+ # == Irreversible transformations
+ #
+ # Some transformations are destructive in a manner that cannot be reversed.
+ # Migrations of that kind should raise an <tt>ActiveRecord::IrreversibleMigration</tt>
+ # exception in their +down+ method.
+ #
+ # == Running migrations from within Rails
+ #
+ # The Rails package has several tools to help create and apply migrations.
+ #
+ # To generate a new migration, you can use
+ # rails generate migration MyNewMigration
+ #
+ # where MyNewMigration is the name of your migration. The generator will
+ # create an empty migration file <tt>timestamp_my_new_migration.rb</tt>
+ # in the <tt>db/migrate/</tt> directory where <tt>timestamp</tt> is the
+ # UTC formatted date and time that the migration was generated.
+ #
+ # There is a special syntactic shortcut to generate migrations that add fields to a table.
+ #
+ # rails generate migration add_fieldname_to_tablename fieldname:string
+ #
+ # This will generate the file <tt>timestamp_add_fieldname_to_tablename.rb</tt>, which will look like this:
+ # class AddFieldnameToTablename < ActiveRecord::Migration[5.0]
+ # def change
+ # add_column :tablenames, :fieldname, :string
+ # end
+ # end
+ #
+ # To run migrations against the currently configured database, use
+ # <tt>rails db:migrate</tt>. This will update the database by running all of the
+ # pending migrations, creating the <tt>schema_migrations</tt> table
+ # (see "About the schema_migrations table" section below) if missing. It will also
+ # invoke the db:schema:dump task, which will update your db/schema.rb file
+ # to match the structure of your database.
+ #
+ # To roll the database back to a previous migration version, use
+ # <tt>rails db:migrate VERSION=X</tt> where <tt>X</tt> is the version to which
+ # you wish to downgrade. Alternatively, you can use the STEP option if you
+ # wish to roll back the last few migrations. <tt>rails db:rollback STEP=2</tt> will roll back
+ # the latest two migrations.
+ #
+ # If any of the migrations throw an <tt>ActiveRecord::IrreversibleMigration</tt> exception,
+ # that step will fail and you'll have some manual work to do.
+ #
+ # == Database support
+ #
+ # Migrations are currently supported in MySQL, PostgreSQL, SQLite,
+ # SQL Server, and Oracle (all supported databases except DB2).
+ #
+ # == More examples
+ #
+ # Not all migrations change the schema. Some just fix the data:
+ #
+ # class RemoveEmptyTags < ActiveRecord::Migration[5.0]
+ # def up
+ # Tag.all.each { |tag| tag.destroy if tag.pages.empty? }
+ # end
+ #
+ # def down
+ # # not much we can do to restore deleted data
+ # raise ActiveRecord::IrreversibleMigration, "Can't recover the deleted tags"
+ # end
+ # end
+ #
+ # Others remove columns when they migrate up instead of down:
+ #
+ # class RemoveUnnecessaryItemAttributes < ActiveRecord::Migration[5.0]
+ # def up
+ # remove_column :items, :incomplete_items_count
+ # remove_column :items, :completed_items_count
+ # end
+ #
+ # def down
+ #       add_column :items, :incomplete_items_count, :integer
+ #       add_column :items, :completed_items_count, :integer
+ # end
+ # end
+ #
+ # And sometimes you need to do something in SQL not abstracted directly by migrations:
+ #
+ # class MakeJoinUnique < ActiveRecord::Migration[5.0]
+ # def up
+ # execute "ALTER TABLE `pages_linked_pages` ADD UNIQUE `page_id_linked_page_id` (`page_id`,`linked_page_id`)"
+ # end
+ #
+ # def down
+ # execute "ALTER TABLE `pages_linked_pages` DROP INDEX `page_id_linked_page_id`"
+ # end
+ # end
+ #
+ # == Using a model after changing its table
+ #
+ # Sometimes you'll want to add a column in a migration and populate it
+ # immediately after. In that case, you'll need to make a call to
+ # <tt>Base.reset_column_information</tt> in order to ensure that the model has the
+ # latest column data from after the new column was added. Example:
+ #
+ # class AddPeopleSalary < ActiveRecord::Migration[5.0]
+ # def up
+ # add_column :people, :salary, :integer
+ # Person.reset_column_information
+ # Person.all.each do |p|
+ # p.update_attribute :salary, SalaryCalculator.compute(p)
+ # end
+ # end
+ # end
+ #
+ # == Controlling verbosity
+ #
+ # By default, migrations will describe the actions they are taking, writing
+ # them to the console as they happen, along with benchmarks describing how
+ # long each step took.
+ #
+ # You can quiet them down by setting ActiveRecord::Migration.verbose = false.
+ #
+ # You can also insert your own messages and benchmarks by using the +say_with_time+
+ # method:
+ #
+ # def up
+ # ...
+ # say_with_time "Updating salaries..." do
+ # Person.all.each do |p|
+ # p.update_attribute :salary, SalaryCalculator.compute(p)
+ # end
+ # end
+ # ...
+ # end
+ #
+ # The phrase "Updating salaries..." would then be printed, along with the
+ # benchmark for the block when the block completes.
+ #
+ # == Timestamped Migrations
+ #
+ # By default, Rails generates migrations that look like:
+ #
+ # 20080717013526_your_migration_name.rb
+ #
+ # The prefix is a generation timestamp (in UTC).
+ #
+ # If you'd prefer to use numeric prefixes, you can turn timestamped migrations
+ # off by setting:
+ #
+ # config.active_record.timestamped_migrations = false
+ #
+ # in <tt>config/application.rb</tt>.
+ #
+ # == Reversible Migrations
+ #
+ # Reversible migrations are migrations that know how to go +down+ for you.
+ # You simply supply the +up+ logic, and the Migration system figures out
+ # how to execute the down commands for you.
+ #
+ # To define a reversible migration, define the +change+ method in your
+ # migration like this:
+ #
+ # class TenderloveMigration < ActiveRecord::Migration[5.0]
+ # def change
+ # create_table(:horses) do |t|
+ # t.column :content, :text
+ # t.column :remind_at, :datetime
+ # end
+ # end
+ # end
+ #
+ # This migration will create the horses table for you on the way up, and
+ # automatically figure out how to drop the table on the way down.
+ #
+ # Some commands, like +remove_column+ without a type argument, cannot be
+ # reversed. If you care to define how to move up and down in these cases,
+ # you should define the +up+ and +down+ methods as before.
+ #
+ # If a command cannot be reversed, an
+ # <tt>ActiveRecord::IrreversibleMigration</tt> exception will be raised when
+ # the migration is moving down.
+ #
+ # For a list of commands that are reversible, please see
+ # <tt>ActiveRecord::Migration::CommandRecorder</tt>.
+ #
+ # == Transactional Migrations
+ #
+ # If the database adapter supports DDL transactions, all migrations will
+ # automatically be wrapped in a transaction. There are queries that you
+ # can't execute inside a transaction though, and for these situations
+ # you can turn the automatic transactions off.
+ #
+ # class ChangeEnum < ActiveRecord::Migration[5.0]
+ # disable_ddl_transaction!
+ #
+ # def up
+ # execute "ALTER TYPE model_size ADD VALUE 'new_value'"
+ # end
+ # end
+ #
+ # Remember that you can still open your own transactions, even if you
+ # are in a Migration with <tt>self.disable_ddl_transaction!</tt>.
+ class Migration
+ autoload :CommandRecorder, "active_record/migration/command_recorder"
+ autoload :Compatibility, "active_record/migration/compatibility"
+
+ # This must be defined before the inherited hook, below
+ class Current < Migration # :nodoc:
+ end
+
+ def self.inherited(subclass) # :nodoc:
+ super
+ if subclass.superclass == Migration
+ raise StandardError, "Directly inheriting from ActiveRecord::Migration is not supported. " \
+ "Please specify the Rails release the migration was written for:\n" \
+ "\n" \
+ " class #{subclass} < ActiveRecord::Migration[4.2]"
+ end
+ end
+
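+ # Returns the compatibility class for the given Rails +version+ (for
+ # example, <tt>ActiveRecord::Migration[5.0]</tt>), as resolved by
+ # Migration::Compatibility.find.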
+ def self.[](version)
+ Compatibility.find(version)
+ end
+
+ def self.current_version
+ ActiveRecord::VERSION::STRING.to_f
+ end
+
+ MigrationFilenameRegexp = /\A([0-9]+)_([_a-z0-9]*)\.?([_a-z0-9]*)?\.rb\z/ # :nodoc:
+
+ # This class is used to verify that all migrations have been run before
+ # loading a web page if <tt>config.active_record.migration_error</tt> is set to :page_load
+ class CheckPending
+ def initialize(app)
+ @app = app
+ @last_check = 0
+ end
+
+ def call(env)
+ mtime = ActiveRecord::Migrator.last_migration.mtime.to_i
+ if @last_check < mtime
+ ActiveRecord::Migration.check_pending!(connection)
+ @last_check = mtime
+ end
+ @app.call(env)
+ end
+
+ private
+
+ def connection
+ ActiveRecord::Base.connection
+ end
+ end
+
+ class << self
+ attr_accessor :delegate # :nodoc:
+ attr_accessor :disable_ddl_transaction # :nodoc:
+
+ def nearest_delegate # :nodoc:
+ delegate || superclass.nearest_delegate
+ end
+
+ # Raises <tt>ActiveRecord::PendingMigrationError</tt> if any migrations are pending.
+ def check_pending!(connection = Base.connection)
+ raise ActiveRecord::PendingMigrationError if ActiveRecord::Migrator.needs_migration?(connection)
+ end
+
+ def load_schema_if_pending!
+ if ActiveRecord::Migrator.needs_migration? || !ActiveRecord::Migrator.any_migrations?
+ # Roundtrip to Rake to allow plugins to hook into database initialization.
+ FileUtils.cd Rails.root do
+ current_config = Base.connection_config
+ Base.clear_all_connections!
+ system("bin/rails db:test:prepare")
+ # Establish a new connection, the old database may be gone (db:test:prepare uses purge)
+ Base.establish_connection(current_config)
+ end
+ check_pending!
+ end
+ end
+
+ def maintain_test_schema! # :nodoc:
+ if ActiveRecord::Base.maintain_test_schema
+ suppress_messages { load_schema_if_pending! }
+ end
+ end
+
+ def method_missing(name, *args, &block) # :nodoc:
+ nearest_delegate.send(name, *args, &block)
+ end
+
+ def migrate(direction)
+ new.migrate direction
+ end
+
+ # Disable the transaction wrapping this migration.
+ # You can still create your own transactions even after calling #disable_ddl_transaction!
+ #
+ # For more details read the {"Transactional Migrations" section above}[rdoc-ref:Migration].
+ def disable_ddl_transaction!
+ @disable_ddl_transaction = true
+ end
+ end
+
+ def disable_ddl_transaction # :nodoc:
+ self.class.disable_ddl_transaction
+ end
+
+ cattr_accessor :verbose
+ attr_accessor :name, :version
+
+ def initialize(name = self.class.name, version = nil)
+ @name = name
+ @version = version
+ @connection = nil
+ end
+
+ self.verbose = true
+ # instantiate the delegate object after initialize is defined
+ self.delegate = new
+
+ # Reverses the migration commands for the given block and
+ # the given migrations.
+ #
+ # The following migration will remove the table 'horses'
+ # and create the table 'apples' on the way up, and the reverse
+ # on the way down.
+ #
+ # class FixTLMigration < ActiveRecord::Migration[5.0]
+ # def change
+ # revert do
+ # create_table(:horses) do |t|
+ # t.text :content
+ # t.datetime :remind_at
+ # end
+ # end
+ # create_table(:apples) do |t|
+ # t.string :variety
+ # end
+ # end
+ # end
+ #
+ # Or equivalently, if +TenderloveMigration+ is defined as in the
+ # documentation for Migration:
+ #
+ # require_relative '20121212123456_tenderlove_migration'
+ #
+ # class FixupTLMigration < ActiveRecord::Migration[5.0]
+ # def change
+ # revert TenderloveMigration
+ #
+ # create_table(:apples) do |t|
+ # t.string :variety
+ # end
+ # end
+ # end
+ #
+ # This command can be nested.
+ def revert(*migration_classes)
+ run(*migration_classes.reverse, revert: true) unless migration_classes.empty?
+ if block_given?
+ if connection.respond_to? :revert
+ connection.revert { yield }
+ else
+ recorder = CommandRecorder.new(connection)
+ @connection = recorder
+ suppress_messages do
+ connection.revert { yield }
+ end
+ @connection = recorder.delegate
+ recorder.commands.each do |cmd, args, block|
+ send(cmd, *args, &block)
+ end
+ end
+ end
+ end
+
+ def reverting?
+ connection.respond_to?(:reverting) && connection.reverting
+ end
+
+ ReversibleBlockHelper = Struct.new(:reverting) do # :nodoc:
+ def up
+ yield unless reverting
+ end
+
+ def down
+ yield if reverting
+ end
+ end
+
+ # Used to specify an operation that can be run in one direction or another.
+ # Call the methods +up+ and +down+ of the yielded object to run a block
+ # only in one given direction.
+ # The whole block will be called in the right order within the migration.
+ #
+ # In the following example, the looping on users will always be done
+ # when the three columns 'first_name', 'last_name' and 'full_name' exist,
+ # even when migrating down:
+ #
+ # class SplitNameMigration < ActiveRecord::Migration[5.0]
+ # def change
+ # add_column :users, :first_name, :string
+ # add_column :users, :last_name, :string
+ #
+ # reversible do |dir|
+ # User.reset_column_information
+ # User.all.each do |u|
+ # dir.up { u.first_name, u.last_name = u.full_name.split(' ') }
+ # dir.down { u.full_name = "#{u.first_name} #{u.last_name}" }
+ # u.save
+ # end
+ # end
+ #
+ # revert { add_column :users, :full_name, :string }
+ # end
+ # end
+ def reversible
+ helper = ReversibleBlockHelper.new(reverting?)
+ execute_block { yield helper }
+ end
+
+ # Runs the given migration classes.
+ # Last argument can specify options:
+ # - :direction (default is :up)
+ # - :revert (default is false)
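+ #
+ # For example (the migration class names are hypothetical):
+ #
+ #   run CreateProducts, AddSkuToProducts, direction: :down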
+ def run(*migration_classes)
+ opts = migration_classes.extract_options!
+ dir = opts[:direction] || :up
+ dir = (dir == :down ? :up : :down) if opts[:revert]
+ if reverting?
+ # If already in a revert block and asked to go :up, say, we want to execute :down
+ # directly rather than record and invert :up, so we toggle reverting off with a
+ # nested revert and flip the direction via revert: true.
+ revert { run(*migration_classes, direction: dir, revert: true) }
+ else
+ migration_classes.each do |migration_class|
+ migration_class.new.exec_migration(connection, dir)
+ end
+ end
+ end
+
+ def up
+ self.class.delegate = self
+ return unless self.class.respond_to?(:up)
+ self.class.up
+ end
+
+ def down
+ self.class.delegate = self
+ return unless self.class.respond_to?(:down)
+ self.class.down
+ end
+
+ # Execute this migration in the named direction
+ def migrate(direction)
+ return unless respond_to?(direction)
+
+ case direction
+ when :up then announce "migrating"
+ when :down then announce "reverting"
+ end
+
+ time = nil
+ ActiveRecord::Base.connection_pool.with_connection do |conn|
+ time = Benchmark.measure do
+ exec_migration(conn, direction)
+ end
+ end
+
+ case direction
+ when :up then announce "migrated (%.4fs)" % time.real; write
+ when :down then announce "reverted (%.4fs)" % time.real; write
+ end
+ end
+
+ def exec_migration(conn, direction)
+ @connection = conn
+ if respond_to?(:change)
+ if direction == :down
+ revert { change }
+ else
+ change
+ end
+ else
+ send(direction)
+ end
+ ensure
+ @connection = nil
+ end
+
+ def write(text = "")
+ puts(text) if verbose
+ end
+
+ def announce(message)
+ text = "#{version} #{name}: #{message}"
+ length = [0, 75 - text.length].max
+ write "== %s %s" % [text, "=" * length]
+ end
+
+ def say(message, subitem = false)
+ write "#{subitem ? " ->" : "--"} #{message}"
+ end
+
+ def say_with_time(message)
+ say(message)
+ result = nil
+ time = Benchmark.measure { result = yield }
+ say "%.4fs" % time.real, :subitem
+ say("#{result} rows", :subitem) if result.is_a?(Integer)
+ result
+ end
+
+ def suppress_messages
+ save, self.verbose = verbose, false
+ yield
+ ensure
+ self.verbose = save
+ end
+
+ def connection
+ @connection || ActiveRecord::Base.connection
+ end
+
+ def method_missing(method, *arguments, &block)
+ arg_list = arguments.map(&:inspect) * ", "
+
+ say_with_time "#{method}(#{arg_list})" do
+ unless connection.respond_to? :revert
+ unless arguments.empty? || [:execute, :enable_extension, :disable_extension].include?(method)
+ arguments[0] = proper_table_name(arguments.first, table_name_options)
+ if [:rename_table, :add_foreign_key].include?(method) ||
+ (method == :remove_foreign_key && !arguments.second.is_a?(Hash))
+ arguments[1] = proper_table_name(arguments.second, table_name_options)
+ end
+ end
+ end
+ return super unless connection.respond_to?(method)
+ connection.send(method, *arguments, &block)
+ end
+ end
+
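+ # Copies the migrations found in +sources+ (a hash mapping a scope to a
+ # migrations path) into the +destination+ directory. Each copied file is
+ # annotated with a comment noting its origin and renumbered past the last
+ # migration already present; migrations whose names already exist in the
+ # destination are skipped. Supports <tt>:on_skip</tt> and <tt>:on_copy</tt>
+ # callbacks and returns the array of copied migrations.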
+ def copy(destination, sources, options = {})
+ copied = []
+
+ FileUtils.mkdir_p(destination) unless File.exist?(destination)
+
+ destination_migrations = ActiveRecord::Migrator.migrations(destination)
+ last = destination_migrations.last
+ sources.each do |scope, path|
+ source_migrations = ActiveRecord::Migrator.migrations(path)
+
+ source_migrations.each do |migration|
+ source = File.binread(migration.filename)
+ inserted_comment = "# This migration comes from #{scope} (originally #{migration.version})\n"
+ magic_comments = "".dup
+ loop do
+ # If we have a magic comment in the original migration,
+ # insert our comment after the first newline (the end of the magic comment line)
+ # so the magic keeps working.
+ # Note that magic comments must be on the first line (except for a shebang).
+ source.sub!(/\A(?:#.*\b(?:en)?coding:\s*\S+|#\s*frozen_string_literal:\s*(?:true|false)).*\n/) do |magic_comment|
+ magic_comments << magic_comment; ""
+ end || break
+ end
+ source = "#{magic_comments}#{inserted_comment}#{source}"
+
+ if duplicate = destination_migrations.detect { |m| m.name == migration.name }
+ if options[:on_skip] && duplicate.scope != scope.to_s
+ options[:on_skip].call(scope, migration)
+ end
+ next
+ end
+
+ migration.version = next_migration_number(last ? last.version + 1 : 0).to_i
+ new_path = File.join(destination, "#{migration.version}_#{migration.name.underscore}.#{scope}.rb")
+ old_path, migration.filename = migration.filename, new_path
+ last = migration
+
+ File.binwrite(migration.filename, source)
+ copied << migration
+ options[:on_copy].call(scope, migration, old_path) if options[:on_copy]
+ destination_migrations << migration
+ end
+ end
+
+ copied
+ end
+
+ # Finds the correct table name given an Active Record object.
+ # Uses the Active Record object's own table_name, or pre/suffix from the
+ # options passed in.
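+ #
+ # For example (assuming a Post model):
+ #
+ #   proper_table_name(Post)                                  # => "posts"
+ #   proper_table_name("posts", table_name_prefix: "legacy_") # => "legacy_posts"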
+ def proper_table_name(name, options = {})
+ if name.respond_to? :table_name
+ name.table_name
+ else
+ "#{options[:table_name_prefix]}#{name}#{options[:table_name_suffix]}"
+ end
+ end
+
+ # Determines the version number of the next migration.
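+ # With timestamped migrations, this is the current UTC timestamp or the given
+ # +number+ zero-padded to 14 digits, whichever is greater; otherwise it is the
+ # given +number+ normalized to the serial format.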
+ def next_migration_number(number)
+ if ActiveRecord::Base.timestamped_migrations
+ [Time.now.utc.strftime("%Y%m%d%H%M%S"), "%.14d" % number].max
+ else
+ SchemaMigration.normalize_migration_number(number)
+ end
+ end
+
+ # Builds a hash for use in ActiveRecord::Migration#proper_table_name using
+ # the Active Record object's table_name prefix and suffix
+ def table_name_options(config = ActiveRecord::Base) #:nodoc:
+ {
+ table_name_prefix: config.table_name_prefix,
+ table_name_suffix: config.table_name_suffix
+ }
+ end
+
+ private
+ def execute_block
+ if connection.respond_to? :execute_block
+ super # use normal delegation to record the block
+ else
+ yield
+ end
+ end
+ end
+
+ # MigrationProxy is used to defer loading of the actual migration classes
+ # until they are needed.
+ MigrationProxy = Struct.new(:name, :version, :filename, :scope) do
+ def initialize(name, version, filename, scope)
+ super
+ @migration = nil
+ end
+
+ def basename
+ File.basename(filename)
+ end
+
+ def mtime
+ File.mtime filename
+ end
+
+ delegate :migrate, :announce, :write, :disable_ddl_transaction, to: :migration
+
+ private
+
+ def migration
+ @migration ||= load_migration
+ end
+
+ def load_migration
+ require(File.expand_path(filename))
+ name.constantize.new(name, version)
+ end
+ end
+
+ class NullMigration < MigrationProxy #:nodoc:
+ def initialize
+ super(nil, 0, nil, nil)
+ end
+
+ def mtime
+ 0
+ end
+ end
+
+ class Migrator#:nodoc:
+ class << self
+ attr_writer :migrations_paths
+ alias :migrations_path= :migrations_paths=
+
+ def migrate(migrations_paths, target_version = nil, &block)
+ case
+ when target_version.nil?
+ up(migrations_paths, target_version, &block)
+ when current_version == 0 && target_version == 0
+ []
+ when current_version > target_version
+ down(migrations_paths, target_version, &block)
+ else
+ up(migrations_paths, target_version, &block)
+ end
+ end
+
+ def rollback(migrations_paths, steps = 1)
+ move(:down, migrations_paths, steps)
+ end
+
+ def forward(migrations_paths, steps = 1)
+ move(:up, migrations_paths, steps)
+ end
+
+ def up(migrations_paths, target_version = nil)
+ migrations = migrations(migrations_paths)
+ migrations.select! { |m| yield m } if block_given?
+
+ new(:up, migrations, target_version).migrate
+ end
+
+ def down(migrations_paths, target_version = nil)
+ migrations = migrations(migrations_paths)
+ migrations.select! { |m| yield m } if block_given?
+
+ new(:down, migrations, target_version).migrate
+ end
+
+ def run(direction, migrations_paths, target_version)
+ new(direction, migrations(migrations_paths), target_version).run
+ end
+
+ def open(migrations_paths)
+ new(:up, migrations(migrations_paths), nil)
+ end
+
+ def schema_migrations_table_name
+ SchemaMigration.table_name
+ end
+ deprecate :schema_migrations_table_name
+
+ def get_all_versions(connection = Base.connection)
+ if SchemaMigration.table_exists?
+ SchemaMigration.all_versions.map(&:to_i)
+ else
+ []
+ end
+ end
+
+ def current_version(connection = Base.connection)
+ get_all_versions(connection).max || 0
+ end
+
+ def needs_migration?(connection = Base.connection)
+ (migrations(migrations_paths).collect(&:version) - get_all_versions(connection)).size > 0
+ end
+
+ def any_migrations?
+ migrations(migrations_paths).any?
+ end
+
+ def last_migration #:nodoc:
+ migrations(migrations_paths).last || NullMigration.new
+ end
+
+ def migrations_paths
+ @migrations_paths ||= ["db/migrate"]
+ # just to not break things if someone uses: migrations_path = some_string
+ Array(@migrations_paths)
+ end
+
+ def parse_migration_filename(filename) # :nodoc:
+ File.basename(filename).scan(Migration::MigrationFilenameRegexp).first
+ end
+
+ def migrations(paths)
+ paths = Array(paths)
+
+ migrations = migration_files(paths).map do |file|
+ version, name, scope = parse_migration_filename(file)
+ raise IllegalMigrationNameError.new(file) unless version
+ version = version.to_i
+ name = name.camelize
+
+ MigrationProxy.new(name, version, file, scope)
+ end
+
+ migrations.sort_by(&:version)
+ end
+
+ def migrations_status(paths)
+ paths = Array(paths)
+
+ db_list = ActiveRecord::SchemaMigration.normalized_versions
+
+ file_list = migration_files(paths).map do |file|
+ version, name, scope = parse_migration_filename(file)
+ raise IllegalMigrationNameError.new(file) unless version
+ version = ActiveRecord::SchemaMigration.normalize_migration_number(version)
+ status = db_list.delete(version) ? "up" : "down"
+ [status, version, (name + scope).humanize]
+ end.compact
+
+ db_list.map! do |version|
+ ["up", version, "********** NO FILE **********"]
+ end
+
+ (db_list + file_list).sort_by { |_, version, _| version }
+ end
+
+ def migration_files(paths)
+ Dir[*paths.flat_map { |path| "#{path}/**/[0-9]*_*.rb" }]
+ end
+
+ private
+
+ def move(direction, migrations_paths, steps)
+ migrator = new(direction, migrations(migrations_paths))
+
+ if current_version != 0 && !migrator.current_migration
+ raise UnknownMigrationVersionError.new(current_version)
+ end
+
+ start_index =
+ if current_version == 0
+ 0
+ else
+ migrator.migrations.index(migrator.current_migration)
+ end
+
+ finish = migrator.migrations[start_index + steps]
+ version = finish ? finish.version : 0
+ send(direction, migrations_paths, version)
+ end
+ end
+
+ def initialize(direction, migrations, target_version = nil)
+ @direction = direction
+ @target_version = target_version
+ @migrated_versions = nil
+ @migrations = migrations
+
+ validate(@migrations)
+
+ ActiveRecord::SchemaMigration.create_table
+ ActiveRecord::InternalMetadata.create_table
+ end
+
+ def current_version
+ migrated.max || 0
+ end
+
+ def current_migration
+ migrations.detect { |m| m.version == current_version }
+ end
+ alias :current :current_migration
+
+ def run
+ if use_advisory_lock?
+ with_advisory_lock { run_without_lock }
+ else
+ run_without_lock
+ end
+ end
+
+ def migrate
+ if use_advisory_lock?
+ with_advisory_lock { migrate_without_lock }
+ else
+ migrate_without_lock
+ end
+ end
+
+ def runnable
+ runnable = migrations[start..finish]
+ if up?
+ runnable.reject { |m| ran?(m) }
+ else
+ # skip the last migration if we're headed down, but not ALL the way down
+ runnable.pop if target
+ runnable.find_all { |m| ran?(m) }
+ end
+ end
+
+ def migrations
+ down? ? @migrations.reverse : @migrations.sort_by(&:version)
+ end
+
+ def pending_migrations
+ already_migrated = migrated
+ migrations.reject { |m| already_migrated.include?(m.version) }
+ end
+
+ def migrated
+ @migrated_versions || load_migrated
+ end
+
+ def load_migrated
+ @migrated_versions = Set.new(self.class.get_all_versions)
+ end
+
+ private
+
+ # Used for running a specific migration.
+ def run_without_lock
+ migration = migrations.detect { |m| m.version == @target_version }
+ raise UnknownMigrationVersionError.new(@target_version) if migration.nil?
+ result = execute_migration_in_transaction(migration, @direction)
+
+ record_environment
+ result
+ end
+
+ # Used for running multiple migrations up to or down to a certain value.
+ def migrate_without_lock
+ if invalid_target?
+ raise UnknownMigrationVersionError.new(@target_version)
+ end
+
+ result = runnable.each do |migration|
+ execute_migration_in_transaction(migration, @direction)
+ end
+
+ record_environment
+ result
+ end
+
+ # Stores the current environment in the database.
+ def record_environment
+ return if down?
+ ActiveRecord::InternalMetadata[:environment] = ActiveRecord::Migrator.current_environment
+ end
+
+ def ran?(migration)
+ migrated.include?(migration.version.to_i)
+ end
+
+ # Returns true if a target version was requested but no migration with that version exists.
+ def invalid_target?
+ !target && @target_version && @target_version > 0
+ end
+
+ def execute_migration_in_transaction(migration, direction)
+ return if down? && !migrated.include?(migration.version.to_i)
+ return if up? && migrated.include?(migration.version.to_i)
+
+ Base.logger.info "Migrating to #{migration.name} (#{migration.version})" if Base.logger
+
+ ddl_transaction(migration) do
+ migration.migrate(direction)
+ record_version_state_after_migrating(migration.version)
+ end
+ rescue => e
+ msg = "An error has occurred, ".dup
+ msg << "this and " if use_transaction?(migration)
+ msg << "all later migrations canceled:\n\n#{e}"
+ raise StandardError, msg, e.backtrace
+ end
+
+ def target
+ migrations.detect { |m| m.version == @target_version }
+ end
+
+ def finish
+ migrations.index(target) || migrations.size - 1
+ end
+
+ def start
+ up? ? 0 : (migrations.index(current) || 0)
+ end
+
+ def validate(migrations)
+ name , = migrations.group_by(&:name).find { |_, v| v.length > 1 }
+ raise DuplicateMigrationNameError.new(name) if name
+
+ version , = migrations.group_by(&:version).find { |_, v| v.length > 1 }
+ raise DuplicateMigrationVersionError.new(version) if version
+ end
+
+ def record_version_state_after_migrating(version)
+ if down?
+ migrated.delete(version)
+ ActiveRecord::SchemaMigration.where(version: version.to_s).delete_all
+ else
+ migrated << version
+ ActiveRecord::SchemaMigration.create!(version: version.to_s)
+ end
+ end
+
+ def self.last_stored_environment
+ return nil if current_version == 0
+ raise NoEnvironmentInSchemaError unless ActiveRecord::InternalMetadata.table_exists?
+
+ environment = ActiveRecord::InternalMetadata[:environment]
+ raise NoEnvironmentInSchemaError unless environment
+ environment
+ end
+
+ def self.current_environment
+ ActiveRecord::ConnectionHandling::DEFAULT_ENV.call
+ end
+
+ def self.protected_environment?
+ ActiveRecord::Base.protected_environments.include?(last_stored_environment) if last_stored_environment
+ end
+
+ def up?
+ @direction == :up
+ end
+
+ def down?
+ @direction == :down
+ end
+
+ # Wrap the migration in a transaction only if supported by the adapter.
+ def ddl_transaction(migration)
+ if use_transaction?(migration)
+ Base.transaction { yield }
+ else
+ yield
+ end
+ end
+
+ def use_transaction?(migration)
+ !migration.disable_ddl_transaction && Base.connection.supports_ddl_transactions?
+ end
+
+ def use_advisory_lock?
+ Base.connection.supports_advisory_locks?
+ end
+
+ def with_advisory_lock
+ lock_id = generate_migrator_advisory_lock_id
+ got_lock = Base.connection.get_advisory_lock(lock_id)
+ raise ConcurrentMigrationError unless got_lock
+ load_migrated # reload schema_migrations to be sure it wasn't changed by another process before we got the lock
+ yield
+ ensure
+ Base.connection.release_advisory_lock(lock_id) if got_lock
+ end
+
+ MIGRATOR_SALT = 2053462845
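+ # The lock id combines the salt with a CRC32 of the current database name,
+ # so concurrent migrators against the same database contend for the same
+ # advisory lock.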
+ def generate_migrator_advisory_lock_id
+ db_name_hash = Zlib.crc32(Base.connection.current_database)
+ MIGRATOR_SALT * db_name_hash
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/migration/command_recorder.rb b/activerecord/lib/active_record/migration/command_recorder.rb
new file mode 100644
index 0000000000..a3a5e0fa16
--- /dev/null
+++ b/activerecord/lib/active_record/migration/command_recorder.rb
@@ -0,0 +1,240 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ class Migration
+ # <tt>ActiveRecord::Migration::CommandRecorder</tt> records commands done during
+ # a migration and knows how to reverse those commands. The CommandRecorder
+ # knows how to invert the following commands:
+ #
+ # * add_column
+ # * add_foreign_key
+ # * add_index
+ # * add_reference
+ # * add_timestamps
+ # * change_column
+ # * change_column_default (must supply a :from and :to option)
+ # * change_column_null
+ # * create_join_table
+ # * create_table
+ # * disable_extension
+ # * drop_join_table
+ # * drop_table (must supply a block)
+ # * enable_extension
+ # * remove_column (must supply a type)
+ # * remove_columns (must specify at least one column name)
+ # * remove_foreign_key (must supply a second table)
+ # * remove_index
+ # * remove_reference
+ # * remove_timestamps
+ # * rename_column
+ # * rename_index
+ # * rename_table
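+ #
+ # A minimal usage sketch (the table names are hypothetical):
+ #
+ #   recorder = ActiveRecord::Migration::CommandRecorder.new
+ #   recorder.record(:rename_table, [:old_posts, :posts])
+ #   recorder.inverse_of(:rename_table, [:old_posts, :posts])
+ #   # => [:rename_table, [:posts, :old_posts]]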
+ class CommandRecorder
+ ReversibleAndIrreversibleMethods = [:create_table, :create_join_table, :rename_table, :add_column, :remove_column,
+ :rename_index, :rename_column, :add_index, :remove_index, :add_timestamps, :remove_timestamps,
+ :change_column_default, :add_reference, :remove_reference, :transaction,
+ :drop_join_table, :drop_table, :execute_block, :enable_extension, :disable_extension,
+ :change_column, :execute, :remove_columns, :change_column_null,
+ :add_foreign_key, :remove_foreign_key
+ ]
+ include JoinTable
+
+ attr_accessor :commands, :delegate, :reverting
+
+ def initialize(delegate = nil)
+ @commands = []
+ @delegate = delegate
+ @reverting = false
+ end
+
+ # While executing the given block, the recorder will be in reverting mode.
+ # All commands recorded will end up being inverted
+ # and stored in reverse order.
+ # For example:
+ #
+ # recorder.revert{ recorder.record(:rename_table, [:old, :new]) }
+ # # same effect as recorder.record(:rename_table, [:new, :old])
+ def revert
+ @reverting = !@reverting
+ previous = @commands
+ @commands = []
+ yield
+ ensure
+ @commands = previous.concat(@commands.reverse)
+ @reverting = !@reverting
+ end
+
+ # Record +command+. +command+ should be a method name and arguments.
+ # For example:
+ #
+ # recorder.record(:method_name, [:arg1, :arg2])
+ def record(*command, &block)
+ if @reverting
+ @commands << inverse_of(*command, &block)
+ else
+ @commands << (command << block)
+ end
+ end
+
+ # Returns the inverse of the given command. For example:
+ #
+ # recorder.inverse_of(:rename_table, [:old, :new])
+ # # => [:rename_table, [:new, :old]]
+ #
+ # This method will raise an +IrreversibleMigration+ exception if it cannot
+ # invert the +command+.
+ def inverse_of(command, args, &block)
+ method = :"invert_#{command}"
+ raise IrreversibleMigration, <<-MSG.strip_heredoc unless respond_to?(method, true)
+ This migration uses #{command}, which is not automatically reversible.
+ To make the migration reversible you can either:
+ 1. Define #up and #down methods in place of the #change method.
+ 2. Use the #reversible method to define reversible behavior.
+ MSG
+ send(method, args, &block)
+ end
+
+ ReversibleAndIrreversibleMethods.each do |method|
+ class_eval <<-EOV, __FILE__, __LINE__ + 1
+ def #{method}(*args, &block) # def create_table(*args, &block)
+ record(:"#{method}", args, &block) # record(:create_table, args, &block)
+ end # end
+ EOV
+ end
+ alias :add_belongs_to :add_reference
+ alias :remove_belongs_to :remove_reference
+
+ def change_table(table_name, options = {}) # :nodoc:
+ yield delegate.update_table_definition(table_name, self)
+ end
+
+ private
+
+ module StraightReversions
+ private
+ { transaction: :transaction,
+ execute_block: :execute_block,
+ create_table: :drop_table,
+ create_join_table: :drop_join_table,
+ add_column: :remove_column,
+ add_timestamps: :remove_timestamps,
+ add_reference: :remove_reference,
+ enable_extension: :disable_extension
+ }.each do |cmd, inv|
+ [[inv, cmd], [cmd, inv]].uniq.each do |method, inverse|
+ class_eval <<-EOV, __FILE__, __LINE__ + 1
+ def invert_#{method}(args, &block) # def invert_create_table(args, &block)
+ [:#{inverse}, args, block] # [:drop_table, args, block]
+ end # end
+ EOV
+ end
+ end
+ end
+
+ include StraightReversions
+
+ def invert_drop_table(args, &block)
+ if args.size == 1 && block == nil
+ raise ActiveRecord::IrreversibleMigration, "To avoid mistakes, drop_table is only reversible if given options or a block (can be empty)."
+ end
+ super
+ end
+
+ def invert_rename_table(args)
+ [:rename_table, args.reverse]
+ end
+
+ def invert_remove_column(args)
+ raise ActiveRecord::IrreversibleMigration, "remove_column is only reversible if given a type." if args.size <= 2
+ super
+ end
+
+ def invert_rename_index(args)
+ [:rename_index, [args.first] + args.last(2).reverse]
+ end
+
+ def invert_rename_column(args)
+ [:rename_column, [args.first] + args.last(2).reverse]
+ end
+
+ def invert_add_index(args)
+ table, columns, options = *args
+ options ||= {}
+
+ index_name = options[:name]
+ options_hash = index_name ? { name: index_name } : { column: columns }
+
+ [:remove_index, [table, options_hash]]
+ end
+
+ def invert_remove_index(args)
+ table, options_or_column = *args
+ if (options = options_or_column).is_a?(Hash)
+ unless options[:column]
+ raise ActiveRecord::IrreversibleMigration, "remove_index is only reversible if given a :column option."
+ end
+ options = options.dup
+ [:add_index, [table, options.delete(:column), options]]
+ elsif (column = options_or_column).present?
+ [:add_index, [table, column]]
+ end
+ end
+
+ alias :invert_add_belongs_to :invert_add_reference
+ alias :invert_remove_belongs_to :invert_remove_reference
+
+ def invert_change_column_default(args)
+ table, column, options = *args
+
+ unless options && options.is_a?(Hash) && options.has_key?(:from) && options.has_key?(:to)
+ raise ActiveRecord::IrreversibleMigration, "change_column_default is only reversible if given a :from and :to option."
+ end
+
+ [:change_column_default, [table, column, from: options[:to], to: options[:from]]]
+ end
+
+ def invert_change_column_null(args)
+ args[2] = !args[2]
+ [:change_column_null, args]
+ end
+
+ def invert_add_foreign_key(args)
+ from_table, to_table, add_options = args
+ add_options ||= {}
+
+ if add_options[:name]
+ options = { name: add_options[:name] }
+ elsif add_options[:column]
+ options = { column: add_options[:column] }
+ else
+ options = to_table
+ end
+
+ [:remove_foreign_key, [from_table, options]]
+ end
+
+ def invert_remove_foreign_key(args)
+ from_table, to_table, remove_options = args
+ raise ActiveRecord::IrreversibleMigration, "remove_foreign_key is only reversible if given a second table" if to_table.nil? || to_table.is_a?(Hash)
+
+ reversed_args = [from_table, to_table]
+ reversed_args << remove_options if remove_options
+
+ [:add_foreign_key, reversed_args]
+ end
+
+ def respond_to_missing?(method, _)
+ super || delegate.respond_to?(method)
+ end
+
+ # Forwards any missing method call to the \target.
+ def method_missing(method, *args, &block)
+ if delegate.respond_to?(method)
+ delegate.public_send(method, *args, &block)
+ else
+ super
+ end
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/migration/compatibility.rb b/activerecord/lib/active_record/migration/compatibility.rb
new file mode 100644
index 0000000000..d6595c9355
--- /dev/null
+++ b/activerecord/lib/active_record/migration/compatibility.rb
@@ -0,0 +1,170 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ class Migration
+ module Compatibility # :nodoc: all
+ def self.find(version)
+ version = version.to_s
+ name = "V#{version.tr('.', '_')}"
+ unless const_defined?(name)
+ versions = constants.grep(/\AV[0-9_]+\z/).map { |s| s.to_s.delete("V").tr("_", ".").inspect }
+ raise ArgumentError, "Unknown migration version #{version.inspect}; expected one of #{versions.sort.join(', ')}"
+ end
+ const_get(name)
+ end
+
+ V5_2 = Current
+
+ class V5_1 < V5_2
+ end
+
+ class V5_0 < V5_1
+ module TableDefinition
+ def references(*args, **options)
+ super(*args, type: :integer, **options)
+ end
+ alias :belongs_to :references
+ end
+
+ def create_table(table_name, options = {})
+ if adapter_name == "PostgreSQL"
+ if options[:id] == :uuid && !options.key?(:default)
+ options[:default] = "uuid_generate_v4()"
+ end
+ end
+
+ unless adapter_name == "Mysql2" && options[:id] == :bigint
+ if [:integer, :bigint].include?(options[:id]) && !options.key?(:default)
+ options[:default] = nil
+ end
+ end
+
+ # Since 5.1 Postgres adapter uses bigserial type for primary
+ # keys by default and MySQL uses bigint. This compat layer makes old migrations utilize
+ # serial/int type instead -- the way it used to work before 5.1.
+ unless options.key?(:id)
+ options[:id] = :integer
+ end
+
+ if block_given?
+ super(table_name, options) do |t|
+ class << t
+ prepend TableDefinition
+ end
+ yield t
+ end
+ else
+ super
+ end
+ end
+
+ def change_table(table_name, options = {})
+ if block_given?
+ super(table_name, options) do |t|
+ class << t
+ prepend TableDefinition
+ end
+ yield t
+ end
+ else
+ super
+ end
+ end
+
+ def add_reference(table_name, ref_name, **options)
+ super(table_name, ref_name, type: :integer, **options)
+ end
+ alias :add_belongs_to :add_reference
+ end
+
+ class V4_2 < V5_0
+ module TableDefinition
+ def references(*, **options)
+ options[:index] ||= false
+ super
+ end
+ alias :belongs_to :references
+
+ def timestamps(**options)
+ options[:null] = true if options[:null].nil?
+ super
+ end
+ end
+
+ def create_table(table_name, options = {})
+ if block_given?
+ super(table_name, options) do |t|
+ class << t
+ prepend TableDefinition
+ end
+ yield t
+ end
+ else
+ super
+ end
+ end
+
+ def change_table(table_name, options = {})
+ if block_given?
+ super(table_name, options) do |t|
+ class << t
+ prepend TableDefinition
+ end
+ yield t
+ end
+ else
+ super
+ end
+ end
+
+ def add_reference(*, **options)
+ options[:index] ||= false
+ super
+ end
+ alias :add_belongs_to :add_reference
+
+ def add_timestamps(_, **options)
+ options[:null] = true if options[:null].nil?
+ super
+ end
+
+ def index_exists?(table_name, column_name, options = {})
+ column_names = Array(column_name).map(&:to_s)
+ options[:name] =
+ if options[:name].present?
+ options[:name].to_s
+ else
+ index_name(table_name, column: column_names)
+ end
+ super
+ end
+
+ def remove_index(table_name, options = {})
+ options = { column: options } unless options.is_a?(Hash)
+ options[:name] = index_name_for_remove(table_name, options)
+ super(table_name, options)
+ end
+
+ private
+
+ def index_name_for_remove(table_name, options = {})
+ index_name = index_name(table_name, options)
+
+ unless index_name_exists?(table_name, index_name)
+ if options.is_a?(Hash) && options.has_key?(:name)
+ options_without_column = options.dup
+ options_without_column.delete :column
+ index_name_without_column = index_name(table_name, options_without_column)
+
+ return index_name_without_column if index_name_exists?(table_name, index_name_without_column)
+ end
+
+ raise ArgumentError, "Index name '#{index_name}' on table '#{table_name}' does not exist"
+ end
+
+ index_name
+ end
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/migration/join_table.rb b/activerecord/lib/active_record/migration/join_table.rb
new file mode 100644
index 0000000000..9abb289bb0
--- /dev/null
+++ b/activerecord/lib/active_record/migration/join_table.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ class Migration
+ module JoinTable #:nodoc:
+ private
+
+ def find_join_table_name(table_1, table_2, options = {})
+ options.delete(:table_name) || join_table_name(table_1, table_2)
+ end
+
+ def join_table_name(table_1, table_2)
+ ModelSchema.derive_join_table_name(table_1, table_2).to_sym
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/model_schema.rb b/activerecord/lib/active_record/model_schema.rb
new file mode 100644
index 0000000000..34c0ef4e75
--- /dev/null
+++ b/activerecord/lib/active_record/model_schema.rb
@@ -0,0 +1,515 @@
+# frozen_string_literal: true
+
+require "monitor"
+
+module ActiveRecord
+ module ModelSchema
+ extend ActiveSupport::Concern
+
+ ##
+ # :singleton-method: primary_key_prefix_type
+ # :call-seq: primary_key_prefix_type
+ #
+ # The prefix type that will be prepended to every primary key column name.
+ # The options are +:table_name+ and +:table_name_with_underscore+. If the first is specified,
+ # the Product class will look for "productid" instead of "id" as the primary column. If the
+ # latter is specified, the Product class will look for "product_id" instead of "id". Remember
+ # that this is a global setting for all Active Records.
+
+ ##
+ # :singleton-method: primary_key_prefix_type=
+ # :call-seq: primary_key_prefix_type=(prefix_type)
+ #
+ # Sets the prefix type that will be prepended to every primary key column name.
+ # The options are +:table_name+ and +:table_name_with_underscore+. If the first is specified,
+ # the Product class will look for "productid" instead of "id" as the primary column. If the
+ # latter is specified, the Product class will look for "product_id" instead of "id". Remember
+ # that this is a global setting for all Active Records.
+
+ ##
+ # :singleton-method: table_name_prefix
+ # :call-seq: table_name_prefix
+ #
+ # The prefix string to prepend to every table name.
+
+ ##
+ # :singleton-method: table_name_prefix=
+ # :call-seq: table_name_prefix=(prefix)
+ #
+ # Sets the prefix string to prepend to every table name. So if set to "basecamp_", all table
+ # names will be named like "basecamp_projects", "basecamp_people", etc. This is a convenient
+ # way of creating a namespace for tables in a shared database. By default, the prefix is the
+ # empty string.
+ #
+ # If you are organising your models within modules you can add a prefix to the models within
+ # a namespace by defining a singleton method in the parent module called table_name_prefix which
+ # returns your chosen prefix.
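+ #
+ # A sketch of that approach (the Admin module and Report model are hypothetical):
+ #
+ #   module Admin
+ #     def self.table_name_prefix
+ #       "admin_"
+ #     end
+ #   end
+ #
+ #   Admin::Report.table_name # => "admin_reports"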
+
+ ##
+ # :singleton-method: table_name_suffix
+ # :call-seq: table_name_suffix
+ #
+ # The suffix string to append to every table name.
+
+ ##
+ # :singleton-method: table_name_suffix=
+ # :call-seq: table_name_suffix=(suffix)
+ #
+ # Works like +table_name_prefix=+, but appends instead of prepends (set to "_basecamp" gives "projects_basecamp",
+ # "people_basecamp"). By default, the suffix is the empty string.
+ #
+ # If you are organising your models within modules, you can add a suffix to the models within
+ # a namespace by defining a singleton method in the parent module called table_name_suffix which
+ # returns your chosen suffix.
+
+ ##
+ # :singleton-method: schema_migrations_table_name
+ # :call-seq: schema_migrations_table_name
+ #
+ # The name of the schema migrations table. By default, the value is <tt>"schema_migrations"</tt>.
+
+ ##
+ # :singleton-method: schema_migrations_table_name=
+ # :call-seq: schema_migrations_table_name=(table_name)
+ #
+ # Sets the name of the schema migrations table.
+
+ ##
+ # :singleton-method: internal_metadata_table_name
+ # :call-seq: internal_metadata_table_name
+ #
+ # The name of the internal metadata table. By default, the value is <tt>"ar_internal_metadata"</tt>.
+
+ ##
+ # :singleton-method: internal_metadata_table_name=
+ # :call-seq: internal_metadata_table_name=(table_name)
+ #
+ # Sets the name of the internal metadata table.
+
+ ##
+ # :singleton-method: protected_environments
+ # :call-seq: protected_environments
+ #
+ # The array of names of environments where destructive actions should be prohibited. By default,
+ # the value is <tt>["production"]</tt>.
+
+ ##
+ # :singleton-method: protected_environments=
+ # :call-seq: protected_environments=(environments)
+ #
+ # Sets an array of names of environments where destructive actions should be prohibited.
+
+ ##
+ # :singleton-method: pluralize_table_names
+ # :call-seq: pluralize_table_names
+ #
+ # Indicates whether table names should be the pluralized versions of the corresponding class names.
+ # If true, the default table name for a Product class will be "products". If false, it would just be "product".
+ # See table_name for the full rules on table/class naming. This is true by default.
+
+ ##
+ # :singleton-method: pluralize_table_names=
+ # :call-seq: pluralize_table_names=(value)
+ #
+ # Set whether table names should be the pluralized versions of the corresponding class names.
+ # If true, the default table name for a Product class will be "products". If false, it would just be "product".
+ # See table_name for the full rules on table/class naming. This is true by default.
+
+ ##
+ # :singleton-method: ignored_columns
+ # :call-seq: ignored_columns
+ #
+ # The list of column names the model should ignore. Ignored columns won't have attribute
+ # accessors defined, and won't be referenced in SQL queries.
+
+ ##
+ # :singleton-method: ignored_columns=
+ # :call-seq: ignored_columns=(columns)
+ #
+ # Sets the column names the model should ignore. Ignored columns won't have attribute
+ # accessors defined, and won't be referenced in SQL queries.
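+ #
+ # For example (the model and column name are hypothetical):
+ #
+ #   class Project < ActiveRecord::Base
+ #     self.ignored_columns = ["legacy_options"]
+ #   end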
+
+ included do
+ mattr_accessor :primary_key_prefix_type, instance_writer: false
+
+ class_attribute :table_name_prefix, instance_writer: false, default: ""
+ class_attribute :table_name_suffix, instance_writer: false, default: ""
+ class_attribute :schema_migrations_table_name, instance_accessor: false, default: "schema_migrations"
+ class_attribute :internal_metadata_table_name, instance_accessor: false, default: "ar_internal_metadata"
+ class_attribute :protected_environments, instance_accessor: false, default: [ "production" ]
+ class_attribute :pluralize_table_names, instance_writer: false, default: true
+ class_attribute :ignored_columns, instance_accessor: false, default: [].freeze
+
+ self.inheritance_column = "type"
+
+ delegate :type_for_attribute, to: :class
+
+ initialize_load_schema_monitor
+ end
+
+ # Derives the join table name for +first_table+ and +second_table+. The
+ # table names appear in alphabetical order. A common prefix is removed
+ # (useful for namespaced models like Music::Artist and Music::Record):
+ #
+ # artists, records => artists_records
+ # records, artists => artists_records
+ # music_artists, music_records => music_artists_records
+ def self.derive_join_table_name(first_table, second_table) # :nodoc:
+ [first_table.to_s, second_table.to_s].sort.join("\0").gsub(/^(.*_)(.+)\0\1(.+)/, '\1\2_\3').tr("\0", "_")
+ end
+
+ module ClassMethods
+ # Guesses the table name (in forced lower-case) based on the name of the class in the
+ # inheritance hierarchy descending directly from ActiveRecord::Base. So if the hierarchy
+ # looks like: Reply < Message < ActiveRecord::Base, then Message is used
+ # to guess the table name even when called on Reply. The rules used to do the guess
+ # are handled by the Inflector class in Active Support, which knows almost all common
+ # English inflections. You can add new inflections in config/initializers/inflections.rb.
+ #
+ # Nested classes are given table names prefixed by the singular form of
+ # the parent's table name. Enclosing modules are not considered.
+ #
+ # ==== Examples
+ #
+ # class Invoice < ActiveRecord::Base
+ # end
+ #
+ # file class table_name
+ # invoice.rb Invoice invoices
+ #
+ # class Invoice < ActiveRecord::Base
+ # class Lineitem < ActiveRecord::Base
+ # end
+ # end
+ #
+ # file class table_name
+ # invoice.rb Invoice::Lineitem invoice_lineitems
+ #
+ # module Invoice
+ # class Lineitem < ActiveRecord::Base
+ # end
+ # end
+ #
+ # file class table_name
+ # invoice/lineitem.rb Invoice::Lineitem lineitems
+ #
+ # Additionally, the class-level +table_name_prefix+ is prepended and the
+ # +table_name_suffix+ is appended. So if you have "myapp_" as a prefix,
+ # the table name guess for an Invoice class becomes "myapp_invoices".
+ # Invoice::Lineitem becomes "myapp_invoice_lineitems".
+ #
+ # You can also set your own table name explicitly:
+ #
+ # class Mouse < ActiveRecord::Base
+ # self.table_name = "mice"
+ # end
+ def table_name
+ reset_table_name unless defined?(@table_name)
+ @table_name
+ end
+
+ # Sets the table name explicitly. Example:
+ #
+ # class Project < ActiveRecord::Base
+ # self.table_name = "project"
+ # end
+ def table_name=(value)
+ value = value && value.to_s
+
+ if defined?(@table_name)
+ return if value == @table_name
+ reset_column_information if connected?
+ end
+
+ @table_name = value
+ @quoted_table_name = nil
+ @arel_table = nil
+ @sequence_name = nil unless defined?(@explicit_sequence_name) && @explicit_sequence_name
+ @predicate_builder = nil
+ end
+
+ # Returns a quoted version of the table name, used to construct SQL statements.
+ def quoted_table_name
+ @quoted_table_name ||= connection.quote_table_name(table_name)
+ end
+
+ # Computes the table name, (re)sets it internally, and returns it.
+ def reset_table_name #:nodoc:
+ self.table_name = if abstract_class?
+ superclass == Base ? nil : superclass.table_name
+ elsif superclass.abstract_class?
+ superclass.table_name || compute_table_name
+ else
+ compute_table_name
+ end
+ end
+
+ def full_table_name_prefix #:nodoc:
+ (parents.detect { |p| p.respond_to?(:table_name_prefix) } || self).table_name_prefix
+ end
+
+ def full_table_name_suffix #:nodoc:
+ (parents.detect { |p| p.respond_to?(:table_name_suffix) } || self).table_name_suffix
+ end
+
+ # Defines the name of the table column which will store the class name on single-table
+ # inheritance situations.
+ #
+ # The default inheritance column name is +type+, which means it's a
+ # reserved word inside Active Record. To be able to use single-table
+ # inheritance with another column name, or to use the column +type+ in
+ # your own model for something else, you can set +inheritance_column+:
+ #
+ # self.inheritance_column = 'zoink'
+ def inheritance_column
+ (@inheritance_column ||= nil) || superclass.inheritance_column
+ end
+
+ # Sets the value of inheritance_column
+ def inheritance_column=(value)
+ @inheritance_column = value.to_s
+ @explicit_inheritance_column = true
+ end
+
+ def sequence_name
+ if base_class == self
+ @sequence_name ||= reset_sequence_name
+ else
+ (@sequence_name ||= nil) || base_class.sequence_name
+ end
+ end
+
+ def reset_sequence_name #:nodoc:
+ @explicit_sequence_name = false
+ @sequence_name = connection.default_sequence_name(table_name, primary_key)
+ end
+
+ # Sets the name of the sequence to use when generating ids to the given
+ # value, or (if the value is +nil+ or +false+) to the value returned by the
+ # given block. This is required for Oracle and is useful for any
+ # database which relies on sequences for primary key generation.
+ #
+ # If a sequence name is not explicitly set when using Oracle,
+ # it will default to the commonly used pattern of: #{table_name}_seq
+ #
+ # If a sequence name is not explicitly set when using PostgreSQL, it
+ # will discover the sequence corresponding to your primary key for you.
+ #
+ # class Project < ActiveRecord::Base
+ # self.sequence_name = "projectseq" # default would have been "project_seq"
+ # end
+ def sequence_name=(value)
+ @sequence_name = value.to_s
+ @explicit_sequence_name = true
+ end
+
+ # Determines if the primary key values should be selected from their
+ # corresponding sequence before the insert statement.
+ def prefetch_primary_key?
+ connection.prefetch_primary_key?(table_name)
+ end
+
+ # Returns the next value that will be used as the primary key on
+ # an insert statement.
+ def next_sequence_value
+ connection.next_sequence_value(sequence_name)
+ end
+
+ # Indicates whether the table associated with this class exists
+ def table_exists?
+ connection.schema_cache.data_source_exists?(table_name)
+ end
+
+ def attributes_builder # :nodoc:
+ @attributes_builder ||= AttributeSet::Builder.new(attribute_types, primary_key) do |name|
+ unless columns_hash.key?(name)
+ _default_attributes[name].dup
+ end
+ end
+ end
+
+ def columns_hash # :nodoc:
+ load_schema
+ @columns_hash
+ end
+
+ def columns
+ load_schema
+ @columns ||= columns_hash.values
+ end
+
+ def attribute_types # :nodoc:
+ load_schema
+ @attribute_types ||= Hash.new(Type.default_value)
+ end
+
+ def yaml_encoder # :nodoc:
+ @yaml_encoder ||= AttributeSet::YAMLEncoder.new(attribute_types)
+ end
+
+ # Returns the type of the attribute with the given name, after applying
+ # all modifiers. This method is the only valid source of information for
+ # anything related to the types of a model's attributes. This method will
+ # access the database and load the model's schema if it is required.
+ #
+ # The return value of this method will implement the interface described
+ # by ActiveModel::Type::Value (though the object itself may not subclass
+ # it).
+ #
+ # +attr_name+ The name of the attribute to retrieve the type for. Must be
+ # a string
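+ #
+ #   # assuming a Post model with an integer likes_count column
+ #   Post.type_for_attribute("likes_count").type # => :integer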
+ def type_for_attribute(attr_name, &block)
+ if block
+ attribute_types.fetch(attr_name, &block)
+ else
+ attribute_types[attr_name]
+ end
+ end
+
+ # Returns a hash where the keys are column names and the values are
+ # default values when instantiating the Active Record object for this table.
+ def column_defaults
+ load_schema
+ @column_defaults ||= _default_attributes.to_hash
+ end
+
+ def _default_attributes # :nodoc:
+ @default_attributes ||= AttributeSet.new({})
+ end
+
+ # Returns an array of column names as strings.
+ def column_names
+ @column_names ||= columns.map(&:name)
+ end
+
+ # Returns an array of column objects where the primary id, all columns ending in "_id" or "_count",
+ # and columns used for single table inheritance have been removed.
+ def content_columns
+ @content_columns ||= columns.reject do |c|
+ c.name == primary_key ||
+ c.name == inheritance_column ||
+ c.name.end_with?("_id") ||
+ c.name.end_with?("_count")
+ end
+ end
+
+ # Resets all the cached information about columns, which will cause them
+ # to be reloaded on the next request.
+ #
+ # The most common usage pattern for this method is probably in a migration,
+ # when just after creating a table you want to populate it with some default
+ # values, e.g.:
+ #
+ # class CreateJobLevels < ActiveRecord::Migration[5.0]
+ # def up
+ # create_table :job_levels do |t|
+ # t.integer :id
+ # t.string :name
+ #
+ # t.timestamps
+ # end
+ #
+ # JobLevel.reset_column_information
+ # %w{assistant executive manager director}.each do |type|
+ # JobLevel.create(name: type)
+ # end
+ # end
+ #
+ # def down
+ # drop_table :job_levels
+ # end
+ # end
+ def reset_column_information
+ connection.clear_cache!
+ undefine_attribute_methods
+ connection.schema_cache.clear_data_source_cache!(table_name)
+
+ reload_schema_from_cache
+ initialize_find_by_cache
+ end
+
+ protected
+
+ def initialize_load_schema_monitor
+ @load_schema_monitor = Monitor.new
+ end
+
+ private
+
+ def inherited(child_class)
+ super
+ child_class.initialize_load_schema_monitor
+ end
+
+ def schema_loaded?
+ defined?(@schema_loaded) && @schema_loaded
+ end
+
+ def load_schema
+ return if schema_loaded?
+ @load_schema_monitor.synchronize do
+ return if defined?(@columns_hash) && @columns_hash
+
+ load_schema!
+
+ @schema_loaded = true
+ end
+ end
+
+ def load_schema!
+ @columns_hash = connection.schema_cache.columns_hash(table_name).except(*ignored_columns)
+ @columns_hash.each do |name, column|
+ define_attribute(
+ name,
+ connection.lookup_cast_type_from_column(column),
+ default: column.default,
+ user_provided_default: false
+ )
+ end
+ end
+
+ def reload_schema_from_cache
+ @arel_table = nil
+ @column_names = nil
+ @attribute_types = nil
+ @content_columns = nil
+ @default_attributes = nil
+ @column_defaults = nil
+ @inheritance_column = nil unless defined?(@explicit_inheritance_column) && @explicit_inheritance_column
+ @attributes_builder = nil
+ @columns = nil
+ @columns_hash = nil
+ @schema_loaded = false
+ @attribute_names = nil
+ @yaml_encoder = nil
+ direct_descendants.each do |descendant|
+ descendant.send(:reload_schema_from_cache)
+ end
+ end
+
+ # Guesses the table name, but does not decorate it with prefix and suffix information.
+ def undecorated_table_name(class_name = base_class.name)
+ table_name = class_name.to_s.demodulize.underscore
+ pluralize_table_names ? table_name.pluralize : table_name
+ end
+
+ # Computes and returns a table name according to default conventions.
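+ # Illustrative, assuming a hypothetical nested model:
+ #
+ # class Invoice < ActiveRecord::Base
+ # class Lineitem < ActiveRecord::Base
+ # end
+ # end
+ #
+ # Invoice::Lineitem.table_name # => "invoice_lineitems"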
+ def compute_table_name
+ base = base_class
+ if self == base
+ # Nested classes are prefixed with singular parent table name.
+ if parent < Base && !parent.abstract_class?
+ contained = parent.table_name
+ contained = contained.singularize if parent.pluralize_table_names
+ contained += "_"
+ end
+
+ "#{full_table_name_prefix}#{contained}#{undecorated_table_name(name)}#{full_table_name_suffix}"
+ else
+ # STI subclasses always use their superclass' table.
+ base.table_name
+ end
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/nested_attributes.rb b/activerecord/lib/active_record/nested_attributes.rb
new file mode 100644
index 0000000000..1864ca5ad2
--- /dev/null
+++ b/activerecord/lib/active_record/nested_attributes.rb
@@ -0,0 +1,589 @@
+# frozen_string_literal: true
+
+require "active_support/core_ext/hash/except"
+require "active_support/core_ext/object/try"
+require "active_support/core_ext/hash/indifferent_access"
+
+module ActiveRecord
+ module NestedAttributes #:nodoc:
+ class TooManyRecords < ActiveRecordError
+ end
+
+ extend ActiveSupport::Concern
+
+ included do
+ class_attribute :nested_attributes_options, instance_writer: false, default: {}
+ end
+
+ # = Active Record Nested Attributes
+ #
+ # Nested attributes allow you to save attributes on associated records
+ # through the parent. By default nested attribute updating is turned off
+ # and you can enable it using the accepts_nested_attributes_for class
+ # method. When you enable nested attributes an attribute writer is
+ # defined on the model.
+ #
+ # The attribute writer is named after the association, which means that
+ # in the following example, two new methods are added to your model:
+ #
+ # <tt>author_attributes=(attributes)</tt> and
+ # <tt>pages_attributes=(attributes)</tt>.
+ #
+ # class Book < ActiveRecord::Base
+ # has_one :author
+ # has_many :pages
+ #
+ # accepts_nested_attributes_for :author, :pages
+ # end
+ #
+ # Note that the <tt>:autosave</tt> option is automatically enabled on every
+ # association that accepts_nested_attributes_for is used for.
+ #
+ # === One-to-one
+ #
+ # Consider a Member model that has one Avatar:
+ #
+ # class Member < ActiveRecord::Base
+ # has_one :avatar
+ # accepts_nested_attributes_for :avatar
+ # end
+ #
+ # Enabling nested attributes on a one-to-one association allows you to
+ # create the member and avatar in one go:
+ #
+ # params = { member: { name: 'Jack', avatar_attributes: { icon: 'smiling' } } }
+ # member = Member.create(params[:member])
+ # member.avatar.id # => 2
+ # member.avatar.icon # => 'smiling'
+ #
+ # It also allows you to update the avatar through the member:
+ #
+ # params = { member: { avatar_attributes: { id: '2', icon: 'sad' } } }
+ # member.update params[:member]
+ # member.avatar.icon # => 'sad'
+ #
+ # By default you will only be able to set and update attributes on the
+ # associated model. If you want to destroy the associated model through the
+ # attributes hash, you have to enable it first using the
+ # <tt>:allow_destroy</tt> option.
+ #
+ # class Member < ActiveRecord::Base
+ # has_one :avatar
+ # accepts_nested_attributes_for :avatar, allow_destroy: true
+ # end
+ #
+ # Now, when you add the <tt>_destroy</tt> key to the attributes hash, with a
+ # value that evaluates to +true+, you will destroy the associated model:
+ #
+ # member.avatar_attributes = { id: '2', _destroy: '1' }
+ # member.avatar.marked_for_destruction? # => true
+ # member.save
+ # member.reload.avatar # => nil
+ #
+ # Note that the model will _not_ be destroyed until the parent is saved.
+ #
+ # Also note that the model will not be destroyed unless you also specify
+ # its id in the updated hash.
+ #
+ # === One-to-many
+ #
+ # Consider a member that has a number of posts:
+ #
+ # class Member < ActiveRecord::Base
+ # has_many :posts
+ # accepts_nested_attributes_for :posts
+ # end
+ #
+ # You can now set or update attributes on the associated posts through
+ # an attribute hash for a member: include the key +:posts_attributes+
+ # with an array of hashes of post attributes as a value.
+ #
+ # For each hash that does _not_ have an <tt>id</tt> key a new record will
+ # be instantiated, unless the hash also contains a <tt>_destroy</tt> key
+ # that evaluates to +true+.
+ #
+ # params = { member: {
+ # name: 'joe', posts_attributes: [
+ # { title: 'Kari, the awesome Ruby documentation browser!' },
+ # { title: 'The egalitarian assumption of the modern citizen' },
+ # { title: '', _destroy: '1' } # this will be ignored
+ # ]
+ # }}
+ #
+ # member = Member.create(params[:member])
+ # member.posts.length # => 2
+ # member.posts.first.title # => 'Kari, the awesome Ruby documentation browser!'
+ # member.posts.second.title # => 'The egalitarian assumption of the modern citizen'
+ #
+ # You may also set a +:reject_if+ proc to silently ignore any new record
+ # hashes if they fail to pass your criteria. For example, the previous
+ # example could be rewritten as:
+ #
+ # class Member < ActiveRecord::Base
+ # has_many :posts
+ # accepts_nested_attributes_for :posts, reject_if: proc { |attributes| attributes['title'].blank? }
+ # end
+ #
+ # params = { member: {
+ # name: 'joe', posts_attributes: [
+ # { title: 'Kari, the awesome Ruby documentation browser!' },
+ # { title: 'The egalitarian assumption of the modern citizen' },
+ # { title: '' } # this will be ignored because of the :reject_if proc
+ # ]
+ # }}
+ #
+ # member = Member.create(params[:member])
+ # member.posts.length # => 2
+ # member.posts.first.title # => 'Kari, the awesome Ruby documentation browser!'
+ # member.posts.second.title # => 'The egalitarian assumption of the modern citizen'
+ #
+ # Alternatively, +:reject_if+ also accepts a symbol for using methods:
+ #
+ # class Member < ActiveRecord::Base
+ # has_many :posts
+ # accepts_nested_attributes_for :posts, reject_if: :new_record?
+ # end
+ #
+ # class Member < ActiveRecord::Base
+ # has_many :posts
+ # accepts_nested_attributes_for :posts, reject_if: :reject_posts
+ #
+ # def reject_posts(attributes)
+ # attributes['title'].blank?
+ # end
+ # end
+ #
+ # If the hash contains an <tt>id</tt> key that matches an already
+ # associated record, the matching record will be modified:
+ #
+ # member.attributes = {
+ # name: 'Joe',
+ # posts_attributes: [
+ # { id: 1, title: '[UPDATED] An, as of yet, undisclosed awesome Ruby documentation browser!' },
+ # { id: 2, title: '[UPDATED] other post' }
+ # ]
+ # }
+ #
+ # member.posts.first.title # => '[UPDATED] An, as of yet, undisclosed awesome Ruby documentation browser!'
+ # member.posts.second.title # => '[UPDATED] other post'
+ #
+ # However, the above applies only if the parent model is being updated as
+ # well. For example, if you wanted to create a +member+ named _joe_ and
+ # wanted to update the +posts+ at the same time, an
+ # ActiveRecord::RecordNotFound error would be raised.
+ #
+ # By default the associated records are protected from being destroyed. If
+ # you want to destroy any of the associated records through the attributes
+ # hash, you have to enable it first using the <tt>:allow_destroy</tt>
+ # option. This will allow you to also use the <tt>_destroy</tt> key to
+ # destroy existing records:
+ #
+ # class Member < ActiveRecord::Base
+ # has_many :posts
+ # accepts_nested_attributes_for :posts, allow_destroy: true
+ # end
+ #
+ # params = { member: {
+ # posts_attributes: [{ id: '2', _destroy: '1' }]
+ # }}
+ #
+ # member.attributes = params[:member]
+ # member.posts.detect { |p| p.id == 2 }.marked_for_destruction? # => true
+ # member.posts.length # => 2
+ # member.save
+ # member.reload.posts.length # => 1
+ #
+ # Nested attributes for an associated collection can also be passed in
+ # the form of a hash of hashes instead of an array of hashes:
+ #
+ # Member.create(
+ # name: 'joe',
+ # posts_attributes: {
+ # first: { title: 'Foo' },
+ # second: { title: 'Bar' }
+ # }
+ # )
+ #
+ # has the same effect as
+ #
+ # Member.create(
+ # name: 'joe',
+ # posts_attributes: [
+ # { title: 'Foo' },
+ # { title: 'Bar' }
+ # ]
+ # )
+ #
+ # The keys of the hash which is the value for +:posts_attributes+ are
+ # ignored in this case.
+ # However, it is not allowed to use <tt>'id'</tt> or <tt>:id</tt> for one of
+ # these keys, otherwise the hash will be wrapped in an array and
+ # interpreted as an attribute hash for a single post.
+ #
+ # Passing attributes for an associated collection in the form of a hash
+ # of hashes can be used with hashes generated from HTTP/HTML parameters,
+ # where there may be no natural way to submit an array of hashes.
+ #
+ # === Saving
+ #
+ # All changes to models, including the destruction of those marked for
+ # destruction, are saved and destroyed automatically and atomically when
+ # the parent model is saved. This happens inside the transaction initiated
+ # by the parent's save method. See ActiveRecord::AutosaveAssociation.
+ #
+ # === Validating the presence of a parent model
+ #
+ # If you want to validate that a child record is associated with a parent
+ # record, you can use the +validates_presence_of+ method and the +:inverse_of+
+ # key as this example illustrates:
+ #
+ # class Member < ActiveRecord::Base
+ # has_many :posts, inverse_of: :member
+ # accepts_nested_attributes_for :posts
+ # end
+ #
+ # class Post < ActiveRecord::Base
+ # belongs_to :member, inverse_of: :posts
+ # validates_presence_of :member
+ # end
+ #
+ # Note that if you do not specify the +:inverse_of+ option, then
+ # Active Record will try to automatically guess the inverse association
+ # based on heuristics.
+ #
+ # For one-to-one nested associations, if you build the new (in-memory)
+ # child object yourself before assignment, then this module will not
+ # overwrite it, e.g.:
+ #
+ # class Member < ActiveRecord::Base
+ # has_one :avatar
+ # accepts_nested_attributes_for :avatar
+ #
+ # def avatar
+ # super || build_avatar(width: 200)
+ # end
+ # end
+ #
+ # member = Member.new
+ # member.avatar_attributes = {icon: 'sad'}
+ # member.avatar.width # => 200
+ module ClassMethods
+ REJECT_ALL_BLANK_PROC = proc { |attributes| attributes.all? { |key, value| key == "_destroy" || value.blank? } }
+
+ # Defines an attributes writer for the specified association(s).
+ #
+ # Supported options:
+ # [:allow_destroy]
+ # If true, destroys any members from the attributes hash with a
+ # <tt>_destroy</tt> key and a value that evaluates to +true+
+ # (e.g. 1, '1', true, or 'true'). This option is off by default.
+ # [:reject_if]
+ # Allows you to specify a Proc or a Symbol pointing to a method
+ # that checks whether a record should be built for a certain attribute
+ # hash. The hash is passed to the supplied Proc or the method
+ # and it should return either +true+ or +false+. When no +:reject_if+
+ # is specified, a record will be built for all attribute hashes that
+ # do not have a <tt>_destroy</tt> value that evaluates to true.
+ # Passing <tt>:all_blank</tt> instead of a Proc will create a proc
+ # that will reject a record where all the attributes are blank excluding
+ # any value for +_destroy+.
+ # [:limit]
+ # Allows you to specify the maximum number of associated records that
+ # can be processed with the nested attributes. The limit can also be
+ # specified as a Proc or a Symbol pointing to a method that should return
+ # a number. If the size of the nested attributes array exceeds the
+ # specified limit, a NestedAttributes::TooManyRecords exception is raised.
+ # If omitted, any number of associations can be processed.
+ # Note that the +:limit+ option is only applicable to one-to-many
+ # associations. See the last example below.
+ # [:update_only]
+ # For a one-to-one association, this option allows you to specify how
+ # nested attributes are going to be used when an associated record already
+ # exists. In general, an existing record may either be updated with the
+ # new set of attribute values or be replaced by a wholly new record
+ # containing those values. By default the +:update_only+ option is +false+
+ # and the nested attributes are used to update the existing record only
+ # if they include the record's <tt>:id</tt> value. Otherwise a new
+ # record will be instantiated and used to replace the existing one.
+ # However if the +:update_only+ option is +true+, the nested attributes
+ # are used to update the record's attributes always, regardless of
+ # whether the <tt>:id</tt> is present. The option is ignored for collection
+ # associations.
+ #
+ # Examples:
+ # # creates avatar_attributes=
+ # accepts_nested_attributes_for :avatar, reject_if: proc { |attributes| attributes['name'].blank? }
+ # # creates avatar_attributes=
+ # accepts_nested_attributes_for :avatar, reject_if: :all_blank
+ # # creates avatar_attributes= and posts_attributes=
+ # accepts_nested_attributes_for :avatar, :posts, allow_destroy: true
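+ # # illustrative: creates posts_attributes= and raises NestedAttributes::TooManyRecords
+ # # when more than 3 post hashes are submitted at once
+ # accepts_nested_attributes_for :posts, limit: 3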
+ def accepts_nested_attributes_for(*attr_names)
+ options = { allow_destroy: false, update_only: false }
+ options.update(attr_names.extract_options!)
+ options.assert_valid_keys(:allow_destroy, :reject_if, :limit, :update_only)
+ options[:reject_if] = REJECT_ALL_BLANK_PROC if options[:reject_if] == :all_blank
+
+ attr_names.each do |association_name|
+ if reflection = _reflect_on_association(association_name)
+ reflection.autosave = true
+ define_autosave_validation_callbacks(reflection)
+
+ nested_attributes_options = self.nested_attributes_options.dup
+ nested_attributes_options[association_name.to_sym] = options
+ self.nested_attributes_options = nested_attributes_options
+
+ type = (reflection.collection? ? :collection : :one_to_one)
+ generate_association_writer(association_name, type)
+ else
+ raise ArgumentError, "No association found for name `#{association_name}'. Has it been defined yet?"
+ end
+ end
+ end
+
+ private
+
+ # Generates a writer method for this association. Serves as a point for
+ # accessing the objects in the association. For example, this method
+ # could generate the following:
+ #
+ # def pirate_attributes=(attributes)
+ # assign_nested_attributes_for_one_to_one_association(:pirate, attributes)
+ # end
+ #
+ # This redirects the attempts to write objects in an association through
+ # the helper methods defined below. Makes it seem like the nested
+ # associations are just regular associations.
+ def generate_association_writer(association_name, type)
+ generated_association_methods.module_eval <<-eoruby, __FILE__, __LINE__ + 1
+ if method_defined?(:#{association_name}_attributes=)
+ remove_method(:#{association_name}_attributes=)
+ end
+ def #{association_name}_attributes=(attributes)
+ assign_nested_attributes_for_#{type}_association(:#{association_name}, attributes)
+ end
+ eoruby
+ end
+ end
+
+ # Returns ActiveRecord::AutosaveAssociation::marked_for_destruction? It's
+ # used in conjunction with fields_for to build a form element for the
+ # destruction of this association.
+ #
+ # See ActionView::Helpers::FormHelper::fields_for for more info.
+ def _destroy
+ marked_for_destruction?
+ end
+
+ private
+
+ # Attribute hash keys that should not be assigned as normal attributes.
+ # These hash keys are nested attributes implementation details.
+ UNASSIGNABLE_KEYS = %w( id _destroy )
+
+ # Assigns the given attributes to the association.
+ #
+ # If an associated record does not yet exist, one will be instantiated. If
+ # an associated record already exists, the method's behavior depends on
+ # the value of the update_only option. If update_only is +false+ and the
+ # given attributes include an <tt>:id</tt> that matches the existing record's
+ # id, then the existing record will be modified. If no <tt>:id</tt> is provided
+ # it will be replaced with a new record. If update_only is +true+ the existing
+ # record will be modified regardless of whether an <tt>:id</tt> is provided.
+ #
+ # If the given attributes include a matching <tt>:id</tt> attribute, or
+ # update_only is true, and a <tt>:_destroy</tt> key set to a truthy value,
+ # then the existing record will be marked for destruction.
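+ #
+ # Sketch of a call, as made by a generated writer such as
+ # <tt>avatar_attributes=</tt> for a hypothetical +avatar+ association:
+ #
+ # assign_nested_attributes_for_one_to_one_association(:avatar, id: "2", icon: "sad")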
+ def assign_nested_attributes_for_one_to_one_association(association_name, attributes)
+ options = nested_attributes_options[association_name]
+ if attributes.respond_to?(:permitted?)
+ attributes = attributes.to_h
+ end
+ attributes = attributes.with_indifferent_access
+ existing_record = send(association_name)
+
+ if (options[:update_only] || !attributes["id"].blank?) && existing_record &&
+ (options[:update_only] || existing_record.id.to_s == attributes["id"].to_s)
+ assign_to_or_mark_for_destruction(existing_record, attributes, options[:allow_destroy]) unless call_reject_if(association_name, attributes)
+
+ elsif attributes["id"].present?
+ raise_nested_attributes_record_not_found!(association_name, attributes["id"])
+
+ elsif !reject_new_record?(association_name, attributes)
+ assignable_attributes = attributes.except(*UNASSIGNABLE_KEYS)
+
+ if existing_record && existing_record.new_record?
+ existing_record.assign_attributes(assignable_attributes)
+ association(association_name).initialize_attributes(existing_record)
+ else
+ method = "build_#{association_name}"
+ if respond_to?(method)
+ send(method, assignable_attributes)
+ else
+ raise ArgumentError, "Cannot build association `#{association_name}'. Are you trying to build a polymorphic one-to-one association?"
+ end
+ end
+ end
+ end
+
+ # Assigns the given attributes to the collection association.
+ #
+ # Hashes with an <tt>:id</tt> value matching an existing associated record
+ # will update that record. Hashes without an <tt>:id</tt> value will build
+ # a new record for the association. Hashes with a matching <tt>:id</tt>
+ # value and a <tt>:_destroy</tt> key set to a truthy value will mark the
+ # matched record for destruction.
+ #
+ # For example:
+ #
+ # assign_nested_attributes_for_collection_association(:people, {
+ # '1' => { id: '1', name: 'Peter' },
+ # '2' => { name: 'John' },
+ # '3' => { id: '2', _destroy: true }
+ # })
+ #
+ # Will update the name of the Person with ID 1, build a new associated
+ # person with the name 'John', and mark the associated Person with ID 2
+ # for destruction.
+ #
+ # Also accepts an Array of attribute hashes:
+ #
+ # assign_nested_attributes_for_collection_association(:people, [
+ # { id: '1', name: 'Peter' },
+ # { name: 'John' },
+ # { id: '2', _destroy: true }
+ # ])
+ def assign_nested_attributes_for_collection_association(association_name, attributes_collection)
+ options = nested_attributes_options[association_name]
+ if attributes_collection.respond_to?(:permitted?)
+ attributes_collection = attributes_collection.to_h
+ end
+
+ unless attributes_collection.is_a?(Hash) || attributes_collection.is_a?(Array)
+ raise ArgumentError, "Hash or Array expected for attribute `#{association_name}`, got #{attributes_collection.class.name} (#{attributes_collection.inspect})"
+ end
+
+ check_record_limit!(options[:limit], attributes_collection)
+
+ if attributes_collection.is_a? Hash
+ keys = attributes_collection.keys
+ attributes_collection = if keys.include?("id") || keys.include?(:id)
+ [attributes_collection]
+ else
+ attributes_collection.values
+ end
+ end
+
+ association = association(association_name)
+
+ existing_records = if association.loaded?
+ association.target
+ else
+ attribute_ids = attributes_collection.map { |a| a["id"] || a[:id] }.compact
+ attribute_ids.empty? ? [] : association.scope.where(association.klass.primary_key => attribute_ids)
+ end
+
+ attributes_collection.each do |attributes|
+ if attributes.respond_to?(:permitted?)
+ attributes = attributes.to_h
+ end
+ attributes = attributes.with_indifferent_access
+
+ if attributes["id"].blank?
+ unless reject_new_record?(association_name, attributes)
+ association.build(attributes.except(*UNASSIGNABLE_KEYS))
+ end
+ elsif existing_record = existing_records.detect { |record| record.id.to_s == attributes["id"].to_s }
+ unless call_reject_if(association_name, attributes)
+ # Make sure we are operating on the actual object which is in the association's
+ # proxy_target array (either by finding it, or adding it if not found)
+ # Take into account that the proxy_target may have changed due to callbacks
+ target_record = association.target.detect { |record| record.id.to_s == attributes["id"].to_s }
+ if target_record
+ existing_record = target_record
+ else
+ association.add_to_target(existing_record, :skip_callbacks)
+ end
+
+ assign_to_or_mark_for_destruction(existing_record, attributes, options[:allow_destroy])
+ end
+ else
+ raise_nested_attributes_record_not_found!(association_name, attributes["id"])
+ end
+ end
+ end
+
+ # Takes in a limit and checks if the attributes_collection has too many
+ # records. It accepts the limit in the form of a Symbol, a Proc, or a
+ # number-like object (anything that can be compared with an integer).
+ #
+ # Raises TooManyRecords error if the attributes_collection is
+ # larger than the limit.
+ def check_record_limit!(limit, attributes_collection)
+ if limit
+ limit = \
+ case limit
+ when Symbol
+ send(limit)
+ when Proc
+ limit.call
+ else
+ limit
+ end
+
+ if limit && attributes_collection.size > limit
+ raise TooManyRecords, "Maximum #{limit} records are allowed. Got #{attributes_collection.size} records instead."
+ end
+ end
+ end
+
+ # Updates a record with the +attributes+ or marks it for destruction if
+ # +allow_destroy+ is +true+ and has_destroy_flag? returns +true+.
+ def assign_to_or_mark_for_destruction(record, attributes, allow_destroy)
+ record.assign_attributes(attributes.except(*UNASSIGNABLE_KEYS))
+ record.mark_for_destruction if has_destroy_flag?(attributes) && allow_destroy
+ end
+
+ # Determines if a hash contains a truthy _destroy key.
+ def has_destroy_flag?(hash)
+ Type::Boolean.new.cast(hash["_destroy"])
+ end
+
+ # Determines if a new record should be rejected by checking
+ # has_destroy_flag? or if a <tt>:reject_if</tt> proc exists for this
+ # association and evaluates to +true+.
+ def reject_new_record?(association_name, attributes)
+ will_be_destroyed?(association_name, attributes) || call_reject_if(association_name, attributes)
+ end
+
+ # Determines if a record with the particular +attributes+ should be
+ # rejected by calling the reject_if Symbol or Proc (if defined).
+ # The reject_if option is defined by +accepts_nested_attributes_for+.
+ #
+ # Returns false if there is a +destroy_flag+ on the attributes.
+ def call_reject_if(association_name, attributes)
+ return false if will_be_destroyed?(association_name, attributes)
+
+ case callback = nested_attributes_options[association_name][:reject_if]
+ when Symbol
+ method(callback).arity == 0 ? send(callback) : send(callback, attributes)
+ when Proc
+ callback.call(attributes)
+ end
+ end
+
+ # Only take into account the destroy flag if <tt>:allow_destroy</tt> is true
+ def will_be_destroyed?(association_name, attributes)
+ allow_destroy?(association_name) && has_destroy_flag?(attributes)
+ end
+
+ def allow_destroy?(association_name)
+ nested_attributes_options[association_name][:allow_destroy]
+ end
+
+ def raise_nested_attributes_record_not_found!(association_name, record_id)
+ model = self.class._reflect_on_association(association_name).klass.name
+ raise RecordNotFound.new("Couldn't find #{model} with ID=#{record_id} for #{self.class.name} with ID=#{id}",
+ model, "id", record_id)
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/no_touching.rb b/activerecord/lib/active_record/no_touching.rb
new file mode 100644
index 0000000000..c573deb63a
--- /dev/null
+++ b/activerecord/lib/active_record/no_touching.rb
@@ -0,0 +1,58 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ # = Active Record No Touching
+ module NoTouching
+ extend ActiveSupport::Concern
+
+ module ClassMethods
+ # Lets you selectively disable calls to +touch+ for the
+ # duration of a block.
+ #
+ # ==== Examples
+ # ActiveRecord::Base.no_touching do
+ # Project.first.touch # does nothing
+ # Message.first.touch # does nothing
+ # end
+ #
+ # Project.no_touching do
+ # Project.first.touch # does nothing
+ # Message.first.touch # works, but does not touch the associated project
+ # end
+ #
+ def no_touching(&block)
+ NoTouching.apply_to(self, &block)
+ end
+ end
+
+ class << self
+ def apply_to(klass) #:nodoc:
+ klasses.push(klass)
+ yield
+ ensure
+ klasses.pop
+ end
+
+ def applied_to?(klass) #:nodoc:
+ klasses.any? { |k| k >= klass }
+ end
+
+ private
+ def klasses
+ Thread.current[:no_touching_classes] ||= []
+ end
+ end
+
+ def no_touching?
+ NoTouching.applied_to?(self.class)
+ end
+
+ def touch_later(*) # :nodoc:
+ super unless no_touching?
+ end
+
+ def touch(*) # :nodoc:
+ super unless no_touching?
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/null_relation.rb b/activerecord/lib/active_record/null_relation.rb
new file mode 100644
index 0000000000..cf0de0fdeb
--- /dev/null
+++ b/activerecord/lib/active_record/null_relation.rb
@@ -0,0 +1,68 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ module NullRelation # :nodoc:
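+ # Illustrative behaviour, assuming a hypothetical +Post+ model; +Post.none+
+ # returns a relation extended with this module:
+ #
+ # Post.none.to_sql # => ""
+ # Post.none.pluck(:id) # => []
+ # Post.none.exists? # => false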
+ def pluck(*column_names)
+ []
+ end
+
+ def delete_all
+ 0
+ end
+
+ def update_all(_updates)
+ 0
+ end
+
+ def delete(_id_or_array)
+ 0
+ end
+
+ def empty?
+ true
+ end
+
+ def none?
+ true
+ end
+
+ def any?
+ false
+ end
+
+ def one?
+ false
+ end
+
+ def many?
+ false
+ end
+
+ def to_sql
+ ""
+ end
+
+ def calculate(operation, _column_name)
+ case operation
+ when :count, :sum
+ group_values.any? ? Hash.new : 0
+ when :average, :minimum, :maximum
+ group_values.any? ? Hash.new : nil
+ end
+ end
+
+ def exists?(_conditions = :none)
+ false
+ end
+
+ def or(other)
+ other.spawn
+ end
+
+ private
+
+ def exec_queries
+ @records = [].freeze
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/persistence.rb b/activerecord/lib/active_record/persistence.rb
new file mode 100644
index 0000000000..1297e0cde7
--- /dev/null
+++ b/activerecord/lib/active_record/persistence.rb
@@ -0,0 +1,623 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ # = Active Record \Persistence
+ module Persistence
+ extend ActiveSupport::Concern
+
+ module ClassMethods
+ # Creates an object (or multiple objects) and saves it to the database, if validations pass.
+ # The resulting object is returned whether the object was saved successfully to the database or not.
+ #
+ # The +attributes+ parameter can be either a Hash or an Array of Hashes. These Hashes describe the
+ # attributes on the objects that are to be created.
+ #
+ # ==== Examples
+ # # Create a single new object
+ # User.create(first_name: 'Jamie')
+ #
+ # # Create an Array of new objects
+ # User.create([{ first_name: 'Jamie' }, { first_name: 'Jeremy' }])
+ #
+ # # Create a single object and pass it into a block to set other attributes.
+ # User.create(first_name: 'Jamie') do |u|
+ # u.is_admin = false
+ # end
+ #
+ # # Creating an Array of new objects using a block, where the block is executed for each object:
+ # User.create([{ first_name: 'Jamie' }, { first_name: 'Jeremy' }]) do |u|
+ # u.is_admin = false
+ # end
+ def create(attributes = nil, &block)
+ if attributes.is_a?(Array)
+ attributes.collect { |attr| create(attr, &block) }
+ else
+ object = new(attributes, &block)
+ object.save
+ object
+ end
+ end
+
+ # Creates an object (or multiple objects) and saves it to the database,
+ # if validations pass. Raises a RecordInvalid error if validations fail,
+ # unlike Base#create.
+ #
+ # The +attributes+ parameter can be either a Hash or an Array of Hashes.
+ # These describe the attributes to be set on the object, or on
+ # multiple objects when given an Array of Hashes.
+ def create!(attributes = nil, &block)
+ if attributes.is_a?(Array)
+ attributes.collect { |attr| create!(attr, &block) }
+ else
+ object = new(attributes, &block)
+ object.save!
+ object
+ end
+ end
+
+ # Given an attributes hash, +instantiate+ returns a new instance of
+ # the appropriate class. The attribute keys must be strings.
+ #
+ # For example, +Post.all+ may return Comments, Messages, and Emails
+ # by storing the record's subclass in a +type+ attribute. By calling
+ # +instantiate+ instead of +new+, finder methods ensure they get new
+ # instances of the appropriate class for each record.
+ #
+ # See <tt>ActiveRecord::Inheritance#discriminate_class_for_record</tt> to see
+ # how this "single-table" inheritance mapping is implemented.
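+ #
+ # Illustrative sketch, assuming a hypothetical +Post+ model:
+ #
+ # post = Post.instantiate("id" => 1, "title" => "Hello")
+ # post.new_record? # => false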
+ def instantiate(attributes, column_types = {}, &block)
+ klass = discriminate_class_for_record(attributes)
+ attributes = klass.attributes_builder.build_from_database(attributes, column_types)
+ klass.allocate.init_with("attributes" => attributes, "new_record" => false, &block)
+ end
+
+ private
+ # Called by +instantiate+ to decide which class to use for a new
+ # record instance.
+ #
+ # See +ActiveRecord::Inheritance#discriminate_class_for_record+ for
+ # the single-table inheritance discriminator.
+ def discriminate_class_for_record(record)
+ self
+ end
+ end
+
+ # Returns true if this object hasn't been saved yet -- that is, a record
+ # for the object doesn't exist in the database yet; otherwise, returns false.
+ def new_record?
+ sync_with_transaction_state
+ @new_record
+ end
+
+ # Returns true if this object has been destroyed, otherwise returns false.
+ def destroyed?
+ sync_with_transaction_state
+ @destroyed
+ end
+
+ # Returns true if the record is persisted, i.e. it's not a new record and it was
+ # not destroyed, otherwise returns false.
+ def persisted?
+ sync_with_transaction_state
+ !(@new_record || @destroyed)
+ end
+
+ ##
+ # :call-seq:
+ # save(*args)
+ #
+ # Saves the model.
+ #
+ # If the model is new, a record gets created in the database, otherwise
+ # the existing record gets updated.
+ #
+ # By default, save always runs validations. If any of them fail the action
+ # is cancelled and #save returns +false+, and the record won't be saved. However, if you supply
+ # <tt>validate: false</tt>, validations are bypassed altogether. See
+ # ActiveRecord::Validations for more information.
+ #
+ # By default, #save also sets the +updated_at+/+updated_on+ attributes to
+ # the current time. However, if you supply <tt>touch: false</tt>, these
+ # timestamps will not be updated.
+ #
+ # There's a series of callbacks associated with #save. If any of the
+ # <tt>before_*</tt> callbacks throws +:abort+ the action is cancelled and
+ # #save returns +false+. See ActiveRecord::Callbacks for further
+ # details.
+ #
+ # Attributes marked as readonly are silently ignored if the record is
+ # being updated.
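+ #
+ # A minimal sketch, assuming a hypothetical +User+ model:
+ #
+ # user = User.new(name: "Jamie")
+ # user.save # => true or false, running validations and callbacks
+ # user.save(validate: false) # bypasses validations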
+ def save(*args, &block)
+ create_or_update(*args, &block)
+ rescue ActiveRecord::RecordInvalid
+ false
+ end
+
+ ##
+ # :call-seq:
+ # save!(*args)
+ #
+ # Saves the model.
+ #
+ # If the model is new, a record gets created in the database, otherwise
+ # the existing record gets updated.
+ #
+ # By default, #save! always runs validations. If any of them fail
+ # ActiveRecord::RecordInvalid gets raised, and the record won't be saved. However, if you supply
+ # <tt>validate: false</tt>, validations are bypassed altogether. See
+ # ActiveRecord::Validations for more information.
+ #
+ # By default, #save! also sets the +updated_at+/+updated_on+ attributes to
+ # the current time. However, if you supply <tt>touch: false</tt>, these
+ # timestamps will not be updated.
+ #
+ # There's a series of callbacks associated with #save!. If any of
+ # the <tt>before_*</tt> callbacks throws +:abort+ the action is cancelled
+ # and #save! raises ActiveRecord::RecordNotSaved. See
+ # ActiveRecord::Callbacks for further details.
+ #
+ # Attributes marked as readonly are silently ignored if the record is
+ # being updated.
+ #
+ # Unless an error is raised, returns true.
+ def save!(*args, &block)
+ create_or_update(*args, &block) || raise(RecordNotSaved.new("Failed to save the record", self))
+ end
+
+ # Deletes the record in the database and freezes this instance to
+ # reflect that no changes should be made (since they can't be
+ # persisted). Returns the frozen instance.
+ #
+ # The row is simply removed with an SQL +DELETE+ statement on the
+ # record's primary key, and no callbacks are executed.
+ #
+ # Note that this will also delete records marked as {#readonly?}[rdoc-ref:Core#readonly?].
+ #
+ # To enforce the object's +before_destroy+ and +after_destroy+
+ # callbacks or any <tt>:dependent</tt> association
+ # options, use <tt>#destroy</tt>.
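+ #
+ # Illustrative, assuming a hypothetical +Post+ model:
+ #
+ # post = Post.find(1)
+ # post.delete # issues a single SQL DELETE, skipping callbacks
+ # post.frozen? # => true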
+ def delete
+ _relation_for_itself.delete_all if persisted?
+ @destroyed = true
+ freeze
+ end
+
+ # Deletes the record in the database and freezes this instance to reflect
+ # that no changes should be made (since they can't be persisted).
+ #
+ # There's a series of callbacks associated with #destroy. If the
+ # <tt>before_destroy</tt> callback throws +:abort+ the action is cancelled
+ # and #destroy returns +false+.
+ # See ActiveRecord::Callbacks for further details.
+ def destroy
+ _raise_readonly_record_error if readonly?
+ destroy_associations
+ self.class.connection.add_transaction_record(self)
+ @_trigger_destroy_callback = if persisted?
+ destroy_row > 0
+ else
+ true
+ end
+ @destroyed = true
+ freeze
+ end
+
+ # Deletes the record in the database and freezes this instance to reflect
+ # that no changes should be made (since they can't be persisted).
+ #
+ # There's a series of callbacks associated with #destroy!. If the
+ # <tt>before_destroy</tt> callback throws +:abort+ the action is cancelled
+ # and #destroy! raises ActiveRecord::RecordNotDestroyed.
+ # See ActiveRecord::Callbacks for further details.
+ def destroy!
+ destroy || _raise_record_not_destroyed
+ end
+
+ # Returns an instance of the specified +klass+ with the attributes of the
+ # current record. This is mostly useful in relation to single-table
+ # inheritance structures where you want a subclass to appear as the
+ # superclass. This can be used along with record identification in
+ # Action Pack to allow, say, <tt>Client < Company</tt> to do something
+ # like render <tt>partial: @client.becomes(Company)</tt> to render that
+ # instance using the companies/company partial instead of clients/client.
+ #
+ # Note: The new instance will share a link to the same attributes as the original record.
+ # Therefore the sti column value will still be the same.
+ # Any change to the attributes on either instance will affect both instances.
+ # If you want to change the sti column as well, use #becomes! instead.
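+ #
+ # Illustrative, assuming hypothetical STI models <tt>Client < Company</tt>:
+ #
+ # client = Client.first
+ # company = client.becomes(Company)
+ # company.class # => Company
+ # company.type # => "Client" (the sti column value is unchanged)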
+ def becomes(klass)
+ became = klass.new
+ became.instance_variable_set("@attributes", @attributes)
+ became.instance_variable_set("@mutation_tracker", @mutation_tracker) if defined?(@mutation_tracker)
+ became.instance_variable_set("@changed_attributes", attributes_changed_by_setter)
+ became.instance_variable_set("@new_record", new_record?)
+ became.instance_variable_set("@destroyed", destroyed?)
+ became.errors.copy!(errors)
+ became
+ end
+
+ # Wrapper around #becomes that also changes the instance's sti column value.
+ # This is especially useful if you want to persist the changed class in your
+ # database.
+ #
+ # Note: The old instance's sti column value will be changed too, as both objects
+ # share the same set of attributes.
+ def becomes!(klass)
+ became = becomes(klass)
+ sti_type = nil
+ if !klass.descends_from_active_record?
+ sti_type = klass.sti_name
+ end
+ became.public_send("#{klass.inheritance_column}=", sti_type)
+ became
+ end
+
+ # Updates a single attribute and saves the record.
+ # This is especially useful for boolean flags on existing records. Also note that
+ #
+ # * Validation is skipped.
+ # * \Callbacks are invoked.
+ # * updated_at/updated_on column is updated if that column is available.
+ # * Updates all the attributes that are dirty in this object.
+ #
+ # This method raises an ActiveRecord::ActiveRecordError if the
+ # attribute is marked as readonly.
+ #
+ # Also see #update_column.
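+ #
+ # For illustration, with a hypothetical +User+ model:
+ #
+ # user.update_attribute(:banned, true) # saves without running validations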
+ def update_attribute(name, value)
+ name = name.to_s
+ verify_readonly_attribute(name)
+ public_send("#{name}=", value)
+
+ if has_changes_to_save?
+ save(validate: false)
+ else
+ true
+ end
+ end
+
+ # Updates the attributes of the model from the passed-in hash and saves the
+ # record, all wrapped in a transaction. If the object is invalid, the saving
+ # will fail and false will be returned.
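+ #
+ # For illustration, with a hypothetical +User+ model:
+ #
+ # user.update(name: "Samuel", email: "sam@example.com") # => true or false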
+ def update(attributes)
+ # The following transaction covers any possible database side-effects of the
+ # attributes assignment. For example, setting the IDs of a child collection.
+ with_transaction_returning_status do
+ assign_attributes(attributes)
+ save
+ end
+ end
+
+ alias update_attributes update
+
+ # Updates its receiver just like #update but calls #save! instead
+ # of +save+, so an exception is raised if the record is invalid and saving will fail.
+ def update!(attributes)
+ # The following transaction covers any possible database side-effects of the
+ # attributes assignment. For example, setting the IDs of a child collection.
+ with_transaction_returning_status do
+ assign_attributes(attributes)
+ save!
+ end
+ end
+
+ alias update_attributes! update!
+
+ # Equivalent to <code>update_columns(name => value)</code>.
+ def update_column(name, value)
+ update_columns(name => value)
+ end
+
+ # Updates the attributes directly in the database issuing an UPDATE SQL
+ # statement and sets them in the receiver:
+ #
+ # user.update_columns(last_request_at: Time.current)
+ #
+ # This is the fastest way to update attributes because it goes straight to
+ # the database, but take into account that in consequence the regular update
+ # procedures are totally bypassed. In particular:
+ #
+ # * \Validations are skipped.
+ # * \Callbacks are skipped.
+ # * +updated_at+/+updated_on+ are not updated.
+ # * However, attributes are serialized with the same rules as ActiveRecord::Relation#update_all
+ #
+ # This method raises an ActiveRecord::ActiveRecordError when called on new
+ # objects, or when at least one of the attributes is marked as readonly.
+ def update_columns(attributes)
+ raise ActiveRecordError, "cannot update a new record" if new_record?
+ raise ActiveRecordError, "cannot update a destroyed record" if destroyed?
+
+ attributes.each_key do |key|
+ verify_readonly_attribute(key.to_s)
+ end
+
+ updated_count = _relation_for_itself.update_all(attributes)
+
+ attributes.each do |k, v|
+ write_attribute_without_type_cast(k, v)
+ end
+
+ updated_count == 1
+ end
+
+ # Initializes +attribute+ to zero if +nil+ and adds the value passed as +by+ (default is 1).
+ # The increment is performed directly on the underlying attribute, no setter is invoked.
+ # Only makes sense for number-based attributes. Returns +self+.
+ def increment(attribute, by = 1)
+ self[attribute] ||= 0
+ self[attribute] += by
+ self
+ end
+
+ # Wrapper around #increment that writes the update to the database.
+ # Only +attribute+ is updated; the record itself is not saved.
+ # This means that any other modified attributes will still be dirty.
+ # Validations and callbacks are skipped. Supports the +touch+ option from
+ # +update_counters+; see that method for details.
+ # Returns +self+.
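+ #
+ # Sketch, assuming a hypothetical +Post+ model with a +comments_count+ column:
+ #
+ # post.increment!(:comments_count) # persists the bump with a single UPDATE
+ # post.increment!(:comments_count, 5, touch: true) # also touches updated_at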
+ def increment!(attribute, by = 1, touch: nil)
+ increment(attribute, by)
+ change = public_send(attribute) - (attribute_in_database(attribute.to_s) || 0)
+ self.class.update_counters(id, attribute => change, touch: touch)
+ clear_attribute_change(attribute) # eww
+ self
+ end
+
+ # Initializes +attribute+ to zero if +nil+ and subtracts the value passed as +by+ (default is 1).
+ # The decrement is performed directly on the underlying attribute, no setter is invoked.
+ # Only makes sense for number-based attributes. Returns +self+.
+ def decrement(attribute, by = 1)
+ increment(attribute, -by)
+ end
+
+ # Wrapper around #decrement that writes the update to the database.
+ # Only +attribute+ is updated; the record itself is not saved.
+ # This means that any other modified attributes will still be dirty.
+ # Validations and callbacks are skipped. Supports the +touch+ option from
+ # +update_counters+; see that method for details.
+ # Returns +self+.
+ def decrement!(attribute, by = 1, touch: nil)
+ increment!(attribute, -by, touch: touch)
+ end
+
+ # Assigns to +attribute+ the boolean opposite of <tt>attribute?</tt>. So
+ # if the predicate returns +true+ the attribute will become +false+. This
+ # method toggles directly the underlying value without calling any setter.
+ # Returns +self+.
+ #
+ # Example:
+ #
+ # user = User.first
+ # user.banned? # => false
+ # user.toggle(:banned)
+ # user.banned? # => true
+ #
+ def toggle(attribute)
+ self[attribute] = !public_send("#{attribute}?")
+ self
+ end
+
+ # Wrapper around #toggle that saves the record. This method differs from
+ # its non-bang version in the sense that it passes through the attribute setter.
+ # Saving is not subjected to validation checks. Returns +true+ if the
+ # record could be saved.
+ def toggle!(attribute)
+ toggle(attribute).update_attribute(attribute, self[attribute])
+ end
+
+ # Reloads the record from the database.
+ #
+ # This method finds the record by its primary key (which could be assigned
+ # manually) and modifies the receiver in-place:
+ #
+ # account = Account.new
+ # # => #<Account id: nil, email: nil>
+ # account.id = 1
+ # account.reload
+ # # Account Load (1.2ms) SELECT "accounts".* FROM "accounts" WHERE "accounts"."id" = $1 LIMIT 1 [["id", 1]]
+ # # => #<Account id: 1, email: 'account@example.com'>
+ #
+ # Attributes are reloaded from the database, and caches busted, in
+ # particular the associations cache and the QueryCache.
+ #
+ # If the record no longer exists in the database ActiveRecord::RecordNotFound
+ # is raised. Otherwise, in addition to the in-place modification the method
+ # returns +self+ for convenience.
+ #
+ # The optional <tt>:lock</tt> option allows you to lock the reloaded record:
+ #
+ # reload(lock: true) # reload with pessimistic locking
+ #
+ # Reloading is commonly used in test suites to test something is actually
+ # written to the database, or when some action modifies the corresponding
+ # row in the database but not the object in memory:
+ #
+ # assert account.deposit!(25)
+ # assert_equal 25, account.credit # check it is updated in memory
+ # assert_equal 25, account.reload.credit # check it is also persisted
+ #
+ # Another common use case is optimistic locking handling:
+ #
+ # def with_optimistic_retry
+ # begin
+ # yield
+ # rescue ActiveRecord::StaleObjectError
+ # begin
+ # # Reload lock_version in particular.
+ # reload
+ # rescue ActiveRecord::RecordNotFound
+ # # If the record is gone there is nothing to do.
+ # else
+ # retry
+ # end
+ # end
+ # end
+ #
+ def reload(options = nil)
+ self.class.connection.clear_query_cache
+
+ fresh_object =
+ if options && options[:lock]
+ self.class.unscoped { self.class.lock(options[:lock]).find(id) }
+ else
+ self.class.unscoped { self.class.find(id) }
+ end
+
+ @attributes = fresh_object.instance_variable_get("@attributes")
+ @new_record = false
+ self
+ end
+
+ # Saves the record with the updated_at/on attributes set to the current time
+ # or the time specified.
+ # Please note that no validation is performed and only the +after_touch+,
+ # +after_commit+ and +after_rollback+ callbacks are executed.
+ #
+ # This method can be passed attribute names and an optional time argument.
+ # If attribute names are passed, they are updated along with updated_at/on
+ # attributes. If no time argument is passed, the current time is used as default.
+ #
+ # product.touch # updates updated_at/on with current time
+ # product.touch(time: Time.new(2015, 2, 16, 0, 0, 0)) # updates updated_at/on with specified time
+ # product.touch(:designed_at) # updates the designed_at attribute and updated_at/on
+ # product.touch(:started_at, :ended_at) # updates started_at, ended_at and updated_at/on attributes
+ #
+ # If used along with {belongs_to}[rdoc-ref:Associations::ClassMethods#belongs_to]
+ # then +touch+ will invoke +touch+ method on associated object.
+ #
+ # class Brake < ActiveRecord::Base
+ # belongs_to :car, touch: true
+ # end
+ #
+ # class Car < ActiveRecord::Base
+ # belongs_to :corporation, touch: true
+ # end
+ #
+ # # triggers @brake.car.touch and @brake.car.corporation.touch
+ # @brake.touch
+ #
+ # Note that +touch+ must be used on a persisted object, or else an
+ # ActiveRecordError will be raised. For example:
+ #
+ # ball = Ball.new
+ # ball.touch(:updated_at) # => raises ActiveRecordError
+ #
+ def touch(*names, time: nil)
+ unless persisted?
+ raise ActiveRecordError, <<-MSG.squish
+ cannot touch on a new or destroyed record object. Consider using
+ persisted?, new_record?, or destroyed? before touching
+ MSG
+ end
+
+ time ||= current_time_from_proper_timezone
+ attributes = timestamp_attributes_for_update_in_model
+ attributes.concat(names)
+
+ unless attributes.empty?
+ changes = {}
+
+ attributes.each do |column|
+ column = column.to_s
+ changes[column] = write_attribute(column, time)
+ end
+
+ scope = _relation_for_itself
+
+ if locking_enabled?
+ locking_column = self.class.locking_column
+ scope = scope.where(locking_column => read_attribute_before_type_cast(locking_column))
+ changes[locking_column] = increment_lock
+ end
+
+ clear_attribute_changes(changes.keys)
+ result = scope.update_all(changes) == 1
+
+ if !result && locking_enabled?
+ raise ActiveRecord::StaleObjectError.new(self, "touch")
+ end
+
+ @_trigger_update_callback = result
+ result
+ else
+ true
+ end
+ end
+
+ private
+
+ # A hook to be overridden by association modules.
+ def destroy_associations
+ end
+
+ def destroy_row
+ relation_for_destroy.delete_all
+ end
+
+ def relation_for_destroy
+ _relation_for_itself
+ end
+
+ def _relation_for_itself
+ self.class.unscoped.where(self.class.primary_key => id)
+ end
+
+ def create_or_update(*args, &block)
+ _raise_readonly_record_error if readonly?
+ result = new_record? ? _create_record(&block) : _update_record(*args, &block)
+ result != false
+ end
+
+ # Updates the associated record with values matching those of the instance attributes.
+ # Returns the number of affected rows.
+ def _update_record(attribute_names = self.attribute_names)
+ attributes_values = arel_attributes_with_values_for_update(attribute_names)
+ if attributes_values.empty?
+ rows_affected = 0
+ @_trigger_update_callback = true
+ else
+ rows_affected = self.class.unscoped._update_record attributes_values, id, id_in_database
+ @_trigger_update_callback = rows_affected > 0
+ end
+
+ yield(self) if block_given?
+
+ rows_affected
+ end
+
+ # Creates a record with values matching those of the instance attributes
+ # and returns its id.
+ def _create_record(attribute_names = self.attribute_names)
+ attributes_values = arel_attributes_with_values_for_create(attribute_names)
+
+ new_id = self.class.unscoped.insert attributes_values
+ self.id ||= new_id if self.class.primary_key
+
+ @new_record = false
+
+ yield(self) if block_given?
+
+ id
+ end
+
+ def verify_readonly_attribute(name)
+ raise ActiveRecordError, "#{name} is marked as readonly" if self.class.readonly_attributes.include?(name)
+ end
+
+ def _raise_record_not_destroyed
+ @_association_destroy_exception ||= nil
+ raise @_association_destroy_exception || RecordNotDestroyed.new("Failed to destroy the record", self)
+ ensure
+ @_association_destroy_exception = nil
+ end
+
+ def belongs_to_touch_method
+ :touch
+ end
+
+ def _raise_readonly_record_error
+ raise ReadOnlyRecord, "#{self.class} is marked as readonly"
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/query_cache.rb b/activerecord/lib/active_record/query_cache.rb
new file mode 100644
index 0000000000..3d5babb8b7
--- /dev/null
+++ b/activerecord/lib/active_record/query_cache.rb
@@ -0,0 +1,49 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ # = Active Record Query Cache
+ class QueryCache
+ module ClassMethods
+ # Enable the query cache within the block if Active Record is configured.
+ # If it's not, it will execute the given block.
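+ # For illustration, with a hypothetical +Post+ model:
+ #
+ # Post.cache do
+ # Post.first # hits the database
+ # Post.first # served from the query cache
+ # end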
+ def cache(&block)
+ if configurations.empty?
+ yield
+ else
+ connection.cache(&block)
+ end
+ end
+
+ # Disable the query cache within the block if Active Record is configured.
+ # If it's not, it will execute the given block.
+ def uncached(&block)
+ if configurations.empty?
+ yield
+ else
+ connection.uncached(&block)
+ end
+ end
+ end
+
+ def self.run
+ caching_pool = ActiveRecord::Base.connection_pool
+ caching_was_enabled = caching_pool.query_cache_enabled
+
+ caching_pool.enable_query_cache!
+
+ [caching_pool, caching_was_enabled]
+ end
+
+ def self.complete((caching_pool, caching_was_enabled))
+ caching_pool.disable_query_cache! unless caching_was_enabled
+
+ ActiveRecord::Base.connection_handler.connection_pool_list.each do |pool|
+ pool.release_connection if pool.active_connection? && !pool.connection.transaction_open?
+ end
+ end
+
+ def self.install_executor_hooks(executor = ActiveSupport::Executor)
+ executor.register_hook(self)
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/querying.rb b/activerecord/lib/active_record/querying.rb
new file mode 100644
index 0000000000..f780538319
--- /dev/null
+++ b/activerecord/lib/active_record/querying.rb
@@ -0,0 +1,70 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ module Querying
+ delegate :find, :take, :take!, :first, :first!, :last, :last!, :exists?, :any?, :many?, :none?, :one?, to: :all
+ delegate :second, :second!, :third, :third!, :fourth, :fourth!, :fifth, :fifth!, :forty_two, :forty_two!, :third_to_last, :third_to_last!, :second_to_last, :second_to_last!, to: :all
+ delegate :first_or_create, :first_or_create!, :first_or_initialize, to: :all
+ delegate :find_or_create_by, :find_or_create_by!, :find_or_initialize_by, to: :all
+ delegate :find_by, :find_by!, to: :all
+ delegate :destroy, :destroy_all, :delete, :delete_all, :update, :update_all, to: :all
+ delegate :find_each, :find_in_batches, :in_batches, to: :all
+ delegate :select, :group, :order, :except, :reorder, :limit, :offset, :joins, :left_joins, :left_outer_joins, :or,
+ :where, :rewhere, :preload, :eager_load, :includes, :from, :lock, :readonly, :extending,
+ :having, :create_with, :distinct, :references, :none, :unscope, :merge, to: :all
+ delegate :count, :average, :minimum, :maximum, :sum, :calculate, to: :all
+ delegate :pluck, :ids, to: :all
+
+ # Executes a custom SQL query against your database and returns all the results. The results will
+ # be returned as an array with columns requested encapsulated as attributes of the model you call
+ # this method from. If you call <tt>Product.find_by_sql</tt> then the results will be returned in
+ # a +Product+ object with the attributes you specified in the SQL query.
+ #
+ # If you call a complicated SQL query which spans multiple tables the columns specified by the
+ # SELECT will be attributes of the model, whether or not they are columns of the corresponding
+ # table.
+ #
+ # The +sql+ parameter is a full SQL query as a string. It will be called as is, there will be
+ # no database agnostic conversions performed. This should be a last resort because using, for example,
+ # MySQL specific terms will lock you to using that particular database engine or require you to
+ # change your call if you switch engines.
+ #
+ # # A simple SQL query spanning multiple tables
+ # Post.find_by_sql "SELECT p.title, c.author FROM posts p, comments c WHERE p.id = c.post_id"
+ # # => [#<Post:0x36bff9c @attributes={"title"=>"Ruby Meetup", "first_name"=>"Quentin"}>, ...]
+ #
+ # You can use the same string replacement techniques as you can with <tt>ActiveRecord::QueryMethods#where</tt>:
+ #
+ # Post.find_by_sql ["SELECT title FROM posts WHERE author = ? AND created > ?", author_id, start_date]
+ # Post.find_by_sql ["SELECT body FROM comments WHERE author = :user_id OR approved_by = :user_id", { :user_id => user_id }]
+ def find_by_sql(sql, binds = [], preparable: nil, &block)
+ result_set = connection.select_all(sanitize_sql(sql), "#{name} Load", binds, preparable: preparable)
+ column_types = result_set.column_types.dup
+ columns_hash.each_key { |k| column_types.delete k }
+ message_bus = ActiveSupport::Notifications.instrumenter
+
+ payload = {
+ record_count: result_set.length,
+ class_name: name
+ }
+
+ message_bus.instrument("instantiation.active_record", payload) do
+ result_set.map { |record| instantiate(record, column_types, &block) }
+ end
+ end
+
+ # Returns the result of an SQL statement that should only include a COUNT(*) in the SELECT part.
+ # The use of this method should be restricted to complicated SQL queries that can't be executed
+ # using the ActiveRecord::Calculations class methods. Look into those before using this.
+ #
+ # Product.count_by_sql "SELECT COUNT(*) FROM sales s, customers c WHERE s.customer_id = c.id"
+ # # => 12
+ #
+ # ==== Parameters
+ #
+ # * +sql+ - An SQL statement which should return a count query from the database, see the example above.
+ def count_by_sql(sql)
+ connection.select_value(sanitize_sql(sql), "#{name} Count").to_i
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/railtie.rb b/activerecord/lib/active_record/railtie.rb
new file mode 100644
index 0000000000..ead42d64ec
--- /dev/null
+++ b/activerecord/lib/active_record/railtie.rb
@@ -0,0 +1,214 @@
+# frozen_string_literal: true
+
+require "active_record"
+require "rails"
+require "active_model/railtie"
+
+# For now, action_controller must always be present with
+# Rails, so let's make sure that it gets required here first.
+# This is needed for correctly setting up the middleware.
+# In the future, this might become an optional require.
+require "action_controller/railtie"
+
+module ActiveRecord
+ # = Active Record Railtie
+ class Railtie < Rails::Railtie # :nodoc:
+ config.active_record = ActiveSupport::OrderedOptions.new
+
+ config.app_generators.orm :active_record, migration: true,
+ timestamps: true
+
+ config.action_dispatch.rescue_responses.merge!(
+ "ActiveRecord::RecordNotFound" => :not_found,
+ "ActiveRecord::StaleObjectError" => :conflict,
+ "ActiveRecord::RecordInvalid" => :unprocessable_entity,
+ "ActiveRecord::RecordNotSaved" => :unprocessable_entity
+ )
+
+ config.active_record.use_schema_cache_dump = true
+ config.active_record.maintain_test_schema = true
+
+ config.active_record.sqlite3 = ActiveSupport::OrderedOptions.new
+ config.active_record.sqlite3.represent_boolean_as_integer = nil
+
+ config.eager_load_namespaces << ActiveRecord
+
+ rake_tasks do
+ namespace :db do
+ task :load_config do
+ ActiveRecord::Tasks::DatabaseTasks.database_configuration = Rails.application.config.database_configuration
+
+ if defined?(ENGINE_ROOT) && engine = Rails::Engine.find(ENGINE_ROOT)
+ if engine.paths["db/migrate"].existent
+ ActiveRecord::Tasks::DatabaseTasks.migrations_paths += engine.paths["db/migrate"].to_a
+ end
+ end
+ end
+ end
+
+ load "active_record/railties/databases.rake"
+ end
+
+ # When loading console, force ActiveRecord::Base to be loaded
+ # to avoid cross references when loading a constant for the
+ # first time. Also, make it output to STDERR.
+ console do |app|
+ require_relative "railties/console_sandbox" if app.sandbox?
+ require_relative "base"
+ unless ActiveSupport::Logger.logger_outputs_to?(Rails.logger, STDERR, STDOUT)
+ console = ActiveSupport::Logger.new(STDERR)
+ Rails.logger.extend ActiveSupport::Logger.broadcast console
+ end
+ end
+
+ runner do
+ require_relative "base"
+ end
+
+ initializer "active_record.initialize_timezone" do
+ ActiveSupport.on_load(:active_record) do
+ self.time_zone_aware_attributes = true
+ self.default_timezone = :utc
+ end
+ end
+
+ initializer "active_record.logger" do
+ ActiveSupport.on_load(:active_record) { self.logger ||= ::Rails.logger }
+ end
+
+ initializer "active_record.migration_error" do
+ if config.active_record.delete(:migration_error) == :page_load
+ config.app_middleware.insert_after ::ActionDispatch::Callbacks,
+ ActiveRecord::Migration::CheckPending
+ end
+ end
+
+ initializer "active_record.check_schema_cache_dump" do
+ if config.active_record.delete(:use_schema_cache_dump)
+ config.after_initialize do |app|
+ ActiveSupport.on_load(:active_record) do
+ filename = File.join(app.config.paths["db"].first, "schema_cache.yml")
+
+ if File.file?(filename)
+ cache = YAML.load(File.read(filename))
+ if cache.version == ActiveRecord::Migrator.current_version
+ connection.schema_cache = cache
+ connection_pool.schema_cache = cache.dup
+ else
+ warn "Ignoring db/schema_cache.yml because it has expired. The current schema version is #{ActiveRecord::Migrator.current_version}, but the one in the cache is #{cache.version}."
+ end
+ end
+ end
+ end
+ end
+ end
+
+ initializer "active_record.warn_on_records_fetched_greater_than" do
+ if config.active_record.warn_on_records_fetched_greater_than
+ ActiveSupport.on_load(:active_record) do
+ require_relative "relation/record_fetch_warning"
+ end
+ end
+ end
+
+ initializer "active_record.set_configs" do |app|
+ ActiveSupport.on_load(:active_record) do
+ configs = app.config.active_record.dup
+ configs.delete(:sqlite3)
+ configs.each do |k, v|
+ send "#{k}=", v
+ end
+ end
+ end
+
+ # This sets the database configuration from Configuration#database_configuration
+ # and then establishes the connection.
+ initializer "active_record.initialize_database" do
+ ActiveSupport.on_load(:active_record) do
+ self.configurations = Rails.application.config.database_configuration
+
+ begin
+ establish_connection
+ rescue ActiveRecord::NoDatabaseError
+ warn <<-end_warning
+Oops - You have a database configured, but it doesn't exist yet!
+
+Here's how to get started:
+
+ 1. Configure your database in config/database.yml.
+ 2. Run `bin/rails db:create` to create the database.
+ 3. Run `bin/rails db:setup` to load your database schema.
+end_warning
+ raise
+ end
+ end
+ end
+
+ # Expose database runtime to controller for logging.
+ initializer "active_record.log_runtime" do
+ require_relative "railties/controller_runtime"
+ ActiveSupport.on_load(:action_controller) do
+ include ActiveRecord::Railties::ControllerRuntime
+ end
+ end
+
+ initializer "active_record.set_reloader_hooks" do
+ ActiveSupport.on_load(:active_record) do
+ ActiveSupport::Reloader.before_class_unload do
+ if ActiveRecord::Base.connected?
+ ActiveRecord::Base.clear_cache!
+ ActiveRecord::Base.clear_reloadable_connections!
+ end
+ end
+ end
+ end
+
+ initializer "active_record.set_executor_hooks" do
+ ActiveSupport.on_load(:active_record) do
+ ActiveRecord::QueryCache.install_executor_hooks
+ end
+ end
+
+ initializer "active_record.add_watchable_files" do |app|
+ path = app.paths["db"].first
+ config.watchable_files.concat ["#{path}/schema.rb", "#{path}/structure.sql"]
+ end
+
+ initializer "active_record.clear_active_connections" do
+ config.after_initialize do
+ ActiveSupport.on_load(:active_record) do
+ clear_active_connections!
+ end
+ end
+ end
+
+ initializer "active_record.check_represent_sqlite3_boolean_as_integer" do
+ config.after_initialize do
+ ActiveSupport.on_load(:active_record_sqlite3adapter) do
+ represent_boolean_as_integer = Rails.application.config.active_record.sqlite3.delete(:represent_boolean_as_integer)
+ unless represent_boolean_as_integer.nil?
+ ActiveRecord::ConnectionAdapters::SQLite3Adapter.represent_boolean_as_integer = represent_boolean_as_integer
+ end
+
+ unless ActiveRecord::ConnectionAdapters::SQLite3Adapter.represent_boolean_as_integer
+ ActiveSupport::Deprecation.warn <<-MSG
+Leaving `ActiveRecord::ConnectionAdapters::SQLite3Adapter.represent_boolean_as_integer`
+set to false is deprecated. SQLite databases have used 't' and 'f' to serialize
+boolean values and must have old data converted to 1 and 0 (its native boolean
+serialization) before setting this flag to true. Conversion can be accomplished
+by setting up a rake task which runs
+
+ ExampleModel.where("boolean_column = 't'").update_all(boolean_column: 1)
+ ExampleModel.where("boolean_column = 'f'").update_all(boolean_column: 0)
+
+for all models and all boolean columns, after which the flag must be set to
+true by adding the following to your application.rb file:
+
+ Rails.application.config.active_record.sqlite3.represent_boolean_as_integer = true
+MSG
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/railties/console_sandbox.rb b/activerecord/lib/active_record/railties/console_sandbox.rb
new file mode 100644
index 0000000000..8917638a5d
--- /dev/null
+++ b/activerecord/lib/active_record/railties/console_sandbox.rb
@@ -0,0 +1,7 @@
+# frozen_string_literal: true
+
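+# Starts a non-joinable transaction for the console session and rolls it back
+# at exit, so changes made in a --sandbox console are not persisted.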
+ActiveRecord::Base.connection.begin_transaction(joinable: false)
+
+at_exit do
+ ActiveRecord::Base.connection.rollback_transaction
+end
diff --git a/activerecord/lib/active_record/railties/controller_runtime.rb b/activerecord/lib/active_record/railties/controller_runtime.rb
new file mode 100644
index 0000000000..3cf66980a5
--- /dev/null
+++ b/activerecord/lib/active_record/railties/controller_runtime.rb
@@ -0,0 +1,56 @@
+# frozen_string_literal: true
+
+require "active_support/core_ext/module/attr_internal"
+require_relative "../log_subscriber"
+
+module ActiveRecord
+ module Railties # :nodoc:
+ module ControllerRuntime #:nodoc:
+ extend ActiveSupport::Concern
+
+ # TODO Change this to private once we've dropped Ruby 2.2 support.
+ # Workaround for Ruby 2.2 "private attribute?" warning.
+ protected
+
+ attr_internal :db_runtime
+
+ private
+
+ def process_action(action, *args)
+ # We also need to reset the runtime before each action, because queries
+ # run in middleware, or while we are streaming, won't be cleaned up by
+ # the method below.
+ ActiveRecord::LogSubscriber.reset_runtime
+ super
+ end
+
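+ # Carves the database time accrued while rendering out of the view runtime,
+ # so the view and Active Record figures reported for the request do not overlap.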
+ def cleanup_view_runtime
+ if logger && logger.info? && ActiveRecord::Base.connected?
+ db_rt_before_render = ActiveRecord::LogSubscriber.reset_runtime
+ self.db_runtime = (db_runtime || 0) + db_rt_before_render
+ runtime = super
+ db_rt_after_render = ActiveRecord::LogSubscriber.reset_runtime
+ self.db_runtime += db_rt_after_render
+ runtime - db_rt_after_render
+ else
+ super
+ end
+ end
+
+ def append_info_to_payload(payload)
+ super
+ if ActiveRecord::Base.connected?
+ payload[:db_runtime] = (db_runtime || 0) + ActiveRecord::LogSubscriber.reset_runtime
+ end
+ end
+
+ module ClassMethods # :nodoc:
+ def log_process_action(payload)
+ messages, db_runtime = super, payload[:db_runtime]
+ messages << ("ActiveRecord: %.1fms" % db_runtime.to_f) if db_runtime
+ messages
+ end
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/railties/databases.rake b/activerecord/lib/active_record/railties/databases.rake
new file mode 100644
index 0000000000..691b3612d8
--- /dev/null
+++ b/activerecord/lib/active_record/railties/databases.rake
@@ -0,0 +1,369 @@
+# frozen_string_literal: true
+
+require "active_record"
+
+db_namespace = namespace :db do
+ desc "Set the environment value for the database"
+ task "environment:set" => [:environment, :load_config] do
+ ActiveRecord::InternalMetadata.create_table
+ ActiveRecord::InternalMetadata[:environment] = ActiveRecord::Migrator.current_environment
+ end
+
+ task check_protected_environments: [:environment, :load_config] do
+ ActiveRecord::Tasks::DatabaseTasks.check_protected_environments!
+ end
+
+ task :load_config do
+ ActiveRecord::Base.configurations = ActiveRecord::Tasks::DatabaseTasks.database_configuration || {}
+ ActiveRecord::Migrator.migrations_paths = ActiveRecord::Tasks::DatabaseTasks.migrations_paths
+ end
+
+ namespace :create do
+ task all: :load_config do
+ ActiveRecord::Tasks::DatabaseTasks.create_all
+ end
+ end
+
+ desc "Creates the database from DATABASE_URL or config/database.yml for the current RAILS_ENV (use db:create:all to create all databases in the config). Without RAILS_ENV or when RAILS_ENV is development, it defaults to creating the development and test databases."
+ task create: [:load_config] do
+ ActiveRecord::Tasks::DatabaseTasks.create_current
+ end
+
+ namespace :drop do
+ task all: [:load_config, :check_protected_environments] do
+ ActiveRecord::Tasks::DatabaseTasks.drop_all
+ end
+ end
+
+ desc "Drops the database from DATABASE_URL or config/database.yml for the current RAILS_ENV (use db:drop:all to drop all databases in the config). Without RAILS_ENV or when RAILS_ENV is development, it defaults to dropping the development and test databases."
+ task drop: [:load_config, :check_protected_environments] do
+ db_namespace["drop:_unsafe"].invoke
+ end
+
+ task "drop:_unsafe" => [:load_config] do
+ ActiveRecord::Tasks::DatabaseTasks.drop_current
+ end
+
+ namespace :purge do
+ task all: [:load_config, :check_protected_environments] do
+ ActiveRecord::Tasks::DatabaseTasks.purge_all
+ end
+ end
+
+ # desc "Empty the database from DATABASE_URL or config/database.yml for the current RAILS_ENV (use db:purge:all to purge all databases in the config). Without RAILS_ENV it defaults to purging the development and test databases."
+ task purge: [:load_config, :check_protected_environments] do
+ ActiveRecord::Tasks::DatabaseTasks.purge_current
+ end
+
+ desc "Migrate the database (options: VERSION=x, VERBOSE=false, SCOPE=blog)."
+ task migrate: [:environment, :load_config] do
+ ActiveRecord::Tasks::DatabaseTasks.migrate
+ db_namespace["_dump"].invoke
+ end
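+
+ # Illustrative usage of the options above (the version number is assumed):
+ #
+ #   bin/rails db:migrate                          # run all pending migrations
+ #   bin/rails db:migrate VERSION=20180101000000   # migrate up or down to that version
+ #   bin/rails db:migrate SCOPE=blog               # run only migrations scoped to the blog engine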
+
+ # IMPORTANT: This task won't dump the schema if ActiveRecord::Base.dump_schema_after_migration is set to false
+ task :_dump do
+ if ActiveRecord::Base.dump_schema_after_migration
+ case ActiveRecord::Base.schema_format
+ when :ruby then db_namespace["schema:dump"].invoke
+ when :sql then db_namespace["structure:dump"].invoke
+ else
+ raise "unknown schema format #{ActiveRecord::Base.schema_format}"
+ end
+ end
+ # Allow this task to be called as many times as required. An example is the
+ # migrate:redo task, which internally calls two other tasks that depend on this one.
+ db_namespace["_dump"].reenable
+ end
+
+ namespace :migrate do
+ # desc 'Rolls back the database one migration and re-migrates up (options: STEP=x, VERSION=x).'
+ task redo: [:environment, :load_config] do
+ raise "Empty VERSION provided" if ENV["VERSION"] && ENV["VERSION"].empty?
+
+ if ENV["VERSION"]
+ db_namespace["migrate:down"].invoke
+ db_namespace["migrate:up"].invoke
+ else
+ db_namespace["rollback"].invoke
+ db_namespace["migrate"].invoke
+ end
+ end
+
+ # desc 'Resets your database using your migrations for the current environment'
+ task reset: ["db:drop", "db:create", "db:migrate"]
+
+ # desc 'Runs the "up" for a given migration VERSION.'
+ task up: [:environment, :load_config] do
+ raise "VERSION is required" if !ENV["VERSION"] || ENV["VERSION"].empty?
+
+ version = ENV["VERSION"] ? ENV["VERSION"].to_i : nil
+ ActiveRecord::Migrator.run(:up, ActiveRecord::Tasks::DatabaseTasks.migrations_paths, version)
+ db_namespace["_dump"].invoke
+ end
+
+ # desc 'Runs the "down" for a given migration VERSION.'
+ task down: [:environment, :load_config] do
+ raise "VERSION is required - To go down one migration, use db:rollback" if !ENV["VERSION"] || ENV["VERSION"].empty?
+ version = ENV["VERSION"] ? ENV["VERSION"].to_i : nil
+ ActiveRecord::Migrator.run(:down, ActiveRecord::Tasks::DatabaseTasks.migrations_paths, version)
+ db_namespace["_dump"].invoke
+ end
+
+ desc "Display status of migrations"
+ task status: [:environment, :load_config] do
+ unless ActiveRecord::SchemaMigration.table_exists?
+ abort "Schema migrations table does not exist yet."
+ end
+
+ # output
+ puts "\ndatabase: #{ActiveRecord::Base.connection_config[:database]}\n\n"
+ puts "#{'Status'.center(8)} #{'Migration ID'.ljust(14)} Migration Name"
+ puts "-" * 50
+ paths = ActiveRecord::Tasks::DatabaseTasks.migrations_paths
+ ActiveRecord::Migrator.migrations_status(paths).each do |status, version, name|
+ puts "#{status.center(8)} #{version.ljust(14)} #{name}"
+ end
+ puts
+ end
+ end
+
+ desc "Rolls the schema back to the previous version (specify steps w/ STEP=n)."
+ task rollback: [:environment, :load_config] do
+ step = ENV["STEP"] ? ENV["STEP"].to_i : 1
+ ActiveRecord::Migrator.rollback(ActiveRecord::Tasks::DatabaseTasks.migrations_paths, step)
+ db_namespace["_dump"].invoke
+ end
+
+ # desc 'Pushes the schema to the next version (specify steps w/ STEP=n).'
+ task forward: [:environment, :load_config] do
+ step = ENV["STEP"] ? ENV["STEP"].to_i : 1
+ ActiveRecord::Migrator.forward(ActiveRecord::Tasks::DatabaseTasks.migrations_paths, step)
+ db_namespace["_dump"].invoke
+ end
+
+ # desc 'Drops and recreates the database from db/schema.rb for the current environment and loads the seeds.'
+ task reset: [ "db:drop", "db:setup" ]
+
+ # desc "Retrieves the charset for the current environment's database"
+ task charset: [:environment, :load_config] do
+ puts ActiveRecord::Tasks::DatabaseTasks.charset_current
+ end
+
+ # desc "Retrieves the collation for the current environment's database"
+ task collation: [:environment, :load_config] do
+ begin
+ puts ActiveRecord::Tasks::DatabaseTasks.collation_current
+ rescue NoMethodError
+ $stderr.puts "Sorry, your database adapter is not supported yet. Feel free to submit a patch."
+ end
+ end
+
+ desc "Retrieves the current schema version number"
+ task version: [:environment, :load_config] do
+ puts "Current version: #{ActiveRecord::Migrator.current_version}"
+ end
+
+ # desc "Raises an error if there are pending migrations"
+ task abort_if_pending_migrations: [:environment, :load_config] do
+ pending_migrations = ActiveRecord::Migrator.open(ActiveRecord::Tasks::DatabaseTasks.migrations_paths).pending_migrations
+
+ if pending_migrations.any?
+ puts "You have #{pending_migrations.size} pending #{pending_migrations.size > 1 ? 'migrations:' : 'migration:'}"
+ pending_migrations.each do |pending_migration|
+ puts " %4d %s" % [pending_migration.version, pending_migration.name]
+ end
+ abort %{Run `rails db:migrate` to update your database then try again.}
+ end
+ end
+
+ desc "Creates the database, loads the schema, and initializes with the seed data (use db:reset to also drop the database first)"
+ task setup: ["db:schema:load_if_ruby", "db:structure:load_if_sql", :seed]
+
+ desc "Loads the seed data from db/seeds.rb"
+ task :seed do
+ db_namespace["abort_if_pending_migrations"].invoke
+ ActiveRecord::Tasks::DatabaseTasks.load_seed
+ end
+
+ namespace :fixtures do
+ desc "Loads fixtures into the current environment's database. Load specific fixtures using FIXTURES=x,y. Load from subdirectory in test/fixtures using FIXTURES_DIR=z. Specify an alternative path (eg. spec/fixtures) using FIXTURES_PATH=spec/fixtures."
+ task load: [:environment, :load_config] do
+ require_relative "../fixtures"
+
+ base_dir = ActiveRecord::Tasks::DatabaseTasks.fixtures_path
+
+ fixtures_dir = if ENV["FIXTURES_DIR"]
+ File.join base_dir, ENV["FIXTURES_DIR"]
+ else
+ base_dir
+ end
+
+ fixture_files = if ENV["FIXTURES"]
+ ENV["FIXTURES"].split(",")
+ else
+ # The use of String#[] here is to support namespaced fixtures.
+ Dir["#{fixtures_dir}/**/*.yml"].map { |f| f[(fixtures_dir.size + 1)..-5] }
+ end
+
+ ActiveRecord::FixtureSet.create_fixtures(fixtures_dir, fixture_files)
+ end
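+
+ # An illustrative invocation of the task above (fixture names assumed; paths
+ # assume the default test/fixtures location):
+ #
+ #   bin/rails db:fixtures:load FIXTURES=users,products FIXTURES_DIR=staging
+ #
+ # loads test/fixtures/staging/users.yml and test/fixtures/staging/products.yml.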
+
+ # desc "Search for a fixture given a LABEL or ID. Specify an alternative path (eg. spec/fixtures) using FIXTURES_PATH=spec/fixtures."
+ task identify: [:environment, :load_config] do
+ require_relative "../fixtures"
+
+ label, id = ENV["LABEL"], ENV["ID"]
+ raise "LABEL or ID required" if label.blank? && id.blank?
+
+ puts %Q(The fixture ID for "#{label}" is #{ActiveRecord::FixtureSet.identify(label)}.) if label
+
+ base_dir = ActiveRecord::Tasks::DatabaseTasks.fixtures_path
+
+ Dir["#{base_dir}/**/*.yml"].each do |file|
+ if data = YAML::load(ERB.new(IO.read(file)).result)
+ data.each_key do |key|
+ key_id = ActiveRecord::FixtureSet.identify(key)
+
+ if key == label || key_id == id.to_i
+ puts "#{file}: #{key} (#{key_id})"
+ end
+ end
+ end
+ end
+ end
+ end
+
+ namespace :schema do
+ desc "Creates a db/schema.rb file that is portable against any DB supported by Active Record"
+ task dump: [:environment, :load_config] do
+ require_relative "../schema_dumper"
+ filename = ENV["SCHEMA"] || File.join(ActiveRecord::Tasks::DatabaseTasks.db_dir, "schema.rb")
+ File.open(filename, "w:utf-8") do |file|
+ ActiveRecord::SchemaDumper.dump(ActiveRecord::Base.connection, file)
+ end
+ db_namespace["schema:dump"].reenable
+ end
+
+ desc "Loads a schema.rb file into the database"
+ task load: [:environment, :load_config, :check_protected_environments] do
+ ActiveRecord::Tasks::DatabaseTasks.load_schema_current(:ruby, ENV["SCHEMA"])
+ end
+
+ task load_if_ruby: ["db:create", :environment] do
+ db_namespace["schema:load"].invoke if ActiveRecord::Base.schema_format == :ruby
+ end
+
+ namespace :cache do
+ desc "Creates a db/schema_cache.yml file."
+ task dump: [:environment, :load_config] do
+ conn = ActiveRecord::Base.connection
+ filename = File.join(ActiveRecord::Tasks::DatabaseTasks.db_dir, "schema_cache.yml")
+ ActiveRecord::Tasks::DatabaseTasks.dump_schema_cache(conn, filename)
+ end
+
+ desc "Clears a db/schema_cache.yml file."
+ task clear: [:environment, :load_config] do
+ filename = File.join(ActiveRecord::Tasks::DatabaseTasks.db_dir, "schema_cache.yml")
+ rm_f filename, verbose: false
+ end
+ end
+
+ end
+
+ namespace :structure do
+ desc "Dumps the database structure to db/structure.sql. Specify another file with SCHEMA=db/my_structure.sql"
+ task dump: [:environment, :load_config] do
+ filename = ENV["SCHEMA"] || File.join(ActiveRecord::Tasks::DatabaseTasks.db_dir, "structure.sql")
+ current_config = ActiveRecord::Tasks::DatabaseTasks.current_config
+ ActiveRecord::Tasks::DatabaseTasks.structure_dump(current_config, filename)
+
+ if ActiveRecord::SchemaMigration.table_exists?
+ File.open(filename, "a") do |f|
+ f.puts ActiveRecord::Base.connection.dump_schema_information
+ f.print "\n"
+ end
+ end
+ db_namespace["structure:dump"].reenable
+ end
+
+ desc "Recreates the databases from the structure.sql file"
+ task load: [:environment, :load_config, :check_protected_environments] do
+ ActiveRecord::Tasks::DatabaseTasks.load_schema_current(:sql, ENV["SCHEMA"])
+ end
+
+ task load_if_sql: ["db:create", :environment] do
+ db_namespace["structure:load"].invoke if ActiveRecord::Base.schema_format == :sql
+ end
+ end
+
+ namespace :test do
+ # desc "Recreate the test database from the current schema"
+ task load: %w(db:test:purge) do
+ case ActiveRecord::Base.schema_format
+ when :ruby
+ db_namespace["test:load_schema"].invoke
+ when :sql
+ db_namespace["test:load_structure"].invoke
+ end
+ end
+
+ # desc "Recreate the test database from an existent schema.rb file"
+ task load_schema: %w(db:test:purge) do
+ begin
+ should_reconnect = ActiveRecord::Base.connection_pool.active_connection?
+ ActiveRecord::Schema.verbose = false
+ ActiveRecord::Tasks::DatabaseTasks.load_schema ActiveRecord::Base.configurations["test"], :ruby, ENV["SCHEMA"]
+ ensure
+ if should_reconnect
+ ActiveRecord::Base.establish_connection(ActiveRecord::Base.configurations[ActiveRecord::Tasks::DatabaseTasks.env])
+ end
+ end
+ end
+
+ # desc "Recreate the test database from an existent structure.sql file"
+ task load_structure: %w(db:test:purge) do
+ ActiveRecord::Tasks::DatabaseTasks.load_schema ActiveRecord::Base.configurations["test"], :sql, ENV["SCHEMA"]
+ end
+
+ # desc "Empty the test database"
+ task purge: %w(environment load_config check_protected_environments) do
+ ActiveRecord::Tasks::DatabaseTasks.purge ActiveRecord::Base.configurations["test"]
+ end
+
+ # desc 'Load the test schema'
+ task prepare: %w(environment load_config) do
+ unless ActiveRecord::Base.configurations.blank?
+ db_namespace["test:load"].invoke
+ end
+ end
+ end
+end
+
+namespace :railties do
+ namespace :install do
+ # desc "Copies missing migrations from Railties (e.g. engines). You can specify Railties to use with FROM=railtie1,railtie2"
+ task migrations: :'db:load_config' do
+ to_load = ENV["FROM"].blank? ? :all : ENV["FROM"].split(",").map(&:strip)
+ railties = {}
+ Rails.application.migration_railties.each do |railtie|
+ next unless to_load == :all || to_load.include?(railtie.railtie_name)
+
+ if railtie.respond_to?(:paths) && (path = railtie.paths["db/migrate"].first)
+ railties[railtie.railtie_name] = path
+ end
+ end
+
+ on_skip = Proc.new do |name, migration|
+ puts "NOTE: Migration #{migration.basename} from #{name} has been skipped. Migration with the same name already exists."
+ end
+
+ on_copy = Proc.new do |name, migration|
+ puts "Copied migration #{migration.basename} from #{name}"
+ end
+
+ ActiveRecord::Migration.copy(ActiveRecord::Tasks::DatabaseTasks.migrations_paths.first, railties,
+ on_skip: on_skip, on_copy: on_copy)
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/railties/jdbcmysql_error.rb b/activerecord/lib/active_record/railties/jdbcmysql_error.rb
new file mode 100644
index 0000000000..72c75ddd52
--- /dev/null
+++ b/activerecord/lib/active_record/railties/jdbcmysql_error.rb
@@ -0,0 +1,18 @@
+# frozen_string_literal: true
+
+# FIXME: Remove once ArJdbcMySQL provides this error class itself.
+module ArJdbcMySQL #:nodoc:
+ class Error < StandardError #:nodoc:
+ attr_accessor :error_number, :sql_state
+
+ def initialize(msg)
+ super
+ @error_number = nil
+ @sql_state = nil
+ end
+
+ # Mysql gem compatibility
+ alias_method :errno, :error_number
+ alias_method :error, :message
+ end
+end
diff --git a/activerecord/lib/active_record/readonly_attributes.rb b/activerecord/lib/active_record/readonly_attributes.rb
new file mode 100644
index 0000000000..7bc26993d5
--- /dev/null
+++ b/activerecord/lib/active_record/readonly_attributes.rb
@@ -0,0 +1,24 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ module ReadonlyAttributes
+ extend ActiveSupport::Concern
+
+ included do
+ class_attribute :_attr_readonly, instance_accessor: false, default: []
+ end
+
+ module ClassMethods
+ # Attributes listed as readonly will be used to create a new record but update operations will
+ # ignore these fields.
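+ #
+ # A minimal illustration (model and column names assumed):
+ #
+ #   class Post < ActiveRecord::Base
+ #     attr_readonly :title
+ #   end
+ #
+ #   post = Post.create!(title: "Hello")
+ #   post.update(title: "Changed") # the UPDATE statement omits title
+ #   post.reload.title             # => "Hello"
+ #   Post.readonly_attributes.to_a # => ["title"]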
+ def attr_readonly(*attributes)
+ self._attr_readonly = Set.new(attributes.map(&:to_s)) + (_attr_readonly || [])
+ end
+
+ # Returns an array of all the attributes that have been specified as readonly.
+ def readonly_attributes
+ _attr_readonly
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/reflection.rb b/activerecord/lib/active_record/reflection.rb
new file mode 100644
index 0000000000..efe56454d0
--- /dev/null
+++ b/activerecord/lib/active_record/reflection.rb
@@ -0,0 +1,1141 @@
+# frozen_string_literal: true
+
+require "thread"
+require "active_support/core_ext/string/filters"
+require "active_support/deprecation"
+
+module ActiveRecord
+ # = Active Record Reflection
+ module Reflection # :nodoc:
+ extend ActiveSupport::Concern
+
+ included do
+ class_attribute :_reflections, instance_writer: false, default: {}
+ class_attribute :aggregate_reflections, instance_writer: false, default: {}
+ end
+
+ def self.create(macro, name, scope, options, ar)
+ klass = \
+ case macro
+ when :composed_of
+ AggregateReflection
+ when :has_many
+ HasManyReflection
+ when :has_one
+ HasOneReflection
+ when :belongs_to
+ BelongsToReflection
+ else
+ raise "Unsupported Macro: #{macro}"
+ end
+
+ reflection = klass.new(name, scope, options, ar)
+ options[:through] ? ThroughReflection.new(reflection) : reflection
+ end
+
+ def self.add_reflection(ar, name, reflection)
+ ar.clear_reflections_cache
+ ar._reflections = ar._reflections.merge(name.to_s => reflection)
+ end
+
+ def self.add_aggregate_reflection(ar, name, reflection)
+ ar.aggregate_reflections = ar.aggregate_reflections.merge(name.to_s => reflection)
+ end
+
+ # \Reflection enables the ability to examine the associations and aggregations of
+ # Active Record classes and objects. This information, for example,
+ # can be used in a form builder that takes an Active Record object
+ # and creates input fields for all of the attributes depending on their type
+ # and displays the associations to other objects.
+ #
+ # MacroReflection class has info for AggregateReflection and AssociationReflection
+ # classes.
+ module ClassMethods
+ # Returns an array of AggregateReflection objects for all the aggregations in the class.
+ def reflect_on_all_aggregations
+ aggregate_reflections.values
+ end
+
+ # Returns the AggregateReflection object for the named +aggregation+ (use the symbol).
+ #
+ # Account.reflect_on_aggregation(:balance) # => the balance AggregateReflection
+ #
+ def reflect_on_aggregation(aggregation)
+ aggregate_reflections[aggregation.to_s]
+ end
+
+ # Returns a Hash of name of the reflection as the key and an AssociationReflection as the value.
+ #
+ # Account.reflections # => {"balance" => AggregateReflection}
+ #
+ def reflections
+ @__reflections ||= begin
+ ref = {}
+
+ _reflections.each do |name, reflection|
+ parent_reflection = reflection.parent_reflection
+
+ if parent_reflection
+ parent_name = parent_reflection.name
+ ref[parent_name.to_s] = parent_reflection
+ else
+ ref[name] = reflection
+ end
+ end
+
+ ref
+ end
+ end
+
+ # Returns an array of AssociationReflection objects for all the
+ # associations in the class. If you only want to reflect on a certain
+ # association type, pass in the symbol (<tt>:has_many</tt>, <tt>:has_one</tt>,
+ # <tt>:belongs_to</tt>) as the first parameter.
+ #
+ # Example:
+ #
+ # Account.reflect_on_all_associations # returns an array of all associations
+ # Account.reflect_on_all_associations(:has_many) # returns an array of all has_many associations
+ #
+ def reflect_on_all_associations(macro = nil)
+ association_reflections = reflections.values
+ association_reflections.select! { |reflection| reflection.macro == macro } if macro
+ association_reflections
+ end
+
+ # Returns the AssociationReflection object for the +association+ (use the symbol).
+ #
+ # Account.reflect_on_association(:owner) # returns the owner AssociationReflection
+ # Invoice.reflect_on_association(:line_items).macro # returns :has_many
+ #
+ def reflect_on_association(association)
+ reflections[association.to_s]
+ end
+
+ def _reflect_on_association(association) #:nodoc:
+ _reflections[association.to_s]
+ end
+
+ # Returns an array of AssociationReflection objects for all associations which have <tt>:autosave</tt> enabled.
+ def reflect_on_all_autosave_associations
+ reflections.values.select { |reflection| reflection.options[:autosave] }
+ end
+
+ def clear_reflections_cache # :nodoc:
+ @__reflections = nil
+ end
+ end
+
+ # Holds all the methods that are shared between MacroReflection and ThroughReflection.
+ #
+ # AbstractReflection
+ # MacroReflection
+ # AggregateReflection
+ # AssociationReflection
+ # HasManyReflection
+ # HasOneReflection
+ # BelongsToReflection
+ # HasAndBelongsToManyReflection
+ # ThroughReflection
+ # PolymorphicReflection
+ # RuntimeReflection
+ class AbstractReflection # :nodoc:
+ def through_reflection?
+ false
+ end
+
+ def table_name
+ klass.table_name
+ end
+
+ # Returns a new, unsaved instance of the associated class. +attributes+ will
+ # be passed to the class's constructor.
+ def build_association(attributes, &block)
+ klass.new(attributes, &block)
+ end
+
+ def quoted_table_name
+ klass.quoted_table_name
+ end
+
+ def primary_key_type
+ klass.type_for_attribute(klass.primary_key)
+ end
+
+ # Returns the class name for the macro.
+ #
+ # <tt>composed_of :balance, class_name: 'Money'</tt> returns <tt>'Money'</tt>
+ # <tt>has_many :clients</tt> returns <tt>'Client'</tt>
+ def class_name
+ @class_name ||= (options[:class_name] || derive_class_name).to_s
+ end
+
+ JoinKeys = Struct.new(:key, :foreign_key) # :nodoc:
+
+ def join_keys
+ @join_keys ||= get_join_keys(klass)
+ end
+
+ # Returns a list of scopes that should be applied for this Reflection
+ # object when querying the database.
+ def scopes
+ scope ? [scope] : []
+ end
+
+ def scope_chain
+ chain.map(&:scopes)
+ end
+ deprecate :scope_chain
+
+ def build_join_constraint(table, foreign_table)
+ key = join_keys.key
+ foreign_key = join_keys.foreign_key
+
+ constraint = table[key].eq(foreign_table[foreign_key])
+
+ if klass.finder_needs_type_condition?
+ table.create_and([constraint, klass.send(:type_condition, table)])
+ else
+ constraint
+ end
+ end
+
+ def join_scope(table, foreign_klass)
+ predicate_builder = predicate_builder(table)
+ scope_chain_items = join_scopes(table, predicate_builder)
+ klass_scope = klass_join_scope(table, predicate_builder)
+
+ if type
+ klass_scope.where!(type => foreign_klass.base_class.sti_name)
+ end
+
+ scope_chain_items.inject(klass_scope, &:merge!)
+ end
+
+ def join_scopes(table, predicate_builder) # :nodoc:
+ if scope
+ [build_scope(table, predicate_builder).instance_exec(&scope)]
+ else
+ []
+ end
+ end
+
+ def klass_join_scope(table, predicate_builder) # :nodoc:
+ current_scope = klass.current_scope
+
+ if current_scope && current_scope.empty_scope?
+ build_scope(table, predicate_builder)
+ else
+ klass.default_scoped(build_scope(table, predicate_builder))
+ end
+ end
+
+ def constraints
+ chain.flat_map(&:scopes)
+ end
+
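+ # Name of the column used to cache the number of associated records.
+ # For an illustrative <tt>belongs_to :post, counter_cache: true</tt> declared
+ # on a Comment model this derives "comments_count"; on the has_many side it
+ # defaults to "#{name}_count" unless an explicit column name is given.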
+ def counter_cache_column
+ if belongs_to?
+ if options[:counter_cache] == true
+ "#{active_record.name.demodulize.underscore.pluralize}_count"
+ elsif options[:counter_cache]
+ options[:counter_cache].to_s
+ end
+ else
+ options[:counter_cache] ? options[:counter_cache].to_s : "#{name}_count"
+ end
+ end
+
+ def inverse_of
+ return unless inverse_name
+
+ @inverse_of ||= klass._reflect_on_association inverse_name
+ end
+
+ def check_validity_of_inverse!
+ unless polymorphic?
+ if has_inverse? && inverse_of.nil?
+ raise InverseOfAssociationNotFoundError.new(self)
+ end
+ end
+ end
+
+ # This is nasty. We need to avoid the following situation:
+ #
+ # * An associated record is deleted via record.destroy
+ # * Hence the callbacks run, and they find a belongs_to on the record with a
+ # :counter_cache option which points back at our owner. So they update the
+ # counter cache.
+ # * In which case, we must make sure to *not* update the counter cache, or else
+ # it will be decremented twice.
+ #
+ # Hence this method.
+ def inverse_which_updates_counter_cache
+ return @inverse_which_updates_counter_cache if defined?(@inverse_which_updates_counter_cache)
+ @inverse_which_updates_counter_cache = klass.reflect_on_all_associations(:belongs_to).find do |inverse|
+ inverse.counter_cache_column == counter_cache_column
+ end
+ end
+ alias inverse_updates_counter_cache? inverse_which_updates_counter_cache
+
+ def inverse_updates_counter_in_memory?
+ inverse_of && inverse_which_updates_counter_cache == inverse_of
+ end
+
+ # Returns whether a counter cache should be used for this association.
+ #
+ # The counter_cache option must be given on either the owner or inverse
+ # association, and the column must be present on the owner.
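+ #
+ # Illustrative models (names assumed):
+ #
+ #   class Post < ActiveRecord::Base
+ #     has_many :comments
+ #   end
+ #
+ #   class Comment < ActiveRecord::Base
+ #     belongs_to :post, counter_cache: true
+ #   end
+ #
+ # Here the :comments association has a cached counter as long as the posts
+ # table has a comments_count column.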
+ def has_cached_counter?
+ options[:counter_cache] ||
+ inverse_which_updates_counter_cache && inverse_which_updates_counter_cache.options[:counter_cache] &&
+ !!active_record.columns_hash[counter_cache_column]
+ end
+
+ def counter_must_be_updated_by_has_many?
+ !inverse_updates_counter_in_memory? && has_cached_counter?
+ end
+
+ def alias_candidate(name)
+ "#{plural_name}_#{name}"
+ end
+
+ def chain
+ collect_join_chain
+ end
+
+ def get_join_keys(association_klass)
+ JoinKeys.new(join_pk(association_klass), join_fk)
+ end
+
+ def build_scope(table, predicate_builder = predicate_builder(table))
+ Relation.create(klass, table, predicate_builder)
+ end
+
+ protected
+ def actual_source_reflection # FIXME: this is a horrible name
+ self
+ end
+
+ private
+ def predicate_builder(table)
+ PredicateBuilder.new(TableMetadata.new(klass, table))
+ end
+
+ def join_pk(_)
+ foreign_key
+ end
+
+ def join_fk
+ active_record_primary_key
+ end
+ end
+
+ # Base class for AggregateReflection and AssociationReflection. Objects of
+ # AggregateReflection and AssociationReflection are returned by the Reflection::ClassMethods.
+ class MacroReflection < AbstractReflection
+ # Returns the name of the macro.
+ #
+ # <tt>composed_of :balance, class_name: 'Money'</tt> returns <tt>:balance</tt>
+ # <tt>has_many :clients</tt> returns <tt>:clients</tt>
+ attr_reader :name
+
+ attr_reader :scope
+
+ # Returns the hash of options used for the macro.
+ #
+ # <tt>composed_of :balance, class_name: 'Money'</tt> returns <tt>{ class_name: "Money" }</tt>
+ # <tt>has_many :clients</tt> returns <tt>{}</tt>
+ attr_reader :options
+
+ attr_reader :active_record
+
+ attr_reader :plural_name # :nodoc:
+
+ def initialize(name, scope, options, active_record)
+ @name = name
+ @scope = scope
+ @options = options
+ @active_record = active_record
+ @klass = options[:anonymous_class]
+ @plural_name = active_record.pluralize_table_names ?
+ name.to_s.pluralize : name.to_s
+ end
+
+ def autosave=(autosave)
+ @options[:autosave] = autosave
+ parent_reflection = self.parent_reflection
+ if parent_reflection
+ parent_reflection.autosave = autosave
+ end
+ end
+
+ # Returns the class for the macro.
+ #
+ # <tt>composed_of :balance, class_name: 'Money'</tt> returns the Money class
+ # <tt>has_many :clients</tt> returns the Client class
+ def klass
+ @klass ||= compute_class(class_name)
+ end
+
+ def compute_class(name)
+ name.constantize
+ end
+
+ # Returns +true+ if +self+ and +other_aggregation+ have the same +name+ attribute, +active_record+ attribute,
+ # and +other_aggregation+ has an options hash assigned to it.
+ def ==(other_aggregation)
+ super ||
+ other_aggregation.kind_of?(self.class) &&
+ name == other_aggregation.name &&
+ !other_aggregation.options.nil? &&
+ active_record == other_aggregation.active_record
+ end
+
+ def scope_for(klass)
+ scope ? klass.unscoped.instance_exec(nil, &scope) : klass.unscoped
+ end
+
+ private
+ def derive_class_name
+ name.to_s.camelize
+ end
+ end
+
+ # Holds all the metadata about an aggregation as it was specified in the
+ # Active Record class.
+ class AggregateReflection < MacroReflection #:nodoc:
+ def mapping
+ mapping = options[:mapping] || [name, name]
+ mapping.first.is_a?(Array) ? mapping : [mapping]
+ end
+ end
+
+ # Holds all the metadata about an association as it was specified in the
+ # Active Record class.
+ class AssociationReflection < MacroReflection #:nodoc:
+ # Returns the target association's class.
+ #
+ # class Author < ActiveRecord::Base
+ # has_many :books
+ # end
+ #
+ # Author.reflect_on_association(:books).klass
+ # # => Book
+ #
+ # <b>Note:</b> Do not call +klass.new+ or +klass.create+ to instantiate
+ # a new association object. Use +build_association+ or +create_association+
+ # instead. This allows plugins to hook into association object creation.
+ def klass
+ @klass ||= compute_class(class_name)
+ end
+
+ def compute_class(name)
+ active_record.send(:compute_type, name)
+ end
+
+ attr_reader :type, :foreign_type
+ attr_accessor :parent_reflection # Reflection
+
+ def initialize(name, scope, options, active_record)
+ super
+ @automatic_inverse_of = nil
+ @type = options[:as] && (options[:foreign_type] || "#{options[:as]}_type")
+ @foreign_type = options[:foreign_type] || "#{name}_type"
+ @constructable = calculate_constructable(macro, options)
+ @association_scope_cache = {}
+ @scope_lock = Mutex.new
+
+ if options[:class_name] && options[:class_name].class == Class
+ ActiveSupport::Deprecation.warn(<<-MSG.squish)
+ Passing a class to the `class_name` is deprecated and will raise
+ an ArgumentError in Rails 5.2. It eager loads more classes than
+ necessary and potentially creates circular dependencies.
+
+ Please pass the class name as a string:
+ `#{macro} :#{name}, class_name: '#{options[:class_name]}'`
+ MSG
+ end
+ end
+
+ def association_scope_cache(conn, owner)
+ key = conn.prepared_statements
+ if polymorphic?
+ key = [key, owner._read_attribute(@foreign_type)]
+ end
+ @association_scope_cache[key] ||= @scope_lock.synchronize {
+ @association_scope_cache[key] ||= yield
+ }
+ end
+
+ def constructable? # :nodoc:
+ @constructable
+ end
+
+ def join_table
+ @join_table ||= options[:join_table] || derive_join_table
+ end
+
+ def foreign_key
+ @foreign_key ||= options[:foreign_key] || derive_foreign_key.freeze
+ end
+
+ def association_foreign_key
+ @association_foreign_key ||= options[:association_foreign_key] || class_name.foreign_key
+ end
+
+ # klass option is necessary to support loading polymorphic associations
+ def association_primary_key(klass = nil)
+ options[:primary_key] || primary_key(klass || self.klass)
+ end
+
+ def association_primary_key_type
+ klass.type_for_attribute(association_primary_key.to_s)
+ end
+
+ def active_record_primary_key
+ @active_record_primary_key ||= options[:primary_key] || primary_key(active_record)
+ end
+
+ def check_validity!
+ check_validity_of_inverse!
+ end
+
+ def check_preloadable!
+ return unless scope
+
+ if scope.arity > 0
+ raise ArgumentError, <<-MSG.squish
+ The association scope '#{name}' is instance dependent (the scope
+ block takes an argument). Preloading instance dependent scopes is
+ not supported.
+ MSG
+ end
+ end
+ alias :check_eager_loadable! :check_preloadable!
+
+ def join_id_for(owner) # :nodoc:
+ owner[active_record_primary_key]
+ end
+
+ def through_reflection
+ nil
+ end
+
+ def source_reflection
+ self
+ end
+
+ # A chain of reflections from this one back to the owner. For more see the explanation in
+ # ThroughReflection.
+ def collect_join_chain
+ [self]
+ end
+
+ # This is for clearing cache on the reflection. Useful for tests that need to compare
+ # SQL queries on associations.
+ def clear_association_scope_cache # :nodoc:
+ @association_scope_cache.clear
+ end
+
+ def nested?
+ false
+ end
+
+ def has_scope?
+ scope
+ end
+
+ def has_inverse?
+ inverse_name
+ end
+
+ def polymorphic_inverse_of(associated_class)
+ if has_inverse?
+ if inverse_relationship = associated_class._reflect_on_association(options[:inverse_of])
+ inverse_relationship
+ else
+ raise InverseOfAssociationNotFoundError.new(self, associated_class)
+ end
+ end
+ end
+
+ # Returns the macro type.
+ #
+ # <tt>has_many :clients</tt> returns <tt>:has_many</tt>
+ def macro; raise NotImplementedError; end
+
+ # Returns whether or not this association reflection is for a collection
+ # association. Returns +true+ if the +macro+ is either +has_many+ or
+ # +has_and_belongs_to_many+, +false+ otherwise.
+ def collection?
+ false
+ end
+
+ # Returns whether or not the association should be validated as part of
+ # the parent's validation.
+ #
+ # Unless you explicitly disable validation with
+ # <tt>validate: false</tt>, validation will take place when:
+ #
+ # * you explicitly enable validation; <tt>validate: true</tt>
+ # * you use autosave; <tt>autosave: true</tt>
+ # * the association is a +has_many+ association
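+ #
+ # For example (illustrative declarations):
+ #
+ #   has_many :comments                  # validated, because it is a collection
+ #   has_one :profile, autosave: true    # validated, because of autosave
+ #   belongs_to :author                  # not validated by default
+ #   belongs_to :author, validate: true  # validated, enabled explicitly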
+ def validate?
+ !options[:validate].nil? ? options[:validate] : (options[:autosave] == true || collection?)
+ end
+
+ # Returns +true+ if +self+ is a +belongs_to+ reflection.
+ def belongs_to?; false; end
+
+ # Returns +true+ if +self+ is a +has_one+ reflection.
+ def has_one?; false; end
+
+ def association_class; raise NotImplementedError; end
+
+ def polymorphic?
+ options[:polymorphic]
+ end
+
+ VALID_AUTOMATIC_INVERSE_MACROS = [:has_many, :has_one, :belongs_to]
+ INVALID_AUTOMATIC_INVERSE_OPTIONS = [:conditions, :through, :foreign_key]
+
+ def add_as_source(seed)
+ seed
+ end
+
+ def add_as_polymorphic_through(reflection, seed)
+ seed + [PolymorphicReflection.new(self, reflection)]
+ end
+
+ def add_as_through(seed)
+ seed + [self]
+ end
+
+ def extensions
+ Array(options[:extend])
+ end
+
+ private
+
+ def calculate_constructable(macro, options)
+ true
+ end
+
+ # Attempts to find the inverse association name automatically.
+ # If it cannot find a suitable inverse association name, it returns
+ # +nil+.
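+ #
+ # Illustrative example (model names assumed): with a Post model that
+ # <tt>has_many :comments</tt> and a Comment model that <tt>belongs_to :post</tt>,
+ # the :comments reflection resolves :post as its inverse without an explicit
+ # <tt>:inverse_of</tt> option.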
+ def inverse_name
+ options.fetch(:inverse_of) do
+ @automatic_inverse_of ||= automatic_inverse_of
+ end
+ end
+
+ # Returns either +false+ or the inverse association name that it finds.
+ def automatic_inverse_of
+ if can_find_inverse_of_automatically?(self)
+ inverse_name = ActiveSupport::Inflector.underscore(options[:as] || active_record.name.demodulize).to_sym
+
+ begin
+ reflection = klass._reflect_on_association(inverse_name)
+ rescue NameError
+ # Give up: we couldn't compute the klass type so we won't be able
+ # to find any associations either.
+ reflection = false
+ end
+
+ if valid_inverse_reflection?(reflection)
+ return inverse_name
+ end
+ end
+
+ false
+ end
+
+ # Checks if the inverse reflection that is returned from the
+ # +automatic_inverse_of+ method is a valid reflection. We must
+ # make sure that the reflection's active_record name matches up
+ # with the current reflection's klass name.
+ #
+ # Note: klass will always be valid because when there's a NameError
+ # from calling +klass+, +reflection+ will already be set to false.
+ def valid_inverse_reflection?(reflection)
+ reflection &&
+ klass.name == reflection.active_record.name &&
+ can_find_inverse_of_automatically?(reflection)
+ end
+
+ # Checks to see if the reflection doesn't have any options that prevent
+ # us from being able to guess the inverse automatically. First, the
+ # <tt>inverse_of</tt> option cannot be set to false. Second, we must
+ # have <tt>has_many</tt>, <tt>has_one</tt>, <tt>belongs_to</tt> associations.
+ # Third, we must not have options such as <tt>:foreign_key</tt>
+ # which prevent us from correctly guessing the inverse association.
+ #
+ # Anything with a scope can additionally ruin our attempt at finding an
+ # inverse, so we exclude reflections with scopes.
+ def can_find_inverse_of_automatically?(reflection)
+ reflection.options[:inverse_of] != false &&
+ VALID_AUTOMATIC_INVERSE_MACROS.include?(reflection.macro) &&
+ !INVALID_AUTOMATIC_INVERSE_OPTIONS.any? { |opt| reflection.options[opt] } &&
+ !reflection.scope
+ end
+
+ def derive_class_name
+ class_name = name.to_s
+ class_name = class_name.singularize if collection?
+ class_name.camelize
+ end
+
+ def derive_foreign_key
+ if belongs_to?
+ "#{name}_id"
+ elsif options[:as]
+ "#{options[:as]}_id"
+ else
+ active_record.name.foreign_key
+ end
+ end
+
+ def derive_join_table
+ ModelSchema.derive_join_table_name active_record.table_name, klass.table_name
+ end
+
+ def primary_key(klass)
+ klass.primary_key || raise(UnknownPrimaryKey.new(klass))
+ end
+ end
+
+ class HasManyReflection < AssociationReflection # :nodoc:
+ def macro; :has_many; end
+
+ def collection?; true; end
+
+ def association_class
+ if options[:through]
+ Associations::HasManyThroughAssociation
+ else
+ Associations::HasManyAssociation
+ end
+ end
+ end
+
+ class HasOneReflection < AssociationReflection # :nodoc:
+ def macro; :has_one; end
+
+ def has_one?; true; end
+
+ def association_class
+ if options[:through]
+ Associations::HasOneThroughAssociation
+ else
+ Associations::HasOneAssociation
+ end
+ end
+
+ private
+
+ def calculate_constructable(macro, options)
+ !options[:through]
+ end
+ end
+
+ class BelongsToReflection < AssociationReflection # :nodoc:
+ def macro; :belongs_to; end
+
+ def belongs_to?; true; end
+
+ def association_class
+ if polymorphic?
+ Associations::BelongsToPolymorphicAssociation
+ else
+ Associations::BelongsToAssociation
+ end
+ end
+
+ def join_id_for(owner) # :nodoc:
+ owner[foreign_key]
+ end
+
+ private
+
+ def calculate_constructable(macro, options)
+ !polymorphic?
+ end
+
+ def join_fk
+ foreign_key
+ end
+
+ def join_pk(klass)
+ polymorphic? ? association_primary_key(klass) : association_primary_key
+ end
+ end
+
+ class HasAndBelongsToManyReflection < AssociationReflection # :nodoc:
+ def macro; :has_and_belongs_to_many; end
+
+ def collection?
+ true
+ end
+ end
+
+ # Holds all the metadata about a :through association as it was specified
+ # in the Active Record class.
+ class ThroughReflection < AbstractReflection #:nodoc:
+ delegate :foreign_key, :foreign_type, :association_foreign_key,
+ :active_record_primary_key, :type, :get_join_keys, to: :source_reflection
+
+ def initialize(delegate_reflection)
+ @delegate_reflection = delegate_reflection
+ @klass = delegate_reflection.options[:anonymous_class]
+ @source_reflection_name = delegate_reflection.options[:source]
+ end
+
+ def through_reflection?
+ true
+ end
+
+ def klass
+ @klass ||= delegate_reflection.compute_class(class_name)
+ end
+
+ # Returns the source of the through reflection. It checks both a singularized
+ # and pluralized form for <tt>:belongs_to</tt> or <tt>:has_many</tt>.
+ #
+ # class Post < ActiveRecord::Base
+ # has_many :taggings
+ # has_many :tags, through: :taggings
+ # end
+ #
+ # class Tagging < ActiveRecord::Base
+ # belongs_to :post
+ # belongs_to :tag
+ # end
+ #
+ # tags_reflection = Post.reflect_on_association(:tags)
+ # tags_reflection.source_reflection
+ # # => <ActiveRecord::Reflection::BelongsToReflection: @name=:tag, @active_record=Tagging, @plural_name="tags">
+ #
+ def source_reflection
+ through_reflection.klass._reflect_on_association(source_reflection_name)
+ end
+
+ # Returns the AssociationReflection object specified in the <tt>:through</tt> option
+ # of a HasManyThrough or HasOneThrough association.
+ #
+ # class Post < ActiveRecord::Base
+ # has_many :taggings
+ # has_many :tags, through: :taggings
+ # end
+ #
+ # tags_reflection = Post.reflect_on_association(:tags)
+ # tags_reflection.through_reflection
+ # # => <ActiveRecord::Reflection::HasManyReflection: @name=:taggings, @active_record=Post, @plural_name="taggings">
+ #
+ def through_reflection
+ active_record._reflect_on_association(options[:through])
+ end
+
+ # Returns an array of reflections which are involved in this association. Each item in the
+ # array corresponds to a table which will be part of the query for this association.
+ #
+ # The chain is built by recursively calling #chain on the source reflection and the through
+ # reflection. The base case for the recursion is a normal association, which just returns
+ # [self] as its #chain.
+ #
+ # class Post < ActiveRecord::Base
+ # has_many :taggings
+ # has_many :tags, through: :taggings
+ # end
+ #
+ # tags_reflection = Post.reflect_on_association(:tags)
+ # tags_reflection.chain
+ # # => [<ActiveRecord::Reflection::ThroughReflection: @delegate_reflection=#<ActiveRecord::Reflection::HasManyReflection: @name=:tags...>,
+ # <ActiveRecord::Reflection::HasManyReflection: @name=:taggings, @options={}, @active_record=Post>]
+ #
+ def collect_join_chain
+ collect_join_reflections [self]
+ end
+
+ # This is for clearing cache on the reflection. Useful for tests that need to compare
+ # SQL queries on associations.
+ def clear_association_scope_cache # :nodoc:
+ delegate_reflection.clear_association_scope_cache
+ source_reflection.clear_association_scope_cache
+ through_reflection.clear_association_scope_cache
+ end
+
+ def scopes
+ source_reflection.scopes + super
+ end
+
+ def join_scopes(table, predicate_builder) # :nodoc:
+ source_reflection.join_scopes(table, predicate_builder) + super
+ end
+
+ def source_type_scope
+ through_reflection.klass.where(foreign_type => options[:source_type])
+ end
+
+ def has_scope?
+ scope || options[:source_type] ||
+ source_reflection.has_scope? ||
+ through_reflection.has_scope?
+ end
+
+ # A through association is nested if there would be more than one join table.
+ def nested?
+ source_reflection.through_reflection? || through_reflection.through_reflection?
+ end
+
+ # We want to use the klass from this reflection, rather than just delegate straight to
+ # the source_reflection, because the source_reflection may be polymorphic. We still
+ # need to respect the source_reflection's :primary_key option, though.
+ def association_primary_key(klass = nil)
+ # Get the "actual" source reflection if the immediate source reflection has a
+ # source reflection itself
+ actual_source_reflection.options[:primary_key] || primary_key(klass || self.klass)
+ end
+
+ def association_primary_key_type
+ klass.type_for_attribute(association_primary_key.to_s)
+ end
+
+ # Gets an array of possible <tt>:through</tt> source reflection names in both singular and plural form.
+ #
+ # class Post < ActiveRecord::Base
+ # has_many :taggings
+ # has_many :tags, through: :taggings
+ # end
+ #
+ # tags_reflection = Post.reflect_on_association(:tags)
+ # tags_reflection.source_reflection_names
+ # # => [:tag, :tags]
+ #
+ def source_reflection_names
+ options[:source] ? [options[:source]] : [name.to_s.singularize, name].uniq
+ end
+
+ def source_reflection_name # :nodoc:
+ return @source_reflection_name if @source_reflection_name
+
+ names = [name.to_s.singularize, name].collect(&:to_sym).uniq
+ names = names.find_all { |n|
+ through_reflection.klass._reflect_on_association(n)
+ }
+
+ if names.length > 1
+ raise AmbiguousSourceReflectionForThroughAssociation.new(
+ active_record.name,
+ macro,
+ name,
+ options,
+ source_reflection_names
+ )
+ end
+
+ @source_reflection_name = names.first
+ end
+
+ def source_options
+ source_reflection.options
+ end
+
+ def through_options
+ through_reflection.options
+ end
+
+ def join_id_for(owner) # :nodoc:
+ source_reflection.join_id_for(owner)
+ end
+
+ def check_validity!
+ if through_reflection.nil?
+ raise HasManyThroughAssociationNotFoundError.new(active_record.name, self)
+ end
+
+ if through_reflection.polymorphic?
+ if has_one?
+ raise HasOneAssociationPolymorphicThroughError.new(active_record.name, self)
+ else
+ raise HasManyThroughAssociationPolymorphicThroughError.new(active_record.name, self)
+ end
+ end
+
+ if source_reflection.nil?
+ raise HasManyThroughSourceAssociationNotFoundError.new(self)
+ end
+
+ if options[:source_type] && !source_reflection.polymorphic?
+ raise HasManyThroughAssociationPointlessSourceTypeError.new(active_record.name, self, source_reflection)
+ end
+
+ if source_reflection.polymorphic? && options[:source_type].nil?
+ raise HasManyThroughAssociationPolymorphicSourceError.new(active_record.name, self, source_reflection)
+ end
+
+ if has_one? && through_reflection.collection?
+ raise HasOneThroughCantAssociateThroughCollection.new(active_record.name, self, through_reflection)
+ end
+
+ if parent_reflection.nil?
+ reflections = active_record.reflections.keys.map(&:to_sym)
+
+ if reflections.index(through_reflection.name) > reflections.index(name)
+ raise HasManyThroughOrderError.new(active_record.name, self, through_reflection)
+ end
+ end
+
+ check_validity_of_inverse!
+ end
+
+ def constraints
+ scope_chain = source_reflection.constraints
+ scope_chain << scope if scope
+ scope_chain
+ end
+
+ def add_as_source(seed)
+ collect_join_reflections seed
+ end
+
+ def add_as_polymorphic_through(reflection, seed)
+ collect_join_reflections(seed + [PolymorphicReflection.new(self, reflection)])
+ end
+
+ def add_as_through(seed)
+ collect_join_reflections(seed + [self])
+ end
+
+ # TODO Change this to private once we've dropped Ruby 2.2 support.
+ # Workaround for Ruby 2.2 "private attribute?" warning.
+ protected
+ attr_reader :delegate_reflection
+
+ def actual_source_reflection # FIXME: this is a horrible name
+ source_reflection.actual_source_reflection
+ end
+
+ private
+ def collect_join_reflections(seed)
+ a = source_reflection.add_as_source seed
+ if options[:source_type]
+ through_reflection.add_as_polymorphic_through self, a
+ else
+ through_reflection.add_as_through a
+ end
+ end
+
+ def primary_key(klass)
+ klass.primary_key || raise(UnknownPrimaryKey.new(klass))
+ end
+
+ def inverse_name; delegate_reflection.send(:inverse_name); end
+
+ def derive_class_name
+ # get the class_name of the belongs_to association of the through reflection
+ options[:source_type] || source_reflection.class_name
+ end
+
+ delegate_methods = AssociationReflection.public_instance_methods -
+ public_instance_methods
+
+ delegate(*delegate_methods, to: :delegate_reflection)
+ end
+
+ class PolymorphicReflection < AbstractReflection # :nodoc:
+ def initialize(reflection, previous_reflection)
+ @reflection = reflection
+ @previous_reflection = previous_reflection
+ end
+
+ def scopes
+ scopes = @previous_reflection.scopes
+ if @previous_reflection.options[:source_type]
+ scopes + [@previous_reflection.source_type_scope]
+ else
+ scopes
+ end
+ end
+
+ def join_scopes(table, predicate_builder) # :nodoc:
+ scopes = @previous_reflection.join_scopes(table, predicate_builder) + super
+ if @previous_reflection.options[:source_type]
+ scopes + [@previous_reflection.source_type_scope]
+ else
+ scopes
+ end
+ end
+
+ def klass
+ @reflection.klass
+ end
+
+ def scope
+ @reflection.scope
+ end
+
+ def table_name
+ @reflection.table_name
+ end
+
+ def plural_name
+ @reflection.plural_name
+ end
+
+ def type
+ @reflection.type
+ end
+
+ def constraints
+ @reflection.constraints + [source_type_info]
+ end
+
+ def source_type_info
+ type = @previous_reflection.foreign_type
+ source_type = @previous_reflection.options[:source_type]
+ lambda { |object| where(type => source_type) }
+ end
+
+ def get_join_keys(association_klass)
+ @reflection.get_join_keys(association_klass)
+ end
+ end
+
+ class RuntimeReflection < PolymorphicReflection # :nodoc:
+ attr_accessor :next
+
+ def initialize(reflection, association)
+ @reflection = reflection
+ @association = association
+ end
+
+ def klass
+ @association.klass
+ end
+
+ def table_name
+ klass.table_name
+ end
+
+ def constraints
+ @reflection.constraints
+ end
+
+ def source_type_info
+ @reflection.source_type_info
+ end
+
+ def alias_candidate(name)
+ "#{plural_name}_#{name}_join"
+ end
+
+ def alias_name
+ Arel::Table.new(table_name, type_caster: klass.type_caster)
+ end
+
+ def all_includes; yield; end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/relation.rb b/activerecord/lib/active_record/relation.rb
new file mode 100644
index 0000000000..caabad6055
--- /dev/null
+++ b/activerecord/lib/active_record/relation.rb
@@ -0,0 +1,719 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ # = Active Record \Relation
+ class Relation
+ MULTI_VALUE_METHODS = [:includes, :eager_load, :preload, :select, :group,
+ :order, :joins, :left_outer_joins, :references,
+ :extending, :unscope]
+
+ SINGLE_VALUE_METHODS = [:limit, :offset, :lock, :readonly, :reordering,
+ :reverse_order, :distinct, :create_with, :skip_query_cache]
+ CLAUSE_METHODS = [:where, :having, :from]
+ INVALID_METHODS_FOR_DELETE_ALL = [:limit, :distinct, :offset, :group, :having]
+
+ VALUE_METHODS = MULTI_VALUE_METHODS + SINGLE_VALUE_METHODS + CLAUSE_METHODS
+
+ include Enumerable
+ include FinderMethods, Calculations, SpawnMethods, QueryMethods, Batches, Explain, Delegation
+
+ attr_reader :table, :klass, :loaded, :predicate_builder
+ alias :model :klass
+ alias :loaded? :loaded
+ alias :locked? :lock_value
+
+ def initialize(klass, table, predicate_builder, values = {})
+ @klass = klass
+ @table = table
+ @values = values
+ @offsets = {}
+ @loaded = false
+ @predicate_builder = predicate_builder
+ end
+
+ def initialize_copy(other)
+ @values = @values.dup
+ reset
+ end
+
+ def insert(values) # :nodoc:
+ primary_key_value = nil
+
+ if primary_key && Hash === values
+ primary_key_value = values[values.keys.find { |k|
+ k.name == primary_key
+ }]
+
+ if !primary_key_value && klass.prefetch_primary_key?
+ primary_key_value = klass.next_sequence_value
+ values[arel_attribute(klass.primary_key)] = primary_key_value
+ end
+ end
+
+ im = arel.create_insert
+ im.into @table
+
+ substitutes = substitute_values values
+
+ if values.empty? # empty insert
+ im.values = Arel.sql(connection.empty_insert_statement_value)
+ else
+ im.insert substitutes
+ end
+
+ @klass.connection.insert(
+ im,
+ "SQL",
+ primary_key || false,
+ primary_key_value,
+ nil,
+ )
+ end
+
+ def _update_record(values, id, id_was) # :nodoc:
+ substitutes = substitute_values values
+
+ scope = @klass.unscoped
+
+ if @klass.finder_needs_type_condition?
+ scope.unscope!(where: @klass.inheritance_column)
+ end
+
+ relation = scope.where(@klass.primary_key => (id_was || id))
+ um = relation
+ .arel
+ .compile_update(substitutes, @klass.primary_key)
+
+ @klass.connection.update(
+ um,
+ "SQL",
+ )
+ end
+
+ def substitute_values(values) # :nodoc:
+ values.map do |arel_attr, value|
+ bind = predicate_builder.build_bind_attribute(arel_attr.name, value)
+ [arel_attr, bind]
+ end
+ end
+
+ def arel_attribute(name) # :nodoc:
+ klass.arel_attribute(name, table)
+ end
+
+ # Initializes new record from relation while maintaining the current
+ # scope.
+ #
+ # Expects arguments in the same format as {ActiveRecord::Base.new}[rdoc-ref:Core.new].
+ #
+ # users = User.where(name: 'DHH')
+ # user = users.new # => #<User id: nil, name: "DHH", created_at: nil, updated_at: nil>
+ #
+ # You can also pass a block to new with the new record as argument:
+ #
+ # user = users.new { |user| user.name = 'Oscar' }
+ # user.name # => Oscar
+ def new(*args, &block)
+ scoping { @klass.new(*args, &block) }
+ end
+
+ alias build new
+
+ # Tries to create a new record with the same scoped attributes
+ # defined in the relation. Returns the initialized object if validation fails.
+ #
+ # Expects arguments in the same format as
+ # {ActiveRecord::Base.create}[rdoc-ref:Persistence::ClassMethods#create].
+ #
+ # ==== Examples
+ #
+ # users = User.where(name: 'Oscar')
+ # users.create # => #<User id: 3, name: "Oscar", ...>
+ #
+ # users.create(name: 'fxn')
+ # users.create # => #<User id: 4, name: "fxn", ...>
+ #
+ # users.create { |user| user.name = 'tenderlove' }
+ # # => #<User id: 5, name: "tenderlove", ...>
+ #
+ # users.create(name: nil) # validation on name
+ # # => #<User id: nil, name: nil, ...>
+ def create(*args, &block)
+ scoping { @klass.create(*args, &block) }
+ end
+
+ # Similar to #create, but calls
+ # {create!}[rdoc-ref:Persistence::ClassMethods#create!]
+ # on the base class. Raises an exception if a validation error occurs.
+ #
+ # Expects arguments in the same format as
+ # {ActiveRecord::Base.create!}[rdoc-ref:Persistence::ClassMethods#create!].
+ def create!(*args, &block)
+ scoping { @klass.create!(*args, &block) }
+ end
+
+ def first_or_create(attributes = nil, &block) # :nodoc:
+ first || create(attributes, &block)
+ end
+
+ def first_or_create!(attributes = nil, &block) # :nodoc:
+ first || create!(attributes, &block)
+ end
+
+ def first_or_initialize(attributes = nil, &block) # :nodoc:
+ first || new(attributes, &block)
+ end
+
+ # Finds the first record with the given attributes, or creates a record
+ # with the attributes if one is not found:
+ #
+ # # Find the first user named "Penélope" or create a new one.
+ # User.find_or_create_by(first_name: 'Penélope')
+ # # => #<User id: 1, first_name: "Penélope", last_name: nil>
+ #
+ # # Find the first user named "Penélope" or create a new one.
+ # # We already have one so the existing record will be returned.
+ # User.find_or_create_by(first_name: 'Penélope')
+ # # => #<User id: 1, first_name: "Penélope", last_name: nil>
+ #
+ # # Find the first user named "Scarlett" or create a new one with
+ # # a particular last name.
+ # User.create_with(last_name: 'Johansson').find_or_create_by(first_name: 'Scarlett')
+ # # => #<User id: 2, first_name: "Scarlett", last_name: "Johansson">
+ #
+ # This method accepts a block, which is passed down to #create. The last example
+ # above can be alternatively written this way:
+ #
+ # # Find the first user named "Scarlett" or create a new one with a
+ # # different last name.
+ # User.find_or_create_by(first_name: 'Scarlett') do |user|
+ # user.last_name = 'Johansson'
+ # end
+ # # => #<User id: 2, first_name: "Scarlett", last_name: "Johansson">
+ #
+ # This method always returns a record. If creation was attempted but
+ # failed due to validation errors, the record won't be persisted; you get
+ # what #create returns in such a situation.
+ #
+ # Please note *this method is not atomic*: it first runs a SELECT, and if
+ # there are no results an INSERT is attempted. If there are other threads
+ # or processes there is a race condition between both calls, and you could
+ # end up with two similar records.
+ #
+ # Whether that is a problem depends on the logic of the application, but
+ # in the particular case in which rows have a UNIQUE constraint an
+ # exception may be raised; if so, just retry:
+ #
+ # begin
+ # CreditAccount.transaction(requires_new: true) do
+ # CreditAccount.find_or_create_by(user_id: user.id)
+ # end
+ # rescue ActiveRecord::RecordNotUnique
+ # retry
+ # end
+ #
+ def find_or_create_by(attributes, &block)
+ find_by(attributes) || create(attributes, &block)
+ end
+
+ # Like #find_or_create_by, but calls
+ # {create!}[rdoc-ref:Persistence::ClassMethods#create!] so an exception
+ # is raised if the created record is invalid.
+ def find_or_create_by!(attributes, &block)
+ find_by(attributes) || create!(attributes, &block)
+ end
+
+ # Like #find_or_create_by, but calls {new}[rdoc-ref:Core#new]
+ # instead of {create}[rdoc-ref:Persistence::ClassMethods#create].
+ def find_or_initialize_by(attributes, &block)
+ find_by(attributes) || new(attributes, &block)
+ end
+
+ # Runs EXPLAIN on the query or queries triggered by this relation and
+ # returns the result as a string. The string is formatted imitating the
+ # ones printed by the database shell.
+ #
+ # Note that this method actually runs the queries, since the results of some
+ # are needed by the next ones when eager loading is going on.
+ #
+ # Please see further details in the
+ # {Active Record Query Interface guide}[http://guides.rubyonrails.org/active_record_querying.html#running-explain].
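+ #
+ # A brief illustrative sketch (the +User+ model and +:posts+ association are
+ # assumptions here; the output format depends on the database adapter):
+ #
+ #   User.where(id: 1).includes(:posts).explain
+ #   # EXPLAIN for: SELECT "users".* FROM "users" WHERE "users"."id" = 1
+ #   # ...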
+ def explain
+ exec_explain(collecting_queries_for_explain { exec_queries })
+ end
+
+ # Converts relation objects to Array.
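+ #
+ # For example (a hedged sketch; the +Post+ model is illustrative):
+ #
+ #   Post.where(published: true).to_a
+ #   # => [#<Post id: 1, ...>, #<Post id: 2, ...>]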
+ def to_a
+ records.dup
+ end
+
+ def records # :nodoc:
+ load
+ @records
+ end
+
+ # Serializes the Array of relation records.
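+ #
+ # This is the hook Psych calls when dumping an object to YAML, so, as an
+ # illustrative sketch (the +Post+ model is assumed):
+ #
+ #   YAML.dump(Post.where(published: true))
+ #   # => a YAML sequence of the matching Post records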
+ def encode_with(coder)
+ coder.represent_seq(nil, records)
+ end
+
+ # Returns the size of the records.
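+ #
+ # If the relation has been loaded, the in-memory length is used; otherwise a
+ # COUNT query is issued. An illustrative sketch (+Person+ is assumed):
+ #
+ #   Person.where("age > 21").size # => 42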
+ def size
+ loaded? ? @records.length : count(:all)
+ end
+
+ # Returns true if there are no records.
+ def empty?
+ return @records.empty? if loaded?
+ !exists?
+ end
+
+ # Returns true if there are no records.
+ def none?
+ return super if block_given?
+ empty?
+ end
+
+ # Returns true if there are any records.
+ def any?
+ return super if block_given?
+ !empty?
+ end
+
+ # Returns true if there is exactly one record.
+ def one?
+ return super if block_given?
+ limit_value ? records.one? : size == 1
+ end
+
+ # Returns true if there is more than one record.
+ def many?
+ return super if block_given?
+ limit_value ? records.many? : size > 1
+ end
+
+ # Returns a cache key that can be used to identify the records fetched by
+ # this query. The cache key is built with a fingerprint of the sql query,
+ # the number of records matched by the query and a timestamp of the last
+ # updated record. When a new record comes to match the query, or any of
+ # the existing records is updated or deleted, the cache key changes.
+ #
+ # Product.where("name like ?", "%Cosmic Encounter%").cache_key
+ # # => "products/query-1850ab3d302391b85b8693e941286659-1-20150714212553907087000"
+ #
+ # If the collection is loaded, the method will iterate through the records
+ # to generate the timestamp; otherwise it will trigger one SQL query like:
+ #
+ # SELECT COUNT(*), MAX("products"."updated_at") FROM "products" WHERE (name like '%Cosmic Encounter%')
+ #
+ # You can also pass a custom timestamp column to fetch the timestamp of the
+ # last updated record.
+ #
+ # Product.where("name like ?", "%Game%").cache_key(:last_reviewed_at)
+ #
+ # You can customize the strategy used to generate the key on a per-model
+ # basis by overriding ActiveRecord::Base#collection_cache_key.
+ def cache_key(timestamp_column = :updated_at)
+ @cache_keys ||= {}
+ @cache_keys[timestamp_column] ||= @klass.collection_cache_key(self, timestamp_column)
+ end
+
+ # Scope all queries to the current scope.
+ #
+ # Comment.where(post_id: 1).scoping do
+ # Comment.first
+ # end
+ # # => SELECT "comments".* FROM "comments" WHERE "comments"."post_id" = 1 ORDER BY "comments"."id" ASC LIMIT 1
+ #
+ # Please check unscoped if you want to remove all previous scopes (including
+ # the default_scope) during the execution of a block.
+ def scoping
+ previous, klass.current_scope = klass.current_scope(true), self
+ yield
+ ensure
+ klass.current_scope = previous
+ end
+
+ # Updates all records in the current relation with details given. This method constructs a single SQL UPDATE
+ # statement and sends it straight to the database. It does not instantiate the involved models and it does not
+ # trigger Active Record callbacks or validations. However, values passed to #update_all will still go through
+ # Active Record's normal type casting and serialization.
+ #
+ # ==== Parameters
+ #
+ # * +updates+ - A string, array, or hash representing the SET part of an SQL statement.
+ #
+ # ==== Examples
+ #
+ # # Update all customers with the given attributes
+ # Customer.update_all wants_email: true
+ #
+ # # Update all books with 'Rails' in their title
+ # Book.where('title LIKE ?', '%Rails%').update_all(author: 'David')
+ #
+ # # Update all books that match conditions, but limit it to 5 ordered by date
+ # Book.where('title LIKE ?', '%Rails%').order(:created_at).limit(5).update_all(author: 'David')
+ #
+ # # Update all invoices and set the number column to its id value.
+ # Invoice.update_all('number = id')
+ def update_all(updates)
+ raise ArgumentError, "Empty list of attributes to change" if updates.blank?
+
+ stmt = Arel::UpdateManager.new
+
+ stmt.set Arel.sql(@klass.send(:sanitize_sql_for_assignment, updates))
+ stmt.table(table)
+
+ if has_join_values?
+ @klass.connection.join_to_update(stmt, arel, arel_attribute(primary_key))
+ else
+ stmt.key = arel_attribute(primary_key)
+ stmt.take(arel.limit)
+ stmt.order(*arel.orders)
+ stmt.wheres = arel.constraints
+ end
+
+ @klass.connection.update stmt, "SQL"
+ end
+
+ # Updates an object (or multiple objects) and saves it to the database, if validations pass.
+ # The resulting object is returned whether the object was saved successfully to the database or not.
+ #
+ # ==== Parameters
+ #
+ # * +id+ - This should be the id or an array of ids to be updated.
+ # * +attributes+ - This should be a hash of attributes or an array of hashes.
+ #
+ # ==== Examples
+ #
+ # # Updates one record
+ # Person.update(15, user_name: 'Samuel', group: 'expert')
+ #
+ # # Updates multiple records
+ # people = { 1 => { "first_name" => "David" }, 2 => { "first_name" => "Jeremy" } }
+ # Person.update(people.keys, people.values)
+ #
+ # # Updates multiple records from the result of a relation
+ # people = Person.where(group: 'expert')
+ # people.update(group: 'masters')
+ #
+ # Note: Updating a large number of records will run an
+ # UPDATE query for each record, which may cause a performance
+ # issue. When running callbacks is not needed for each record update,
+ # it is preferred to use #update_all for updating all records
+ # in a single query.
+ def update(id = :all, attributes)
+ if id.is_a?(Array)
+ id.map.with_index { |one_id, idx| update(one_id, attributes[idx]) }
+ elsif id == :all
+ records.each { |record| record.update(attributes) }
+ else
+ if ActiveRecord::Base === id
+ raise ArgumentError, <<-MSG.squish
+ You are passing an instance of ActiveRecord::Base to `update`.
+ Please pass the id of the object by calling `.id`.
+ MSG
+ end
+ object = find(id)
+ object.update(attributes)
+ object
+ end
+ end
+
+ # Destroys the records by instantiating each
+ # record and calling its {#destroy}[rdoc-ref:Persistence#destroy] method.
+ # Each object's callbacks are executed (including <tt>:dependent</tt> association options).
+ # Returns the collection of objects that were destroyed; each will be frozen, to
+ # reflect that no changes should be made (since they can't be persisted).
+ #
+ # Note: Instantiation, callback execution, and deletion of each
+ # record can be time consuming when you're removing many records at
+ # once. It generates at least one SQL +DELETE+ query per record (or
+ # possibly more, to enforce your callbacks). If you want to delete many
+ # rows quickly, without concern for their associations or callbacks, use
+ # #delete_all instead.
+ #
+ # ==== Examples
+ #
+ # Person.where(age: 0..18).destroy_all
+ def destroy_all
+ records.each(&:destroy).tap { reset }
+ end
+
+ # Destroy an object (or multiple objects) that has the given id. The object is instantiated first,
+ # therefore all callbacks and filters are fired off before the object is deleted. This method is
+ # less efficient than #delete but allows cleanup methods and other actions to be run.
+ #
+ # This essentially finds the object (or multiple objects) with the given id, creates a new object
+ # from the attributes, and then calls destroy on it.
+ #
+ # ==== Parameters
+ #
+ # * +id+ - Can be either an Integer or an Array of Integers.
+ #
+ # ==== Examples
+ #
+ # # Destroy a single object
+ # Todo.destroy(1)
+ #
+ # # Destroy multiple objects
+ # todos = [1,2,3]
+ # Todo.destroy(todos)
+ def destroy(id)
+ if id.is_a?(Array)
+ id.map { |one_id| destroy(one_id) }
+ else
+ find(id).destroy
+ end
+ end
+
+ # Deletes the records without instantiating them first, and hence without
+ # calling the {#destroy}[rdoc-ref:Persistence#destroy] method or invoking
+ # callbacks.
+ # This is a single SQL DELETE statement that goes straight to the database, much more
+ # efficient than #destroy_all. Be careful with relations though, in particular
+ # <tt>:dependent</tt> rules defined on associations are not honored. Returns the
+ # number of rows affected.
+ #
+ # Post.where(person_id: 5).where(category: ['Something', 'Else']).delete_all
+ #
+ # This deletes the affected posts all at once with a single DELETE statement.
+ # If you need to destroy dependent associations or call your <tt>before_*</tt> or
+ # +after_destroy+ callbacks, use the #destroy_all method instead.
+ #
+ # If an invalid method is supplied, #delete_all raises an ActiveRecordError:
+ #
+ # Post.limit(100).delete_all
+ # # => ActiveRecord::ActiveRecordError: delete_all doesn't support limit
+ def delete_all
+ invalid_methods = INVALID_METHODS_FOR_DELETE_ALL.select do |method|
+ value = get_value(method)
+ SINGLE_VALUE_METHODS.include?(method) ? value : value.any?
+ end
+ if invalid_methods.any?
+ raise ActiveRecordError.new("delete_all doesn't support #{invalid_methods.join(', ')}")
+ end
+
+ stmt = Arel::DeleteManager.new
+ stmt.from(table)
+
+ if has_join_values?
+ @klass.connection.join_to_delete(stmt, arel, arel_attribute(primary_key))
+ else
+ stmt.wheres = arel.constraints
+ end
+
+ affected = @klass.connection.delete(stmt, "SQL")
+
+ reset
+ affected
+ end
+
+ # Deletes the row with a primary key matching the +id+ argument, using a
+ # SQL +DELETE+ statement, and returns the number of rows deleted. Active
+ # Record objects are not instantiated, so the object's callbacks are not
+ # executed, including any <tt>:dependent</tt> association options.
+ #
+ # You can delete multiple rows at once by passing an Array of <tt>id</tt>s.
+ #
+ # Note: Although it is often much faster than the alternative,
+ # #destroy, skipping callbacks might bypass business logic in
+ # your application that ensures referential integrity or performs other
+ # essential jobs.
+ #
+ # ==== Examples
+ #
+ # # Delete a single row
+ # Todo.delete(1)
+ #
+ # # Delete multiple rows
+ # Todo.delete([2,3,4])
+ def delete(id_or_array)
+ where(primary_key => id_or_array).delete_all
+ end
+
+ # Causes the records to be loaded from the database if they have not
+ # been loaded already. You can use this if for some reason you need
+ # to explicitly load some records before actually using them. The
+ # return value is the relation itself, not the records.
+ #
+ # Post.where(published: true).load # => #<ActiveRecord::Relation>
+ def load(&block)
+ exec_queries(&block) unless loaded?
+
+ self
+ end
+
+ # Forces reloading of the relation.
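+ #
+ # An illustrative sketch (the +Post+ model is assumed):
+ #
+ #   posts = Post.where(published: true).load
+ #   posts.reload # discards the cached records and runs the query again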
+ def reload
+ reset
+ load
+ end
+
+ def reset
+ @to_sql = @arel = @loaded = @should_eager_load = nil
+ @records = [].freeze
+ @offsets = {}
+ self
+ end
+
+ # Returns the SQL statement for the relation.
+ #
+ # User.where(name: 'Oscar').to_sql
+ # # => SELECT "users".* FROM "users" WHERE "users"."name" = 'Oscar'
+ def to_sql
+ @to_sql ||= begin
+ relation = self
+
+ if eager_loading?
+ find_with_associations { |rel| relation = rel }
+ end
+
+ conn = klass.connection
+ conn.unprepared_statement {
+ sql, _ = conn.to_sql(relation.arel)
+ sql
+ }
+ end
+ end
+
+ # Returns a hash of where conditions.
+ #
+ # User.where(name: 'Oscar').where_values_hash
+ # # => {name: "Oscar"}
+ def where_values_hash(relation_table_name = table_name)
+ where_clause.to_h(relation_table_name)
+ end
+
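+ # Returns the hash of attributes that records built or created from this
+ # relation are initialized with: the equality conditions from +where+ merged
+ # with any +create_with+ values. An illustrative sketch (+User+ is assumed):
+ #
+ #   User.where(name: 'Oscar').create_with(admin: true).scope_for_create
+ #   # => {"name"=>"Oscar", "admin"=>true}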
+ def scope_for_create
+ where_values_hash.merge!(create_with_value.stringify_keys)
+ end
+
+ # Returns true if the relation needs eager loading.
+ def eager_loading?
+ @should_eager_load ||=
+ eager_load_values.any? ||
+ includes_values.any? && (joined_includes_values.any? || references_eager_loaded_tables?)
+ end
+
+ # Joins that are also marked for preloading. In that case we should just eager load them.
+ # Note that this is a naive implementation because we could have strings and symbols which
+ # represent the same association, but that aren't matched by this. Also, we could have
+ # nested hashes which partially match, e.g. { a: :b } & { a: [:b, :c] }
+ def joined_includes_values
+ includes_values & joins_values
+ end
+
+ # Compares two relations for equality.
+ def ==(other)
+ case other
+ when Associations::CollectionProxy, AssociationRelation
+ self == other.records
+ when Relation
+ other.to_sql == to_sql
+ when Array
+ records == other
+ end
+ end
+
+ def pretty_print(q)
+ q.pp(records)
+ end
+
+ # Returns true if the relation is blank.
+ def blank?
+ records.blank?
+ end
+
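+ # Returns a copy of the hash of query values (such as the +where+, +order+,
+ # and +limit+ settings) that make up this relation. A minimal sketch (+User+
+ # is assumed; the exact contents of the hash are internal):
+ #
+ #   User.where(name: 'Oscar').limit(5).values.keys
+ #   # => [:where, :limit]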
+ def values
+ @values.dup
+ end
+
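+ # Returns a human-readable representation of the relation, listing up to the
+ # first 10 records followed by an ellipsis when more exist. An illustrative
+ # sketch (+User+ is assumed):
+ #
+ #   User.all.inspect
+ #   # => "#<ActiveRecord::Relation [#<User id: 1, ...>, #<User id: 2, ...>, ...]>"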
+ def inspect
+ subject = loaded? ? records : self
+ entries = subject.take([limit_value, 11].compact.min).map!(&:inspect)
+
+ entries[10] = "..." if entries.size == 11
+
+ "#<#{self.class.name} [#{entries.join(', ')}]>"
+ end
+
+ def empty_scope? # :nodoc:
+ @values == klass.unscoped.values
+ end
+
+ def has_limit_or_offset? # :nodoc:
+ limit_value || offset_value
+ end
+
+ protected
+
+ def load_records(records)
+ @records = records.freeze
+ @loaded = true
+ end
+
+ private
+
+ def has_join_values?
+ joins_values.any? || left_outer_joins_values.any?
+ end
+
+ def exec_queries(&block)
+ skip_query_cache_if_necessary do
+ @records = eager_loading? ? find_with_associations.freeze : @klass.find_by_sql(arel, &block).freeze
+
+ preload = preload_values
+ preload += includes_values unless eager_loading?
+ preloader = nil
+ preload.each do |associations|
+ preloader ||= build_preloader
+ preloader.preload @records, associations
+ end
+
+ @records.each(&:readonly!) if readonly_value
+
+ @loaded = true
+ @records
+ end
+ end
+
+ def skip_query_cache_if_necessary
+ if skip_query_cache_value
+ uncached do
+ yield
+ end
+ else
+ yield
+ end
+ end
+
+ def build_preloader
+ ActiveRecord::Associations::Preloader.new
+ end
+
+ def references_eager_loaded_tables?
+ joined_tables = arel.join_sources.map do |join|
+ if join.is_a?(Arel::Nodes::StringJoin)
+ tables_in_string(join.left)
+ else
+ [join.left.table_name, join.left.table_alias]
+ end
+ end
+
+ joined_tables += [table.name, table.table_alias]
+
+ # always convert table names to downcase, since in Oracle quoted table names are uppercase
+ joined_tables = joined_tables.flatten.compact.map(&:downcase).uniq
+
+ (references_values - joined_tables).any?
+ end
+
+ def tables_in_string(string)
+ return [] if string.blank?
+ # always convert table names to downcase, since in Oracle quoted table names are uppercase
+ # ignore raw_sql_, which the Oracle adapter uses as an alias for limit/offset subqueries
+ string.scan(/([a-zA-Z_][.\w]+).?\./).flatten.map(&:downcase).uniq - ["raw_sql_"]
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/relation/batches.rb b/activerecord/lib/active_record/relation/batches.rb
new file mode 100644
index 0000000000..141ad176ea
--- /dev/null
+++ b/activerecord/lib/active_record/relation/batches.rb
@@ -0,0 +1,275 @@
+# frozen_string_literal: true
+
+require_relative "batches/batch_enumerator"
+
+module ActiveRecord
+ module Batches
+ ORDER_IGNORE_MESSAGE = "Scoped order is ignored, it's forced to be batch order."
+
+ # Looping through a collection of records from the database
+ # (using the Scoping::Named::ClassMethods.all method, for example)
+ # is very inefficient since it will try to instantiate all the objects at once.
+ #
+ # In that case, batch processing methods allow you to work
+ # with the records in batches, thereby greatly reducing memory consumption.
+ #
+ # The #find_each method uses #find_in_batches with a batch size of 1000 (or as
+ # specified by the +:batch_size+ option).
+ #
+ # Person.find_each do |person|
+ # person.do_awesome_stuff
+ # end
+ #
+ # Person.where("age > 21").find_each do |person|
+ # person.party_all_night!
+ # end
+ #
+ # If you do not provide a block to #find_each, it will return an Enumerator
+ # for chaining with other methods:
+ #
+ # Person.find_each.with_index do |person, index|
+ # person.award_trophy(index + 1)
+ # end
+ #
+ # ==== Options
+ # * <tt>:batch_size</tt> - Specifies the size of the batch. Defaults to 1000.
+ # * <tt>:start</tt> - Specifies the primary key value to start from, inclusive of the value.
+ # * <tt>:finish</tt> - Specifies the primary key value to end at, inclusive of the value.
+ # * <tt>:error_on_ignore</tt> - Overrides the application config to specify if an error should be raised when
+ # an order is present in the relation.
+ #
+ # Limits are honored, and if present there is no requirement for the batch
+ # size: it can be less than, equal to, or greater than the limit.
+ #
+ # The options +start+ and +finish+ are especially useful if you want
+ # multiple workers dealing with the same processing queue. You can make
+ # worker 1 handle all the records between id 1 and 9999 and worker 2
+ # handle from 10000 and beyond by setting the +:start+ and +:finish+
+ # option on each worker.
+ #
+ # # Let's process from record 10_000 on.
+ # Person.find_each(start: 10_000) do |person|
+ # person.party_all_night!
+ # end
+ #
+ # NOTE: It's not possible to set the order. That is automatically set to
+ # ascending on the primary key ("id ASC") to make the batch ordering
+ # work. This also means that this method only works when the primary key is
+ # orderable (e.g. an integer or string).
+ #
+ # NOTE: By its nature, batch processing is subject to race conditions if
+ # other processes are modifying the database.
+ def find_each(start: nil, finish: nil, batch_size: 1000, error_on_ignore: nil)
+ if block_given?
+ find_in_batches(start: start, finish: finish, batch_size: batch_size, error_on_ignore: error_on_ignore) do |records|
+ records.each { |record| yield record }
+ end
+ else
+ enum_for(:find_each, start: start, finish: finish, batch_size: batch_size, error_on_ignore: error_on_ignore) do
+ relation = self
+ apply_limits(relation, start, finish).size
+ end
+ end
+ end
+
+ # Yields each batch of records that was found by the find options as
+ # an array.
+ #
+ # Person.where("age > 21").find_in_batches do |group|
+ # sleep(50) # Make sure it doesn't get too crowded in there!
+ # group.each { |person| person.party_all_night! }
+ # end
+ #
+ # If you do not provide a block to #find_in_batches, it will return an Enumerator
+ # for chaining with other methods:
+ #
+ # Person.find_in_batches.with_index do |group, batch|
+ # puts "Processing group ##{batch}"
+ # group.each(&:recover_from_last_night!)
+ # end
+ #
+ # To have each record yielded one by one, use #find_each instead.
+ #
+ # ==== Options
+ # * <tt>:batch_size</tt> - Specifies the size of the batch. Defaults to 1000.
+ # * <tt>:start</tt> - Specifies the primary key value to start from, inclusive of the value.
+ # * <tt>:finish</tt> - Specifies the primary key value to end at, inclusive of the value.
+ # * <tt>:error_on_ignore</tt> - Overrides the application config to specify if an error should be raised when
+ # an order is present in the relation.
+ #
+ # Limits are honored, and if present there is no requirement for the batch
+ # size: it can be less than, equal to, or greater than the limit.
+ #
+ # The options +start+ and +finish+ are especially useful if you want
+ # multiple workers dealing with the same processing queue. You can make
+ # worker 1 handle all the records between id 1 and 9999 and worker 2
+ # handle from 10000 and beyond by setting the +:start+ and +:finish+
+ # option on each worker.
+ #
+ # # Let's process from record 10_000 on.
+ # Person.find_in_batches(start: 10_000) do |group|
+ # group.each { |person| person.party_all_night! }
+ # end
+ #
+ # NOTE: It's not possible to set the order. That is automatically set to
+ # ascending on the primary key ("id ASC") to make the batch ordering
+ # work. This also means that this method only works when the primary key is
+ # orderable (e.g. an integer or string).
+ #
+ # NOTE: By its nature, batch processing is subject to race conditions if
+ # other processes are modifying the database.
+ def find_in_batches(start: nil, finish: nil, batch_size: 1000, error_on_ignore: nil)
+ relation = self
+ unless block_given?
+ return to_enum(:find_in_batches, start: start, finish: finish, batch_size: batch_size, error_on_ignore: error_on_ignore) do
+ total = apply_limits(relation, start, finish).size
+ (total - 1).div(batch_size) + 1
+ end
+ end
+
+ in_batches(of: batch_size, start: start, finish: finish, load: true, error_on_ignore: error_on_ignore) do |batch|
+ yield batch.to_a
+ end
+ end
+
+ # Yields ActiveRecord::Relation objects to work with a batch of records.
+ #
+ # Person.where("age > 21").in_batches do |relation|
+ # relation.delete_all
+ # sleep(10) # Throttle the delete queries
+ # end
+ #
+ # If you do not provide a block to #in_batches, it will return a
+ # BatchEnumerator which is enumerable.
+ #
+ # Person.in_batches.each_with_index do |relation, batch_index|
+ # puts "Processing relation ##{batch_index}"
+ # relation.delete_all
+ # end
+ #
+ # Examples of calling methods on the returned BatchEnumerator object:
+ #
+ # Person.in_batches.delete_all
+ # Person.in_batches.update_all(awesome: true)
+ # Person.in_batches.each_record(&:party_all_night!)
+ #
+ # ==== Options
+ # * <tt>:of</tt> - Specifies the size of the batch. Defaults to 1000.
+ # * <tt>:load</tt> - Specifies if the relation should be loaded. Defaults to false.
+ # * <tt>:start</tt> - Specifies the primary key value to start from, inclusive of the value.
+ # * <tt>:finish</tt> - Specifies the primary key value to end at, inclusive of the value.
+ # * <tt>:error_on_ignore</tt> - Overrides the application config to specify if an error should be raised when
+ # an order is present in the relation.
+ #
+ # Limits are honored, and if present there is no requirement for the batch
+ # size: it can be less than, equal to, or greater than the limit.
+ #
+ # The options +start+ and +finish+ are especially useful if you want
+ # multiple workers dealing with the same processing queue. You can make
+ # worker 1 handle all the records between id 1 and 9999 and worker 2
+ # handle from 10000 and beyond by setting the +:start+ and +:finish+
+ # option on each worker.
+ #
+ # # Let's process from record 10_000 on.
+ # Person.in_batches(start: 10_000).update_all(awesome: true)
+ #
+ # An example of calling the +where+ query method on the yielded relation:
+ #
+ # Person.in_batches.each do |relation|
+ # relation.update_all('age = age + 1')
+ # relation.where('age > 21').update_all(should_party: true)
+ # relation.where('age <= 21').delete_all
+ # end
+ #
+ # NOTE: If you are going to iterate through each record, you should call
+ # #each_record on the yielded BatchEnumerator:
+ #
+ # Person.in_batches.each_record(&:party_all_night!)
+ #
+ # NOTE: It's not possible to set the order. That is automatically set to
+ # ascending on the primary key ("id ASC") to make the batch ordering
+ # consistent. Therefore the primary key must be orderable, e.g. an integer
+ # or a string.
+ #
+ # NOTE: By its nature, batch processing is subject to race conditions if
+ # other processes are modifying the database.
+ def in_batches(of: 1000, start: nil, finish: nil, load: false, error_on_ignore: nil)
+ relation = self
+ unless block_given?
+ return BatchEnumerator.new(of: of, start: start, finish: finish, relation: self)
+ end
+
+ if arel.orders.present?
+ act_on_ignored_order(error_on_ignore)
+ end
+
+ batch_limit = of
+ if limit_value
+ remaining = limit_value
+ batch_limit = remaining if remaining < batch_limit
+ end
+
+ relation = relation.reorder(batch_order).limit(batch_limit)
+ relation = apply_limits(relation, start, finish)
+ relation.skip_query_cache! # Retaining the results in the query cache would undermine the point of batching
+ batch_relation = relation
+
+ loop do
+ if load
+ records = batch_relation.records
+ ids = records.map(&:id)
+ yielded_relation = where(primary_key => ids)
+ yielded_relation.load_records(records)
+ else
+ ids = batch_relation.pluck(primary_key)
+ yielded_relation = where(primary_key => ids)
+ end
+
+ break if ids.empty?
+
+ primary_key_offset = ids.last
+ raise ArgumentError.new("Primary key not included in the custom select clause") unless primary_key_offset
+
+ yield yielded_relation
+
+ break if ids.length < batch_limit
+
+ if limit_value
+ remaining -= ids.length
+
+ if remaining == 0
+ # Saves a useless iteration when the limit is a multiple of the
+ # batch size.
+ break
+ elsif remaining < batch_limit
+ relation = relation.limit(remaining)
+ end
+ end
+
+ batch_relation = relation.where(arel_attribute(primary_key).gt(primary_key_offset))
+ end
+ end
+
+ private
+
+ def apply_limits(relation, start, finish)
+ relation = relation.where(arel_attribute(primary_key).gteq(start)) if start
+ relation = relation.where(arel_attribute(primary_key).lteq(finish)) if finish
+ relation
+ end
+
+ def batch_order
+ "#{quoted_table_name}.#{quoted_primary_key} ASC"
+ end
+
+ def act_on_ignored_order(error_on_ignore)
+ raise_error = (error_on_ignore.nil? ? klass.error_on_ignored_order : error_on_ignore)
+
+ if raise_error
+ raise ArgumentError.new(ORDER_IGNORE_MESSAGE)
+ elsif logger
+ logger.warn(ORDER_IGNORE_MESSAGE)
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/relation/batches/batch_enumerator.rb b/activerecord/lib/active_record/relation/batches/batch_enumerator.rb
new file mode 100644
index 0000000000..49697da3bf
--- /dev/null
+++ b/activerecord/lib/active_record/relation/batches/batch_enumerator.rb
@@ -0,0 +1,69 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ module Batches
+ class BatchEnumerator
+ include Enumerable
+
+ def initialize(of: 1000, start: nil, finish: nil, relation:) #:nodoc:
+ @of = of
+ @relation = relation
+ @start = start
+ @finish = finish
+ end
+
+ # Looping through a collection of records from the database (using the
+ # +all+ method, for example) is very inefficient since it will try to
+ # instantiate all the objects at once.
+ #
+ # In that case, batch processing methods allow you to work with the
+ # records in batches, thereby greatly reducing memory consumption.
+ #
+ # Person.in_batches.each_record do |person|
+ # person.do_awesome_stuff
+ # end
+ #
+ # Person.where("age > 21").in_batches(of: 10).each_record do |person|
+ # person.party_all_night!
+ # end
+ #
+ # If you do not provide a block to #each_record, it will return an Enumerator
+ # for chaining with other methods:
+ #
+ # Person.in_batches.each_record.with_index do |person, index|
+ # person.award_trophy(index + 1)
+ # end
+ def each_record
+ return to_enum(:each_record) unless block_given?
+
+ @relation.to_enum(:in_batches, of: @of, start: @start, finish: @finish, load: true).each do |relation|
+ relation.records.each { |record| yield record }
+ end
+ end
+
+ # Delegates the #delete_all, #update_all, and #destroy_all methods to each batch.
+ #
+ # People.in_batches.delete_all
+ # People.where('age < 10').in_batches.destroy_all
+ # People.in_batches.update_all('age = age + 1')
+ [:delete_all, :update_all, :destroy_all].each do |method|
+ define_method(method) do |*args, &block|
+ @relation.to_enum(:in_batches, of: @of, start: @start, finish: @finish, load: false).each do |relation|
+ relation.send(method, *args, &block)
+ end
+ end
+ end
+
+ # Yields an ActiveRecord::Relation object for each batch of records.
+ #
+ # Person.in_batches.each do |relation|
+ # relation.update_all(awesome: true)
+ # end
+ def each
+ enum = @relation.to_enum(:in_batches, of: @of, start: @start, finish: @finish, load: false)
+ return enum.each { |relation| yield relation } if block_given?
+ enum
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/relation/calculations.rb b/activerecord/lib/active_record/relation/calculations.rb
new file mode 100644
index 0000000000..d3b5be6bce
--- /dev/null
+++ b/activerecord/lib/active_record/relation/calculations.rb
@@ -0,0 +1,404 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ module Calculations
+ # Count the records.
+ #
+ # Person.count
+ # # => the total count of all people
+ #
+ # Person.count(:age)
+ # # => returns the total count of all people whose age is present in the database
+ #
+ # Person.count(:all)
+ # # => performs a COUNT(*) (:all is an alias for '*')
+ #
+ # Person.distinct.count(:age)
+ # # => counts the number of different age values
+ #
+ # If #count is used with {Relation#group}[rdoc-ref:QueryMethods#group],
+ # it returns a Hash whose keys represent the aggregated column,
+ # and the values are the respective amounts:
+ #
+ # Person.group(:city).count
+ # # => { 'Rome' => 5, 'Paris' => 3 }
+ #
+ # If #count is used with {Relation#group}[rdoc-ref:QueryMethods#group] for multiple columns, it returns a Hash whose
+ # keys are arrays containing the individual values of each column, and whose
+ # values are the respective counts.
+ #
+ # Article.group(:status, :category).count
+ # # => {["draft", "business"]=>10, ["draft", "technology"]=>4,
+ # ["published", "business"]=>0, ["published", "technology"]=>2}
+ #
+ # If #count is used with {Relation#select}[rdoc-ref:QueryMethods#select], it will count the selected columns:
+ #
+ # Person.select(:age).count
+ # # => counts the number of different age values
+ #
+ # Note: not all valid {Relation#select}[rdoc-ref:QueryMethods#select] expressions are valid #count expressions. The specifics differ
+ # between databases. In invalid cases, an error from the database is thrown.
+ def count(column_name = nil)
+ if block_given?
+ unless column_name.nil?
+ ActiveSupport::Deprecation.warn \
+ "When `count' is called with a block, it ignores other arguments. " \
+ "This behavior is now deprecated and will result in an ArgumentError in Rails 6.0."
+ end
+
+ return super()
+ end
+
+ calculate(:count, column_name)
+ end
+
+ # Calculates the average value on a given column. Returns +nil+ if there's
+ # no row. See #calculate for examples with options.
+ #
+ # Person.average(:age) # => 35.8
+ def average(column_name)
+ calculate(:average, column_name)
+ end
+
+ # Calculates the minimum value on a given column. The value is returned
+ # with the same data type of the column, or +nil+ if there's no row. See
+ # #calculate for examples with options.
+ #
+ # Person.minimum(:age) # => 7
+ def minimum(column_name)
+ calculate(:minimum, column_name)
+ end
+
+ # Calculates the maximum value on a given column. The value is returned
+ # with the same data type of the column, or +nil+ if there's no row. See
+ # #calculate for examples with options.
+ #
+ # Person.maximum(:age) # => 93
+ def maximum(column_name)
+ calculate(:maximum, column_name)
+ end
+
+ # Calculates the sum of values on a given column. The value is returned
+ # with the same data type of the column, +0+ if there's no row. See
+ # #calculate for examples with options.
+ #
+ # Person.sum(:age) # => 4562
+ def sum(column_name = nil)
+ if block_given?
+ unless column_name.nil?
+ ActiveSupport::Deprecation.warn \
+ "When `sum' is called with a block, it ignores other arguments. " \
+ "This behavior is now deprecated and will result in an ArgumentError in Rails 6.0."
+ end
+
+ return super()
+ end
+
+ calculate(:sum, column_name)
+ end
+
+ # This calculates aggregate values in the given column. Methods for #count, #sum, #average,
+ # #minimum, and #maximum have been added as shortcuts.
+ #
+ # Person.calculate(:count, :all) # The same as Person.count
+ # Person.average(:age) # SELECT AVG(age) FROM people...
+ #
+ # # Selects the minimum age for any family without any minors
+ # Person.group(:last_name).having("min(age) > 17").minimum(:age)
+ #
+ # Person.sum("2 * age")
+ #
+ # There are two basic forms of output:
+ #
+ # * Single aggregate value: The single value is type cast to Integer for COUNT, Float
+ # for AVG, and the given column's type for everything else.
+ #
+ # * Grouped values: This returns an ordered hash of the values and groups them. It
+ # takes either a column name, or the name of a belongs_to association.
+ #
+ # values = Person.group('last_name').maximum(:age)
+ # puts values["Drake"]
+ # # => 43
+ #
+ # drake = Family.find_by(last_name: 'Drake')
+ # values = Person.group(:family).maximum(:age) # Person belongs_to :family
+ # puts values[drake]
+ # # => 43
+ #
+ # values.each do |family, max_age|
+ # ...
+ # end
+ def calculate(operation, column_name)
+ if has_include?(column_name)
+ relation = construct_relation_for_association_calculations
+ relation.distinct! if operation.to_s.downcase == "count"
+
+ relation.calculate(operation, column_name)
+ else
+ perform_calculation(operation, column_name)
+ end
+ end
+
+ # Use #pluck as a shortcut to select one or more attributes without
+ # loading a bunch of records just to grab the attributes you want.
+ #
+ # Person.pluck(:name)
+ #
+ # instead of
+ #
+ # Person.all.map(&:name)
+ #
+ # Pluck returns an Array of attribute values type-casted to match
+ # the plucked column names, if they can be deduced. Plucking an SQL fragment
+ # returns String values by default.
+ #
+ # Person.pluck(:name)
+ # # SELECT people.name FROM people
+ # # => ['David', 'Jeremy', 'Jose']
+ #
+ # Person.pluck(:id, :name)
+ # # SELECT people.id, people.name FROM people
+ # # => [[1, 'David'], [2, 'Jeremy'], [3, 'Jose']]
+ #
+ # Person.distinct.pluck(:role)
+ # # SELECT DISTINCT role FROM people
+ # # => ['admin', 'member', 'guest']
+ #
+ # Person.where(age: 21).limit(5).pluck(:id)
+ # # SELECT people.id FROM people WHERE people.age = 21 LIMIT 5
+ # # => [2, 3]
+ #
+ # Person.pluck('DATEDIFF(updated_at, created_at)')
+ # # SELECT DATEDIFF(updated_at, created_at) FROM people
+ # # => ['0', '27761', '173']
+ #
+ # See also #ids.
+ #
+ def pluck(*column_names)
+ if loaded? && (column_names.map(&:to_s) - @klass.attribute_names - @klass.attribute_aliases.keys).empty?
+ return records.pluck(*column_names)
+ end
+
+ if has_include?(column_names.first)
+ construct_relation_for_association_calculations.pluck(*column_names)
+ else
+ relation = spawn
+ relation.select_values = column_names.map { |cn|
+ @klass.has_attribute?(cn) || @klass.attribute_alias?(cn) ? arel_attribute(cn) : cn
+ }
+ result = skip_query_cache_if_necessary { klass.connection.select_all(relation.arel, nil) }
+ result.cast_values(klass.attribute_types)
+ end
+ end
+
+ # Plucks all the IDs for the relation using the table's primary key.
+ #
+ # Person.ids # SELECT people.id FROM people
+ # Person.joins(:companies).ids # SELECT people.id FROM people INNER JOIN companies ON companies.person_id = people.id
+ def ids
+ pluck primary_key
+ end
+
+ private
+
+ def has_include?(column_name)
+ eager_loading? || (includes_values.present? && column_name && column_name != :all)
+ end
+
+ def perform_calculation(operation, column_name)
+ operation = operation.to_s.downcase
+
+ # If #count is used with #distinct (i.e. `relation.distinct.count`) it is
+ # considered distinct.
+ distinct = distinct_value
+
+ if operation == "count"
+ column_name ||= select_for_count
+ if column_name == :all
+ if distinct && !(has_limit_or_offset? && order_values.any?)
+ column_name = primary_key
+ end
+ elsif column_name =~ /\s*DISTINCT[\s(]+/i
+ distinct = nil
+ end
+ end
+
+ if group_values.any?
+ execute_grouped_calculation(operation, column_name, distinct)
+ else
+ execute_simple_calculation(operation, column_name, distinct)
+ end
+ end
+
+ def aggregate_column(column_name)
+ return column_name if Arel::Expressions === column_name
+
+ if @klass.has_attribute?(column_name.to_s) || @klass.attribute_alias?(column_name.to_s)
+ @klass.arel_attribute(column_name)
+ else
+ Arel.sql(column_name == :all ? "*" : column_name.to_s)
+ end
+ end
+
+ def operation_over_aggregate_column(column, operation, distinct)
+ operation == "count" ? column.count(distinct) : column.send(operation)
+ end
+
+ def execute_simple_calculation(operation, column_name, distinct) #:nodoc:
+ column_alias = column_name
+
+ if operation == "count" && has_limit_or_offset?
+ # Shortcut when limit is zero.
+ return 0 if limit_value == 0
+
+ query_builder = build_count_subquery(spawn, column_name, distinct)
+ else
+ # PostgreSQL doesn't like ORDER BY when there are no GROUP BY
+ relation = unscope(:order).distinct!(false)
+
+ column = aggregate_column(column_name)
+
+ select_value = operation_over_aggregate_column(column, operation, distinct)
+
+ column_alias = select_value.alias
+ column_alias ||= @klass.connection.column_name_for_operation(operation, select_value)
+ relation.select_values = [select_value]
+
+ query_builder = relation.arel
+ end
+
+ result = skip_query_cache_if_necessary { @klass.connection.select_all(query_builder, nil) }
+ row = result.first
+ value = row && row.values.first
+ type = result.column_types.fetch(column_alias) do
+ type_for(column_name)
+ end
+
+ type_cast_calculated_value(value, type, operation)
+ end
+
+ def execute_grouped_calculation(operation, column_name, distinct) #:nodoc:
+ group_attrs = group_values
+
+ if group_attrs.first.respond_to?(:to_sym)
+ association = @klass._reflect_on_association(group_attrs.first)
+ associated = group_attrs.size == 1 && association && association.belongs_to? # only count belongs_to associations
+ group_fields = Array(associated ? association.foreign_key : group_attrs)
+ else
+ group_fields = group_attrs
+ end
+ group_fields = arel_columns(group_fields)
+
+ group_aliases = group_fields.map { |field| column_alias_for(field) }
+ group_columns = group_aliases.zip(group_fields)
+
+ if operation == "count" && column_name == :all
+ aggregate_alias = "count_all"
+ else
+ aggregate_alias = column_alias_for([operation, column_name].join(" "))
+ end
+
+ select_values = [
+ operation_over_aggregate_column(
+ aggregate_column(column_name),
+ operation,
+ distinct).as(aggregate_alias)
+ ]
+ select_values += self.select_values unless having_clause.empty?
+
+ select_values.concat group_columns.map { |aliaz, field|
+ if field.respond_to?(:as)
+ field.as(aliaz)
+ else
+ "#{field} AS #{aliaz}"
+ end
+ }
+
+ relation = except(:group).distinct!(false)
+ relation.group_values = group_fields
+ relation.select_values = select_values
+
+ calculated_data = skip_query_cache_if_necessary { @klass.connection.select_all(relation.arel, nil) }
+
+ if association
+ key_ids = calculated_data.collect { |row| row[group_aliases.first] }
+ key_records = association.klass.base_class.where(association.klass.base_class.primary_key => key_ids)
+ key_records = Hash[key_records.map { |r| [r.id, r] }]
+ end
+
+ Hash[calculated_data.map do |row|
+ key = group_columns.map { |aliaz, col_name|
+ type = type_for(col_name) do
+ calculated_data.column_types.fetch(aliaz, Type.default_value)
+ end
+ type_cast_calculated_value(row[aliaz], type)
+ }
+ key = key.first if key.size == 1
+ key = key_records[key] if associated
+
+ type = calculated_data.column_types.fetch(aggregate_alias) { type_for(column_name) }
+ [key, type_cast_calculated_value(row[aggregate_alias], type, operation)]
+ end]
+ end
+
+ # Converts the given keys to the value that the database adapter returns as
+ # a usable column name:
+ #
+ # column_alias_for("users.id") # => "users_id"
+ # column_alias_for("sum(id)") # => "sum_id"
+ # column_alias_for("count(distinct users.id)") # => "count_distinct_users_id"
+ # column_alias_for("count(*)") # => "count_all"
+ def column_alias_for(keys)
+ if keys.respond_to? :name
+ keys = "#{keys.relation.name}.#{keys.name}"
+ end
+
+ table_name = keys.to_s.downcase
+ table_name.gsub!(/\*/, "all")
+ table_name.gsub!(/\W+/, " ")
+ table_name.strip!
+ table_name.gsub!(/ +/, "_")
+
+ @klass.connection.table_alias_for(table_name)
+ end
+
+ def type_for(field, &block)
+ field_name = field.respond_to?(:name) ? field.name.to_s : field.to_s.split(".").last
+ @klass.type_for_attribute(field_name, &block)
+ end
+
+ def type_cast_calculated_value(value, type, operation = nil)
+ case operation
+ when "count" then value.to_i
+ when "sum" then type.deserialize(value || 0)
+ when "average" then value.respond_to?(:to_d) ? value.to_d : value
+ else type.deserialize(value)
+ end
+ end
+
+ def select_for_count
+ if select_values.present?
+ return select_values.first if select_values.one?
+ select_values.join(", ")
+ else
+ :all
+ end
+ end
+
+ def build_count_subquery(relation, column_name, distinct)
+ relation.select_values = [
+ if column_name == :all
+ distinct ? table[Arel.star] : Arel.sql("1")
+ else
+ column_alias = Arel.sql("count_column")
+ aggregate_column(column_name).as(column_alias)
+ end
+ ]
+
+ subquery = relation.arel.as(Arel.sql("subquery_for_count"))
+ select_value = operation_over_aggregate_column(column_alias || Arel.star, "count", false)
+
+ Arel::SelectManager.new(subquery).project(select_value)
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/relation/delegation.rb b/activerecord/lib/active_record/relation/delegation.rb
new file mode 100644
index 0000000000..4793f2a49b
--- /dev/null
+++ b/activerecord/lib/active_record/relation/delegation.rb
@@ -0,0 +1,121 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ module Delegation # :nodoc:
+ module DelegateCache # :nodoc:
+ def relation_delegate_class(klass)
+ @relation_delegate_cache[klass]
+ end
+
+ def initialize_relation_delegate_cache
+ @relation_delegate_cache = cache = {}
+ [
+ ActiveRecord::Relation,
+ ActiveRecord::Associations::CollectionProxy,
+ ActiveRecord::AssociationRelation
+ ].each do |klass|
+ delegate = Class.new(klass) {
+ include ClassSpecificRelation
+ }
+ mangled_name = klass.name.gsub("::".freeze, "_".freeze)
+ const_set mangled_name, delegate
+ private_constant mangled_name
+
+ cache[klass] = delegate
+ end
+ end
+
+ def inherited(child_class)
+ child_class.initialize_relation_delegate_cache
+ super
+ end
+ end
+
+ extend ActiveSupport::Concern
+
+ # This module creates compiled delegation methods dynamically at runtime, which makes
+ # subsequent calls to that method faster by avoiding method_missing. The delegations
+ # may vary depending on the klass of a relation, so we create a subclass of Relation
+ # for each different klass, and the delegations are compiled into that subclass only.
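+ #
+ # As a hedged sketch, the first call to a class method such as +recent+ on a
+ # relation compiles a delegator roughly like (+recent+ is illustrative):
+ #
+ #   def recent(*args, &block)
+ #     scoping { @klass.recent(*args, &block) }
+ #   end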
+
+ delegate :to_xml, :encode_with, :length, :each, :uniq, :to_ary, :join,
+ :[], :&, :|, :+, :-, :sample, :reverse, :compact, :in_groups, :in_groups_of,
+ :to_sentence, :to_formatted_s, :as_json,
+ :shuffle, :split, :index, to: :records
+
+ delegate :table_name, :quoted_table_name, :primary_key, :quoted_primary_key,
+ :connection, :columns_hash, to: :klass
+
+ module ClassSpecificRelation # :nodoc:
+ extend ActiveSupport::Concern
+
+ included do
+ @delegation_mutex = Mutex.new
+ end
+
+ module ClassMethods # :nodoc:
+ def name
+ superclass.name
+ end
+
+ def delegate_to_scoped_klass(method)
+ @delegation_mutex.synchronize do
+ return if method_defined?(method)
+
+ if /\A[a-zA-Z_]\w*[!?]?\z/.match?(method)
+ module_eval <<-RUBY, __FILE__, __LINE__ + 1
+ def #{method}(*args, &block)
+ scoping { @klass.#{method}(*args, &block) }
+ end
+ RUBY
+ else
+ define_method method do |*args, &block|
+ scoping { @klass.public_send(method, *args, &block) }
+ end
+ end
+ end
+ end
+
+ def delegate(method, opts = {})
+ @delegation_mutex.synchronize do
+ return if method_defined?(method)
+ super
+ end
+ end
+ end
+
+ private
+
+ def method_missing(method, *args, &block)
+ if @klass.respond_to?(method)
+ self.class.delegate_to_scoped_klass(method)
+ scoping { @klass.public_send(method, *args, &block) }
+ elsif arel.respond_to?(method)
+ ActiveSupport::Deprecation.warn \
+ "Delegating #{method} to arel is deprecated and will be removed in Rails 6.0."
+ self.class.delegate method, to: :arel
+ arel.public_send(method, *args, &block)
+ else
+ super
+ end
+ end
+ end
+
+ module ClassMethods # :nodoc:
+ def create(klass, *args)
+ relation_class_for(klass).new(klass, *args)
+ end
+
+ private
+
+ def relation_class_for(klass)
+ klass.relation_delegate_class(self)
+ end
+ end
+
+ private
+ def respond_to_missing?(method, _)
+ super || @klass.respond_to?(method) || arel.respond_to?(method)
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/relation/finder_methods.rb b/activerecord/lib/active_record/relation/finder_methods.rb
new file mode 100644
index 0000000000..626c50470e
--- /dev/null
+++ b/activerecord/lib/active_record/relation/finder_methods.rb
@@ -0,0 +1,575 @@
+# frozen_string_literal: true
+
+require "active_support/core_ext/string/filters"
+
+module ActiveRecord
+ module FinderMethods
+ ONE_AS_ONE = "1 AS one"
+
+ # Find by id - This can either be a specific id (1), a list of ids (1, 5, 6), or an array of ids ([5, 6, 10]).
+ # If one or more records cannot be found for the requested ids, then RecordNotFound will be raised. If the primary key
+ # is an integer, find by id coerces its arguments using +to_i+.
+ #
+ # Person.find(1) # returns the object for ID = 1
+ # Person.find("1") # returns the object for ID = 1
+ # Person.find("31-sarah") # returns the object for ID = 31
+ # Person.find(1, 2, 6) # returns an array for objects with IDs in (1, 2, 6)
+ # Person.find([7, 17]) # returns an array for objects with IDs in (7, 17)
+ # Person.find([1]) # returns an array for the object with ID = 1
+ # Person.where("administrator = 1").order("created_on DESC").find(1)
+ #
+ # NOTE: The returned records may not be in the same order as the ids you
+ # provide since database rows are unordered. You will need to provide an explicit QueryMethods#order
+ # option if you want the results to be sorted.
+ #
+ # ==== Find with lock
+ #
+ # Example for find with a lock: Imagine two concurrent transactions:
+ # each will read <tt>person.visits == 2</tt>, add 1 to it, and save, resulting
+ # in two saves of <tt>person.visits = 3</tt>. By locking the row, the second
+ # transaction has to wait until the first is finished; we get the
+ # expected <tt>person.visits == 4</tt>.
+ #
+ # Person.transaction do
+ # person = Person.lock(true).find(1)
+ # person.visits += 1
+ # person.save!
+ # end
+ #
+ # ==== Variations of #find
+ #
+ # Person.where(name: 'Spartacus', rating: 4)
+ # # returns a chainable list (which can be empty).
+ #
+ # Person.find_by(name: 'Spartacus', rating: 4)
+ # # returns the first item or nil.
+ #
+ # Person.find_or_initialize_by(name: 'Spartacus', rating: 4)
+ # # returns the first item or returns a new instance (requires you call .save to persist against the database).
+ #
+ # Person.find_or_create_by(name: 'Spartacus', rating: 4)
+ # # returns the first item or creates it and returns it.
+ #
+ # ==== Alternatives for #find
+ #
+ # Person.where(name: 'Spartacus', rating: 4).exists?(conditions = :none)
+ # # returns a boolean indicating if any record with the given conditions exist.
+ #
+ # Person.where(name: 'Spartacus', rating: 4).select("field1, field2, field3")
+ # # returns a chainable list of instances with only the mentioned fields.
+ #
+ # Person.where(name: 'Spartacus', rating: 4).ids
+ # # returns an Array of ids.
+ #
+ # Person.where(name: 'Spartacus', rating: 4).pluck(:field1, :field2)
+ # # returns an Array of the required fields.
+ def find(*args)
+ return super if block_given?
+ find_with_ids(*args)
+ end
+
+ # Finds the first record matching the specified conditions. There
+ # is no implied ordering so if order matters, you should specify it
+ # yourself.
+ #
+ # If no record is found, returns <tt>nil</tt>.
+ #
+ # Post.find_by name: 'Spartacus', rating: 4
+ # Post.find_by "published_at < ?", 2.weeks.ago
+ def find_by(arg, *args)
+ where(arg, *args).take
+ rescue ::RangeError
+ nil
+ end
+
+ # Like #find_by, except that if no record is found, raises
+ # an ActiveRecord::RecordNotFound error.
+ def find_by!(arg, *args)
+ where(arg, *args).take!
+ rescue ::RangeError
+ raise RecordNotFound.new("Couldn't find #{@klass.name} with an out of range value",
+ @klass.name)
+ end
+
+ # Gives a record (or N records if a parameter is supplied) without any implied
+ # order. The order will depend on the database implementation.
+ # If an order is supplied it will be respected.
+ #
+ # Person.take # returns an object fetched by SELECT * FROM people LIMIT 1
+ # Person.take(5) # returns 5 objects fetched by SELECT * FROM people LIMIT 5
+ # Person.where(["name LIKE '%?'", name]).take
+ def take(limit = nil)
+ limit ? find_take_with_limit(limit) : find_take
+ end
+
+ # Same as #take but raises ActiveRecord::RecordNotFound if no record
+ # is found. Note that #take! accepts no arguments.
+ def take!
+ take || raise_record_not_found_exception!
+ end
+
+ # Find the first record (or first N records if a parameter is supplied).
+ # If no order is defined it will order by primary key.
+ #
+ # Person.first # returns the first object fetched by SELECT * FROM people ORDER BY people.id LIMIT 1
+ # Person.where(["user_name = ?", user_name]).first
+ # Person.where(["user_name = :u", { u: user_name }]).first
+ # Person.order("created_on DESC").offset(5).first
+ # Person.first(3) # returns the first three objects fetched by SELECT * FROM people ORDER BY people.id LIMIT 3
+ #
+ def first(limit = nil)
+ if limit
+ find_nth_with_limit(0, limit)
+ else
+ find_nth 0
+ end
+ end
+
+ # Same as #first but raises ActiveRecord::RecordNotFound if no record
+ # is found. Note that #first! accepts no arguments.
+ def first!
+ first || raise_record_not_found_exception!
+ end
+
+ # Find the last record (or last N records if a parameter is supplied).
+ # If no order is defined it will order by primary key.
+ #
+ # Person.last # returns the last object fetched by SELECT * FROM people
+ # Person.where(["user_name = ?", user_name]).last
+ # Person.order("created_on DESC").offset(5).last
+ # Person.last(3) # returns the last three objects fetched by SELECT * FROM people.
+ #
+ # Take note that in that last case, the results are sorted in ascending order:
+ #
+ # [#<Person id:2>, #<Person id:3>, #<Person id:4>]
+ #
+ # and not:
+ #
+ # [#<Person id:4>, #<Person id:3>, #<Person id:2>]
+ def last(limit = nil)
+ return find_last(limit) if loaded? || limit_value
+
+ result = ordered_relation.limit(limit)
+ result = result.reverse_order!
+
+ limit ? result.reverse : result.first
+ end
+
+ # Same as #last but raises ActiveRecord::RecordNotFound if no record
+ # is found. Note that #last! accepts no arguments.
+ def last!
+ last || raise_record_not_found_exception!
+ end
+
+ # Find the second record.
+ # If no order is defined it will order by primary key.
+ #
+ # Person.second # returns the second object fetched by SELECT * FROM people
+ # Person.offset(3).second # returns the second object from OFFSET 3 (which is OFFSET 4)
+ # Person.where(["user_name = :u", { u: user_name }]).second
+ def second
+ find_nth 1
+ end
+
+ # Same as #second but raises ActiveRecord::RecordNotFound if no record
+ # is found.
+ def second!
+ second || raise_record_not_found_exception!
+ end
+
+ # Find the third record.
+ # If no order is defined it will order by primary key.
+ #
+ # Person.third # returns the third object fetched by SELECT * FROM people
+ # Person.offset(3).third # returns the third object from OFFSET 3 (which is OFFSET 5)
+ # Person.where(["user_name = :u", { u: user_name }]).third
+ def third
+ find_nth 2
+ end
+
+ # Same as #third but raises ActiveRecord::RecordNotFound if no record
+ # is found.
+ def third!
+ third || raise_record_not_found_exception!
+ end
+
+ # Find the fourth record.
+ # If no order is defined it will order by primary key.
+ #
+ # Person.fourth # returns the fourth object fetched by SELECT * FROM people
+ # Person.offset(3).fourth # returns the fourth object from OFFSET 3 (which is OFFSET 6)
+ # Person.where(["user_name = :u", { u: user_name }]).fourth
+ def fourth
+ find_nth 3
+ end
+
+ # Same as #fourth but raises ActiveRecord::RecordNotFound if no record
+ # is found.
+ def fourth!
+ fourth || raise_record_not_found_exception!
+ end
+
+ # Find the fifth record.
+ # If no order is defined it will order by primary key.
+ #
+ # Person.fifth # returns the fifth object fetched by SELECT * FROM people
+ # Person.offset(3).fifth # returns the fifth object from OFFSET 3 (which is OFFSET 7)
+ # Person.where(["user_name = :u", { u: user_name }]).fifth
+ def fifth
+ find_nth 4
+ end
+
+ # Same as #fifth but raises ActiveRecord::RecordNotFound if no record
+ # is found.
+ def fifth!
+ fifth || raise_record_not_found_exception!
+ end
+
+ # Find the forty-second record. Also known as accessing "the reddit".
+ # If no order is defined it will order by primary key.
+ #
+ # Person.forty_two # returns the forty-second object fetched by SELECT * FROM people
+ # Person.offset(3).forty_two # returns the forty-second object from OFFSET 3 (which is OFFSET 44)
+ # Person.where(["user_name = :u", { u: user_name }]).forty_two
+ def forty_two
+ find_nth 41
+ end
+
+ # Same as #forty_two but raises ActiveRecord::RecordNotFound if no record
+ # is found.
+ def forty_two!
+ forty_two || raise_record_not_found_exception!
+ end
+
+ # Find the third-to-last record.
+ # If no order is defined it will order by primary key.
+ #
+ # Person.third_to_last # returns the third-to-last object fetched by SELECT * FROM people
+ # Person.offset(3).third_to_last # returns the third-to-last object from OFFSET 3
+ # Person.where(["user_name = :u", { u: user_name }]).third_to_last
+ def third_to_last
+ find_nth_from_last 3
+ end
+
+ # Same as #third_to_last but raises ActiveRecord::RecordNotFound if no record
+ # is found.
+ def third_to_last!
+ third_to_last || raise_record_not_found_exception!
+ end
+
+ # Find the second-to-last record.
+ # If no order is defined it will order by primary key.
+ #
+ # Person.second_to_last # returns the second-to-last object fetched by SELECT * FROM people
+ # Person.offset(3).second_to_last # returns the second-to-last object from OFFSET 3
+ # Person.where(["user_name = :u", { u: user_name }]).second_to_last
+ def second_to_last
+ find_nth_from_last 2
+ end
+
+ # Same as #second_to_last but raises ActiveRecord::RecordNotFound if no record
+ # is found.
+ def second_to_last!
+ second_to_last || raise_record_not_found_exception!
+ end
+
+ # Returns true if a record exists in the table that matches the +id+ or
+ # conditions given, or false otherwise. The argument can take six forms:
+ #
+ # * Integer - Finds the record with this primary key.
+ # * String - Finds the record with a primary key corresponding to this
+ # string (such as <tt>'5'</tt>).
+ # * Array - Finds the record that matches these +find+-style conditions
+ # (such as <tt>['name LIKE ?', "%#{query}%"]</tt>).
+ # * Hash - Finds the record that matches these +find+-style conditions
+ # (such as <tt>{name: 'David'}</tt>).
+ # * +false+ - Always returns +false+.
+ # * No args - Returns +false+ if the table is empty, +true+ otherwise.
+ #
+ # For more information about specifying conditions as a hash or array,
+ # see the Conditions section in the introduction to ActiveRecord::Base.
+ #
+ # Note: You can't pass in a condition as a string (like <tt>name =
+ # 'Jamie'</tt>), since it would be sanitized and then queried against
+ # the primary key column, like <tt>id = 'name = \'Jamie\''</tt>.
+ #
+ # Person.exists?(5)
+ # Person.exists?('5')
+ # Person.exists?(['name LIKE ?', "%#{query}%"])
+ # Person.exists?(id: [1, 4, 8])
+ # Person.exists?(name: 'David')
+ # Person.exists?(false)
+ # Person.exists?
+ def exists?(conditions = :none)
+ if Base === conditions
+ raise ArgumentError, <<-MSG.squish
+ You are passing an instance of ActiveRecord::Base to `exists?`.
+ Please pass the id of the object by calling `.id`.
+ MSG
+ end
+
+ return false if !conditions || limit_value == 0
+
+ relation = self unless eager_loading?
+ relation ||= apply_join_dependency(self, construct_join_dependency(eager_loading: false))
+
+ return false if ActiveRecord::NullRelation === relation
+
+ relation = construct_relation_for_exists(relation, conditions)
+
+ skip_query_cache_if_necessary { connection.select_value(relation.arel, "#{name} Exists") } ? true : false
+ rescue ::RangeError
+ false
+ end
+
+ # This method is called whenever no records are found with either a single
+ # id or multiple ids and raises an ActiveRecord::RecordNotFound exception.
+ #
+ # The error message is different depending on whether a single id or
+ # multiple ids are provided. If multiple ids are provided, then the number
+ # of results obtained should be provided in the +result_size+ argument and
+ # the expected number of results should be provided in the +expected_size+
+ # argument.
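+ #
+ # For illustration only (exact wording comes from the code below):
+ #
+ # raise_record_not_found_exception!(42, 0, 1)
+ # # raises ActiveRecord::RecordNotFound: Couldn't find Person with 'id'=42
+ #
+ # raise_record_not_found_exception!([1, 2, 3], 2, 3)
+ # # raises ActiveRecord::RecordNotFound: Couldn't find all People with 'id':
+ # # (1, 2, 3) (found 2 results, but was looking for 3)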
+ def raise_record_not_found_exception!(ids = nil, result_size = nil, expected_size = nil, key = primary_key) # :nodoc:
+ conditions = arel.where_sql(@klass)
+ conditions = " [#{conditions}]" if conditions
+ name = @klass.name
+
+ if ids.nil?
+ error = "Couldn't find #{name}".dup
+ error << " with#{conditions}" if conditions
+ raise RecordNotFound.new(error, name)
+ elsif Array(ids).size == 1
+ error = "Couldn't find #{name} with '#{key}'=#{ids}#{conditions}"
+ raise RecordNotFound.new(error, name, key, ids)
+ else
+ error = "Couldn't find all #{name.pluralize} with '#{key}': ".dup
+ error << "(#{ids.join(", ")})#{conditions} (found #{result_size} results, but was looking for #{expected_size})"
+
+ raise RecordNotFound.new(error, name, primary_key, ids)
+ end
+ end
+
+ private
+
+ def offset_index
+ offset_value || 0
+ end
+
+ def find_with_associations
+ # NOTE: the JoinDependency constructed here needs to know about
+ # any joins already present in `self`, so pass them in
+ #
+ # failing to do so means that in cases like activerecord/test/cases/associations/inner_join_association_test.rb:136
+ # incorrect SQL is generated. In that case, the join dependency for
+ # SpecialCategorizations is constructed without knowledge of the
+ # preexisting join in joins_values to categorizations (by way of
+ # the `has_many :through` for categories).
+ #
+ join_dependency = construct_join_dependency(joins_values)
+
+ aliases = join_dependency.aliases
+ relation = select aliases.columns
+ relation = apply_join_dependency(relation, join_dependency)
+
+ if block_given?
+ yield relation
+ else
+ if ActiveRecord::NullRelation === relation
+ []
+ else
+ rows = skip_query_cache_if_necessary { connection.select_all(relation.arel, "SQL") }
+ join_dependency.instantiate(rows, aliases)
+ end
+ end
+ end
+
+ def construct_relation_for_exists(relation, conditions)
+ relation = relation.except(:select, :distinct, :order)._select!(ONE_AS_ONE).limit!(1)
+
+ case conditions
+ when Array, Hash
+ relation.where!(conditions)
+ else
+ relation.where!(primary_key => conditions) unless conditions == :none
+ end
+
+ relation
+ end
+
+ def construct_join_dependency(joins = [], eager_loading: true)
+ including = eager_load_values + includes_values
+ ActiveRecord::Associations::JoinDependency.new(klass, table, including, joins, eager_loading: eager_loading)
+ end
+
+ def construct_relation_for_association_calculations
+ apply_join_dependency(self, construct_join_dependency(joins_values))
+ end
+
+ def apply_join_dependency(relation, join_dependency)
+ relation = relation.except(:includes, :eager_load, :preload).joins!(join_dependency)
+
+ if using_limitable_reflections?(join_dependency.reflections)
+ relation
+ else
+ if relation.limit_value
+ limited_ids = limited_ids_for(relation)
+ limited_ids.empty? ? relation.none! : relation.where!(primary_key => limited_ids)
+ end
+ relation.except(:limit, :offset)
+ end
+ end
+
+ def limited_ids_for(relation)
+ values = @klass.connection.columns_for_distinct(
+ "#{quoted_table_name}.#{quoted_primary_key}", relation.order_values)
+
+ relation = relation.except(:select).select(values).distinct!
+
+ id_rows = skip_query_cache_if_necessary { @klass.connection.select_all(relation.arel, "SQL") }
+ id_rows.map { |row| row[primary_key] }
+ end
+
+ def using_limitable_reflections?(reflections)
+ reflections.none?(&:collection?)
+ end
+
+ def find_with_ids(*ids)
+ raise UnknownPrimaryKey.new(@klass) if primary_key.nil?
+
+ expects_array = ids.first.kind_of?(Array)
+ return ids.first if expects_array && ids.first.empty?
+
+ ids = ids.flatten.compact.uniq
+
+ case ids.size
+ when 0
+ raise RecordNotFound, "Couldn't find #{@klass.name} without an ID"
+ when 1
+ result = find_one(ids.first)
+ expects_array ? [ result ] : result
+ else
+ find_some(ids)
+ end
+ rescue ::RangeError
+ raise RecordNotFound, "Couldn't find #{@klass.name} with an out of range ID"
+ end
+
+ def find_one(id)
+ if ActiveRecord::Base === id
+ raise ArgumentError, <<-MSG.squish
+ You are passing an instance of ActiveRecord::Base to `find`.
+ Please pass the id of the object by calling `.id`.
+ MSG
+ end
+
+ relation = where(primary_key => id)
+ record = relation.take
+
+ raise_record_not_found_exception!(id, 0, 1) unless record
+
+ record
+ end
+
+ def find_some(ids)
+ return find_some_ordered(ids) unless order_values.present?
+
+ result = where(primary_key => ids).to_a
+
+ expected_size =
+ if limit_value && ids.size > limit_value
+ limit_value
+ else
+ ids.size
+ end
+
+ # 11 ids with limit 3, offset 9 should give 2 results.
+ if offset_value && (ids.size - offset_value < expected_size)
+ expected_size = ids.size - offset_value
+ end
+
+ if result.size == expected_size
+ result
+ else
+ raise_record_not_found_exception!(ids, result.size, expected_size)
+ end
+ end
+
+ def find_some_ordered(ids)
+ ids = ids.slice(offset_value || 0, limit_value || ids.size) || []
+
+ result = except(:limit, :offset).where(primary_key => ids).records
+
+ if result.size == ids.size
+ pk_type = @klass.type_for_attribute(primary_key)
+
+ records_by_id = result.index_by(&:id)
+ ids.map { |id| records_by_id.fetch(pk_type.cast(id)) }
+ else
+ raise_record_not_found_exception!(ids, result.size, ids.size)
+ end
+ end
+
+ def find_take
+ if loaded?
+ records.first
+ else
+ @take ||= limit(1).records.first
+ end
+ end
+
+ def find_take_with_limit(limit)
+ if loaded?
+ records.take(limit)
+ else
+ limit(limit).to_a
+ end
+ end
+
+ def find_nth(index)
+ @offsets[offset_index + index] ||= find_nth_with_limit(index, 1).first
+ end
+
+ def find_nth_with_limit(index, limit)
+ if loaded?
+ records[index, limit] || []
+ else
+ relation = ordered_relation
+
+ if limit_value.nil? || index < limit_value
+ relation = relation.offset(offset_index + index) unless index.zero?
+ relation.limit(limit).to_a
+ else
+ []
+ end
+ end
+ end
+
+ def find_nth_from_last(index)
+ if loaded?
+ records[-index]
+ else
+ relation = ordered_relation
+
+ relation.to_a[-index]
+ # TODO: can be made more performant on large result sets by
+ # for instance, last(index)[-index] (which would require
+ # refactoring the last(n) finder method to make test suite pass),
+ # or by using a combination of reverse_order, limit, and offset,
+ # e.g., reverse_order.offset(index-1).first
+ end
+ end
+
+ def find_last(limit)
+ limit ? records.last(limit) : records.last
+ end
+
+ def ordered_relation
+ if order_values.empty? && primary_key
+ order(arel_attribute(primary_key).asc)
+ else
+ self
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/relation/from_clause.rb b/activerecord/lib/active_record/relation/from_clause.rb
new file mode 100644
index 0000000000..c53a682aee
--- /dev/null
+++ b/activerecord/lib/active_record/relation/from_clause.rb
@@ -0,0 +1,26 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ class Relation
+ class FromClause # :nodoc:
+ attr_reader :value, :name
+
+ def initialize(value, name)
+ @value = value
+ @name = name
+ end
+
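+ # FROM clauses are not combined when relations are merged; #merge simply
+ # returns the receiver.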
+ def merge(other)
+ self
+ end
+
+ def empty?
+ value.nil?
+ end
+
+ def self.empty
+ @empty ||= new(nil, nil)
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/relation/merger.rb b/activerecord/lib/active_record/relation/merger.rb
new file mode 100644
index 0000000000..182f654897
--- /dev/null
+++ b/activerecord/lib/active_record/relation/merger.rb
@@ -0,0 +1,166 @@
+# frozen_string_literal: true
+
+require "active_support/core_ext/hash/keys"
+
+module ActiveRecord
+ class Relation
+ class HashMerger # :nodoc:
+ attr_reader :relation, :hash
+
+ def initialize(relation, hash)
+ hash.assert_valid_keys(*Relation::VALUE_METHODS)
+
+ @relation = relation
+ @hash = hash
+ end
+
+ def merge # :nodoc:
+ Merger.new(relation, other).merge
+ end
+
+ # Applying values to a relation has side effects (for example,
+ # interpolation may take place for where values), so we build a
+ # relation to merge in rather than merging the raw values
+ # directly.
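+ #
+ # For illustration, merging a (hypothetical) hash such as
+ #
+ # { where: { name: "David" }, order: :id }
+ #
+ # first builds an anonymous relation via where!/order! and then hands it
+ # to Merger, instead of copying the raw values across.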
+ def other
+ other = Relation.create(relation.klass, relation.table, relation.predicate_builder)
+ hash.each { |k, v|
+ if k == :joins
+ if Hash === v
+ other.joins!(v)
+ else
+ other.joins!(*v)
+ end
+ elsif k == :select
+ other._select!(v)
+ else
+ other.send("#{k}!", v)
+ end
+ }
+ other
+ end
+ end
+
+ class Merger # :nodoc:
+ attr_reader :relation, :values, :other
+
+ def initialize(relation, other)
+ @relation = relation
+ @values = other.values
+ @other = other
+ end
+
+ NORMAL_VALUES = Relation::VALUE_METHODS -
+ Relation::CLAUSE_METHODS -
+ [:includes, :preload, :joins, :order, :reverse_order, :lock, :create_with, :reordering] # :nodoc:
+
+ def normal_values
+ NORMAL_VALUES
+ end
+
+ def merge
+ normal_values.each do |name|
+ value = values[name]
+ # The unless clause is here mostly for performance reasons (the `send` call can be moderately
+ # expensive), since most of the time the value is going to be `nil` or `.blank?`. The only catch is
+ # that `false.blank?` returns `true`, so there needs to be an extra check to keep explicit `false`
+ # values from falling through the cracks.
+ unless value.nil? || (value.blank? && false != value)
+ if name == :select
+ relation._select!(*value)
+ else
+ relation.send("#{name}!", *value)
+ end
+ end
+ end
+
+ merge_multi_values
+ merge_single_values
+ merge_clauses
+ merge_preloads
+ merge_joins
+
+ relation
+ end
+
+ private
+
+ def merge_preloads
+ return if other.preload_values.empty? && other.includes_values.empty?
+
+ if other.klass == relation.klass
+ relation.preload!(*other.preload_values) unless other.preload_values.empty?
+ relation.includes!(other.includes_values) unless other.includes_values.empty?
+ else
+ reflection = relation.klass.reflect_on_all_associations.find do |r|
+ r.class_name == other.klass.name
+ end || return
+
+ unless other.preload_values.empty?
+ relation.preload! reflection.name => other.preload_values
+ end
+
+ unless other.includes_values.empty?
+ relation.includes! reflection.name => other.includes_values
+ end
+ end
+ end
+
+ def merge_joins
+ return if other.joins_values.blank?
+
+ if other.klass == relation.klass
+ relation.joins!(*other.joins_values)
+ else
+ joins_dependency, rest = other.joins_values.partition do |join|
+ case join
+ when Hash, Symbol, Array
+ true
+ else
+ false
+ end
+ end
+
+ join_dependency = ActiveRecord::Associations::JoinDependency.new(
+ other.klass, other.table, joins_dependency, []
+ )
+
+ relation.joins! rest
+
+ @relation = relation.joins join_dependency
+ end
+ end
+
+ def merge_multi_values
+ if other.reordering_value
+ # override any order specified in the original relation
+ relation.reorder! other.order_values
+ elsif other.order_values
+ # merge in order_values from the other relation
+ relation.order! other.order_values
+ end
+
+ relation.extend(*other.extending_values) unless other.extending_values.blank?
+ end
+
+ def merge_single_values
+ if relation.from_clause.empty?
+ relation.from_clause = other.from_clause
+ end
+ relation.lock_value ||= other.lock_value
+
+ unless other.create_with_value.blank?
+ relation.create_with_value = (relation.create_with_value || {}).merge(other.create_with_value)
+ end
+ end
+
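+ # Each clause object (where, having, and so on) knows how to merge itself
+ # with its counterpart from the other relation; the combined clause is
+ # written back onto the receiver.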
+ def merge_clauses
+ CLAUSE_METHODS.each do |method|
+ clause = relation.get_value(method)
+ other_clause = other.get_value(method)
+ relation.set_value(method, clause.merge(other_clause))
+ end
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/relation/predicate_builder.rb b/activerecord/lib/active_record/relation/predicate_builder.rb
new file mode 100644
index 0000000000..5c42414072
--- /dev/null
+++ b/activerecord/lib/active_record/relation/predicate_builder.rb
@@ -0,0 +1,133 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ class PredicateBuilder # :nodoc:
+ delegate :resolve_column_aliases, to: :table
+
+ def initialize(table)
+ @table = table
+ @handlers = []
+
+ register_handler(BasicObject, BasicObjectHandler.new(self))
+ register_handler(Base, BaseHandler.new(self))
+ register_handler(Range, RangeHandler.new(self))
+ register_handler(Relation, RelationHandler.new)
+ register_handler(Array, ArrayHandler.new(self))
+ end
+
+ def build_from_hash(attributes)
+ attributes = convert_dot_notation_to_hash(attributes)
+ expand_from_hash(attributes)
+ end
+
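+ # Extracts the table names a hash of conditions references, e.g.
+ # (illustrative) <tt>{ posts: { id: 1 }, "comments.id" => 2 }</tt> yields
+ # <tt>[:posts, "comments"]</tt>, so the relation can mark them for JOINing.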
+ def self.references(attributes)
+ attributes.map do |key, value|
+ if value.is_a?(Hash)
+ key
+ else
+ key = key.to_s
+ key.split(".".freeze).first if key.include?(".".freeze)
+ end
+ end.compact
+ end
+
+ # Define how a class is converted to Arel nodes when passed to +where+.
+ # The handler can be any object that responds to +call+, and will be used
+ # for any value matched by the given class via +===+. For example:
+ #
+ # MyCustomDateRange = Struct.new(:start, :end)
+ # handler = proc do |column, range|
+ # Arel::Nodes::Between.new(column,
+ # Arel::Nodes::And.new([range.start, range.end])
+ # )
+ # end
+ # ActiveRecord::PredicateBuilder.new("users").register_handler(MyCustomDateRange, handler)
+ def register_handler(klass, handler)
+ @handlers.unshift([klass, handler])
+ end
+
+ def build(attribute, value)
+ handler_for(value).call(attribute, value)
+ end
+
+ def build_bind_attribute(column_name, value)
+ attr = Relation::QueryAttribute.new(column_name.to_s, value, table.type(column_name))
+ Arel::Nodes::BindParam.new(attr)
+ end
+
+ protected
+
+ attr_reader :table
+
+ def expand_from_hash(attributes)
+ return ["1=0"] if attributes.empty?
+
+ attributes.flat_map do |key, value|
+ if value.is_a?(Hash) && !table.has_column?(key)
+ associated_predicate_builder(key).expand_from_hash(value)
+ elsif table.associated_with?(key)
+ # Find the foreign key when using queries such as:
+ # Post.where(author: author)
+ #
+ # For polymorphic relationships, find the foreign key and type:
+ # PriceEstimate.where(estimate_of: treasure)
+ associated_table = table.associated_table(key)
+ if associated_table.polymorphic_association?
+ case value.is_a?(Array) ? value.first : value
+ when Base, Relation
+ value = [value] unless value.is_a?(Array)
+ klass = PolymorphicArrayValue
+ end
+ end
+
+ klass ||= AssociationQueryValue
+ queries = klass.new(associated_table, value).queries.map do |query|
+ expand_from_hash(query).reduce(&:and)
+ end
+ queries.reduce(&:or)
+ # FIXME: Deprecate this and provide a public API to force equality
+ elsif (value.is_a?(Range) || value.is_a?(Array)) &&
+ table.type(key.to_s).respond_to?(:subtype)
+ BasicObjectHandler.new(self).call(table.arel_attribute(key), value)
+ else
+ build(table.arel_attribute(key), value)
+ end
+ end
+ end
+
+ private
+
+ def associated_predicate_builder(association_name)
+ self.class.new(table.associated_table(association_name))
+ end
+
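+ # Converts dot-notation keys into nested hashes, e.g. (illustrative)
+ # <tt>{ "comments.user_id" => 1 }</tt> becomes
+ # <tt>{ "comments" => { "user_id" => 1 } }</tt>, so the rest of the
+ # builder can treat them like association sub-hashes.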
+ def convert_dot_notation_to_hash(attributes)
+ dot_notation = attributes.select do |k, v|
+ k.include?(".".freeze) && !v.is_a?(Hash)
+ end
+
+ dot_notation.each_key do |key|
+ table_name, column_name = key.split(".".freeze)
+ value = attributes.delete(key)
+ attributes[table_name] ||= {}
+
+ attributes[table_name] = attributes[table_name].merge(column_name => value)
+ end
+
+ attributes
+ end
+
+ def handler_for(object)
+ @handlers.detect { |klass, _| klass === object }.last
+ end
+ end
+end
+
+require_relative "predicate_builder/array_handler"
+require_relative "predicate_builder/base_handler"
+require_relative "predicate_builder/basic_object_handler"
+require_relative "predicate_builder/range_handler"
+require_relative "predicate_builder/relation_handler"
+
+require_relative "predicate_builder/association_query_value"
+require_relative "predicate_builder/polymorphic_array_value"
diff --git a/activerecord/lib/active_record/relation/predicate_builder/array_handler.rb b/activerecord/lib/active_record/relation/predicate_builder/array_handler.rb
new file mode 100644
index 0000000000..2fd75c8958
--- /dev/null
+++ b/activerecord/lib/active_record/relation/predicate_builder/array_handler.rb
@@ -0,0 +1,48 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ class PredicateBuilder
+ class ArrayHandler # :nodoc:
+ def initialize(predicate_builder)
+ @predicate_builder = predicate_builder
+ end
+
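+ # Builds a predicate for an array value: plain values become an IN list
+ # (or a simple equality when there is only one), nils are ORed in as an
+ # IS NULL check, and any ranges are ORed in via the range handler.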
+ def call(attribute, value)
+ return attribute.in([]) if value.empty?
+
+ values = value.map { |x| x.is_a?(Base) ? x.id : x }
+ nils, values = values.partition(&:nil?)
+ ranges, values = values.partition { |v| v.is_a?(Range) }
+
+ values_predicate =
+ case values.length
+ when 0 then NullPredicate
+ when 1 then predicate_builder.build(attribute, values.first)
+ else
+ bind_values = values.map do |v|
+ predicate_builder.build_bind_attribute(attribute.name, v)
+ end
+ attribute.in(bind_values)
+ end
+
+ unless nils.empty?
+ values_predicate = values_predicate.or(predicate_builder.build(attribute, nil))
+ end
+
+ array_predicates = ranges.map { |range| predicate_builder.build(attribute, range) }
+ array_predicates.unshift(values_predicate)
+ array_predicates.inject(&:or)
+ end
+
+ protected
+
+ attr_reader :predicate_builder
+
+ module NullPredicate # :nodoc:
+ def self.or(other)
+ other
+ end
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/relation/predicate_builder/association_query_value.rb b/activerecord/lib/active_record/relation/predicate_builder/association_query_value.rb
new file mode 100644
index 0000000000..e64d9fdf2a
--- /dev/null
+++ b/activerecord/lib/active_record/relation/predicate_builder/association_query_value.rb
@@ -0,0 +1,46 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ class PredicateBuilder
+ class AssociationQueryValue # :nodoc:
+ def initialize(associated_table, value)
+ @associated_table = associated_table
+ @value = value
+ end
+
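+ # For illustration: Post.where(author: author) expands to a single query
+ # hash such as <tt>{ "author_id" => author.id }</tt> (the column name
+ # depends on the association).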
+ def queries
+ [associated_table.association_foreign_key.to_s => ids]
+ end
+
+ # TODO Change this to private once we've dropped Ruby 2.2 support.
+ # Workaround for Ruby 2.2 "private attribute?" warning.
+ protected
+ attr_reader :associated_table, :value
+
+ private
+ def ids
+ case value
+ when Relation
+ value.select_values.empty? ? value.select(primary_key) : value
+ when Array
+ value.map { |v| convert_to_id(v) }
+ else
+ convert_to_id(value)
+ end
+ end
+
+ def primary_key
+ associated_table.association_primary_key
+ end
+
+ def convert_to_id(value)
+ case value
+ when Base
+ value._read_attribute(primary_key)
+ else
+ value
+ end
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/relation/predicate_builder/base_handler.rb b/activerecord/lib/active_record/relation/predicate_builder/base_handler.rb
new file mode 100644
index 0000000000..112821135f
--- /dev/null
+++ b/activerecord/lib/active_record/relation/predicate_builder/base_handler.rb
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ class PredicateBuilder
+ class BaseHandler # :nodoc:
+ def initialize(predicate_builder)
+ @predicate_builder = predicate_builder
+ end
+
+ def call(attribute, value)
+ predicate_builder.build(attribute, value.id)
+ end
+
+ protected
+
+ attr_reader :predicate_builder
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/relation/predicate_builder/basic_object_handler.rb b/activerecord/lib/active_record/relation/predicate_builder/basic_object_handler.rb
new file mode 100644
index 0000000000..34db266f05
--- /dev/null
+++ b/activerecord/lib/active_record/relation/predicate_builder/basic_object_handler.rb
@@ -0,0 +1,20 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ class PredicateBuilder
+ class BasicObjectHandler # :nodoc:
+ def initialize(predicate_builder)
+ @predicate_builder = predicate_builder
+ end
+
+ def call(attribute, value)
+ bind = predicate_builder.build_bind_attribute(attribute.name, value)
+ attribute.eq(bind)
+ end
+
+ protected
+
+ attr_reader :predicate_builder
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/relation/predicate_builder/polymorphic_array_value.rb b/activerecord/lib/active_record/relation/predicate_builder/polymorphic_array_value.rb
new file mode 100644
index 0000000000..b87b5c36dd
--- /dev/null
+++ b/activerecord/lib/active_record/relation/predicate_builder/polymorphic_array_value.rb
@@ -0,0 +1,54 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ class PredicateBuilder
+ class PolymorphicArrayValue # :nodoc:
+ def initialize(associated_table, values)
+ @associated_table = associated_table
+ @values = values
+ end
+
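+ # For illustration: PriceEstimate.where(estimate_of: [treasure, car])
+ # expands to one query hash per base class, e.g.
+ # <tt>{ "estimate_of_type" => "Treasure", "estimate_of_id" => [...] }</tt>
+ # (the column names depend on the polymorphic association).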
+ def queries
+ type_to_ids_mapping.map do |type, ids|
+ {
+ associated_table.association_foreign_type.to_s => type,
+ associated_table.association_foreign_key.to_s => ids
+ }
+ end
+ end
+
+ # TODO Change this to private once we've dropped Ruby 2.2 support.
+ # Workaround for Ruby 2.2 "private attribute?" warning.
+ protected
+ attr_reader :associated_table, :values
+
+ private
+ def type_to_ids_mapping
+ default_hash = Hash.new { |hsh, key| hsh[key] = [] }
+ values.each_with_object(default_hash) { |value, hash| hash[base_class(value).name] << convert_to_id(value) }
+ end
+
+ def primary_key(value)
+ associated_table.association_primary_key(base_class(value))
+ end
+
+ def base_class(value)
+ case value
+ when Base
+ value.class.base_class
+ when Relation
+ value.klass.base_class
+ end
+ end
+
+ def convert_to_id(value)
+ case value
+ when Base
+ value._read_attribute(primary_key(value))
+ when Relation
+ value.select(primary_key(value))
+ end
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/relation/predicate_builder/range_handler.rb b/activerecord/lib/active_record/relation/predicate_builder/range_handler.rb
new file mode 100644
index 0000000000..6d16579708
--- /dev/null
+++ b/activerecord/lib/active_record/relation/predicate_builder/range_handler.rb
@@ -0,0 +1,41 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ class PredicateBuilder
+ class RangeHandler # :nodoc:
+ class RangeWithBinds < Struct.new(:begin, :end)
+ def exclude_end?
+ false
+ end
+ end
+
+ def initialize(predicate_builder)
+ @predicate_builder = predicate_builder
+ end
+
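+ # Roughly: an inclusive, finite range becomes BETWEEN, an exclusive end
+ # becomes >= AND <, an infinite beginning drops the lower bound, an
+ # infinite end drops the upper bound, and a fully infinite range matches
+ # everything.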
+ def call(attribute, value)
+ begin_bind = predicate_builder.build_bind_attribute(attribute.name, value.begin)
+ end_bind = predicate_builder.build_bind_attribute(attribute.name, value.end)
+ if value.begin.respond_to?(:infinite?) && value.begin.infinite?
+ if value.end.respond_to?(:infinite?) && value.end.infinite?
+ attribute.not_in([])
+ elsif value.exclude_end?
+ attribute.lt(end_bind)
+ else
+ attribute.lteq(end_bind)
+ end
+ elsif value.end.respond_to?(:infinite?) && value.end.infinite?
+ attribute.gteq(begin_bind)
+ elsif value.exclude_end?
+ attribute.gteq(begin_bind).and(attribute.lt(end_bind))
+ else
+ attribute.between(RangeWithBinds.new(begin_bind, end_bind))
+ end
+ end
+
+ protected
+
+ attr_reader :predicate_builder
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/relation/predicate_builder/relation_handler.rb b/activerecord/lib/active_record/relation/predicate_builder/relation_handler.rb
new file mode 100644
index 0000000000..f51ea4fde0
--- /dev/null
+++ b/activerecord/lib/active_record/relation/predicate_builder/relation_handler.rb
@@ -0,0 +1,15 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ class PredicateBuilder
+ class RelationHandler # :nodoc:
+ def call(attribute, value)
+ if value.select_values.empty?
+ value = value.select(value.arel_attribute(value.klass.primary_key))
+ end
+
+ attribute.in(value.arel)
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/relation/query_attribute.rb b/activerecord/lib/active_record/relation/query_attribute.rb
new file mode 100644
index 0000000000..5a9a7fd432
--- /dev/null
+++ b/activerecord/lib/active_record/relation/query_attribute.rb
@@ -0,0 +1,26 @@
+# frozen_string_literal: true
+
+require_relative "../attribute"
+
+module ActiveRecord
+ class Relation
+ class QueryAttribute < Attribute # :nodoc:
+ def type_cast(value)
+ value
+ end
+
+ def value_for_database
+ @value_for_database ||= super
+ end
+
+ def with_cast_value(value)
+ QueryAttribute.new(name, value, type)
+ end
+
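+ # Treat the attribute as NULL only when the raw value is nil (or casts to
+ # nil for the database) and is not a StatementCache::Substitute
+ # placeholder awaiting a real value.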
+ def nil?
+ !value_before_type_cast.is_a?(StatementCache::Substitute) &&
+ (value_before_type_cast.nil? || value_for_database.nil?)
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/relation/query_methods.rb b/activerecord/lib/active_record/relation/query_methods.rb
new file mode 100644
index 0000000000..f7fe968b55
--- /dev/null
+++ b/activerecord/lib/active_record/relation/query_methods.rb
@@ -0,0 +1,1193 @@
+# frozen_string_literal: true
+
+require_relative "from_clause"
+require_relative "query_attribute"
+require_relative "where_clause"
+require_relative "where_clause_factory"
+require "active_model/forbidden_attributes_protection"
+
+module ActiveRecord
+ module QueryMethods
+ extend ActiveSupport::Concern
+
+ include ActiveModel::ForbiddenAttributesProtection
+
+ # WhereChain objects act as a placeholder for queries in which #where does not have any parameter.
+ # In this case, #where must be chained with #not to return a new relation.
+ class WhereChain
+ include ActiveModel::ForbiddenAttributesProtection
+
+ def initialize(scope)
+ @scope = scope
+ end
+
+ # Returns a new relation expressing WHERE + NOT condition according to
+ # the conditions in the arguments.
+ #
+ # #not accepts conditions as a string, array, or hash. See QueryMethods#where for
+ # more details on each format.
+ #
+ # User.where.not("name = 'Jon'")
+ # # SELECT * FROM users WHERE NOT (name = 'Jon')
+ #
+ # User.where.not(["name = ?", "Jon"])
+ # # SELECT * FROM users WHERE NOT (name = 'Jon')
+ #
+ # User.where.not(name: "Jon")
+ # # SELECT * FROM users WHERE name != 'Jon'
+ #
+ # User.where.not(name: nil)
+ # # SELECT * FROM users WHERE name IS NOT NULL
+ #
+ # User.where.not(name: %w(Ko1 Nobu))
+ # # SELECT * FROM users WHERE name NOT IN ('Ko1', 'Nobu')
+ #
+ # User.where.not(name: "Jon", role: "admin")
+ # # SELECT * FROM users WHERE name != 'Jon' AND role != 'admin'
+ def not(opts, *rest)
+ opts = sanitize_forbidden_attributes(opts)
+
+ where_clause = @scope.send(:where_clause_factory).build(opts, rest)
+
+ @scope.references!(PredicateBuilder.references(opts)) if Hash === opts
+ @scope.where_clause += where_clause.invert
+ @scope
+ end
+ end
+
+ FROZEN_EMPTY_ARRAY = [].freeze
+ FROZEN_EMPTY_HASH = {}.freeze
+
+ Relation::VALUE_METHODS.each do |name|
+ method_name = \
+ case name
+ when *Relation::MULTI_VALUE_METHODS then "#{name}_values"
+ when *Relation::SINGLE_VALUE_METHODS then "#{name}_value"
+ when *Relation::CLAUSE_METHODS then "#{name}_clause"
+ end
+ class_eval <<-CODE, __FILE__, __LINE__ + 1
+ def #{method_name} # def includes_values
+ get_value(#{name.inspect}) # get_value(:includes)
+ end # end
+
+ def #{method_name}=(value) # def includes_values=(value)
+ set_value(#{name.inspect}, value) # set_value(:includes, value)
+ end # end
+ CODE
+ end
+
+ alias extensions extending_values
+
+ # Specify relationships to be included in the result set. For
+ # example:
+ #
+ # users = User.includes(:address)
+ # users.each do |user|
+ # user.address.city
+ # end
+ #
+ # allows you to access the +address+ attribute of the +User+ model without
+ # firing an additional query. This will often result in a
+ # performance improvement over a simple join.
+ #
+ # You can also specify multiple relationships, like this:
+ #
+ # users = User.includes(:address, :friends)
+ #
+ # Loading nested relationships is possible using a Hash:
+ #
+ # users = User.includes(:address, friends: [:address, :followers])
+ #
+ # === conditions
+ #
+ # If you want to add conditions to your included models you'll have
+ # to explicitly reference them. For example:
+ #
+ # User.includes(:posts).where('posts.name = ?', 'example')
+ #
+ # Will throw an error, but this will work:
+ #
+ # User.includes(:posts).where('posts.name = ?', 'example').references(:posts)
+ #
+ # Note that #includes works with association names while #references needs
+ # the actual table name.
+ def includes(*args)
+ check_if_method_has_arguments!(:includes, args)
+ spawn.includes!(*args)
+ end
+
+ def includes!(*args) # :nodoc:
+ args.reject!(&:blank?)
+ args.flatten!
+
+ self.includes_values |= args
+ self
+ end
+
+ # Forces eager loading by performing a LEFT OUTER JOIN on +args+:
+ #
+ # User.eager_load(:posts)
+ # # SELECT "users"."id" AS t0_r0, "users"."name" AS t0_r1, ...
+ # # FROM "users" LEFT OUTER JOIN "posts" ON "posts"."user_id" =
+ # # "users"."id"
+ def eager_load(*args)
+ check_if_method_has_arguments!(:eager_load, args)
+ spawn.eager_load!(*args)
+ end
+
+ def eager_load!(*args) # :nodoc:
+ self.eager_load_values += args
+ self
+ end
+
+ # Allows preloading of +args+, in the same way that #includes does:
+ #
+ # User.preload(:posts)
+ # # SELECT "posts".* FROM "posts" WHERE "posts"."user_id" IN (1, 2, 3)
+ def preload(*args)
+ check_if_method_has_arguments!(:preload, args)
+ spawn.preload!(*args)
+ end
+
+ def preload!(*args) # :nodoc:
+ self.preload_values += args
+ self
+ end
+
+ # Used to indicate that the given +table_names+ are referenced by an SQL string,
+ # and should therefore be JOINed in any query rather than loaded separately.
+ # This method only works in conjunction with #includes.
+ # See #includes for more details.
+ #
+ # User.includes(:posts).where("posts.name = 'foo'")
+ # # Doesn't JOIN the posts table, resulting in an error.
+ #
+ # User.includes(:posts).where("posts.name = 'foo'").references(:posts)
+ # # Query now knows the string references posts, so adds a JOIN
+ def references(*table_names)
+ check_if_method_has_arguments!(:references, table_names)
+ spawn.references!(*table_names)
+ end
+
+ def references!(*table_names) # :nodoc:
+ table_names.flatten!
+ table_names.map!(&:to_s)
+
+ self.references_values |= table_names
+ self
+ end
+
+ # Works in two distinct ways.
+ #
+ # First: takes a block so it can be used just like <tt>Array#select</tt>.
+ #
+ # Model.all.select { |m| m.field == value }
+ #
+ # This will build an array of objects from the database for the scope,
+ # converting them into an array and iterating through them using
+ # <tt>Array#select</tt>.
+ #
+ # Second: Modifies the SELECT statement for the query so that only certain
+ # fields are retrieved:
+ #
+ # Model.select(:field)
+ # # => [#<Model id: nil, field: "value">]
+ #
+ # Although in the above example it looks as though this method returns an
+ # array, it actually returns a relation object and can have other query
+ # methods appended to it, such as the other methods in ActiveRecord::QueryMethods.
+ #
+ # The argument to the method can also be an array of fields.
+ #
+ # Model.select(:field, :other_field, :and_one_more)
+ # # => [#<Model id: nil, field: "value", other_field: "value", and_one_more: "value">]
+ #
+ # You can also use one or more strings, which will be used unchanged as SELECT fields.
+ #
+ # Model.select('field AS field_one', 'other_field AS field_two')
+ # # => [#<Model id: nil, field: "value", other_field: "value">]
+ #
+ # If an alias was specified, it will be accessible from the resulting objects:
+ #
+ # Model.select('field AS field_one').first.field_one
+ # # => "value"
+ #
+ # Accessing attributes of an object that do not have fields retrieved by a select,
+ # except +id+, will raise ActiveModel::MissingAttributeError:
+ #
+ # Model.select(:field).first.other_field
+ # # => ActiveModel::MissingAttributeError: missing attribute: other_field
+ def select(*fields)
+ if block_given?
+ if fields.any?
+ raise ArgumentError, "`select' with block doesn't take arguments."
+ end
+
+ return super()
+ end
+
+ raise ArgumentError, "Call `select' with at least one field" if fields.empty?
+ spawn._select!(*fields)
+ end
+
+ def _select!(*fields) # :nodoc:
+ fields.flatten!
+ fields.map! do |field|
+ klass.attribute_alias?(field) ? klass.attribute_alias(field).to_sym : field
+ end
+ self.select_values += fields
+ self
+ end
+
+ # Allows you to specify a group attribute:
+ #
+ # User.group(:name)
+ # # SELECT "users".* FROM "users" GROUP BY name
+ #
+ # Returns an array with distinct records based on the +group+ attribute:
+ #
+ # User.select([:id, :name])
+ # # => [#<User id: 1, name: "Oscar">, #<User id: 2, name: "Oscar">, #<User id: 3, name: "Foo">]
+ #
+ # User.group(:name)
+ # # => [#<User id: 3, name: "Foo", ...>, #<User id: 2, name: "Oscar", ...>]
+ #
+ # User.group('name AS grouped_name, age')
+ # # => [#<User id: 3, name: "Foo", age: 21, ...>, #<User id: 2, name: "Oscar", age: 21, ...>, #<User id: 5, name: "Foo", age: 23, ...>]
+ #
+ # Passing in an array of attributes to group by is also supported.
+ #
+ # User.select([:id, :first_name]).group(:id, :first_name).first(3)
+ # # => [#<User id: 1, first_name: "Bill">, #<User id: 2, first_name: "Earl">, #<User id: 3, first_name: "Beto">]
+ def group(*args)
+ check_if_method_has_arguments!(:group, args)
+ spawn.group!(*args)
+ end
+
+ def group!(*args) # :nodoc:
+ args.flatten!
+
+ self.group_values += args
+ self
+ end
+
+ # Allows you to specify an order attribute:
+ #
+ # User.order(:name)
+ # # SELECT "users".* FROM "users" ORDER BY "users"."name" ASC
+ #
+ # User.order(email: :desc)
+ # # SELECT "users".* FROM "users" ORDER BY "users"."email" DESC
+ #
+ # User.order(:name, email: :desc)
+ # # SELECT "users".* FROM "users" ORDER BY "users"."name" ASC, "users"."email" DESC
+ #
+ # User.order('name')
+ # # SELECT "users".* FROM "users" ORDER BY name
+ #
+ # User.order('name DESC')
+ # # SELECT "users".* FROM "users" ORDER BY name DESC
+ #
+ # User.order('name DESC, email')
+ # # SELECT "users".* FROM "users" ORDER BY name DESC, email
+ def order(*args)
+ check_if_method_has_arguments!(:order, args)
+ spawn.order!(*args)
+ end
+
+ def order!(*args) # :nodoc:
+ preprocess_order_args(args)
+
+ self.order_values += args
+ self
+ end
+
+ # Replaces any existing order defined on the relation with the specified order.
+ #
+ # User.order('email DESC').reorder('id ASC') # generated SQL has 'ORDER BY id ASC'
+ #
+ # Subsequent calls to order on the same relation will be appended. For example:
+ #
+ # User.order('email DESC').reorder('id ASC').order('name ASC')
+ #
+ # generates a query with 'ORDER BY id ASC, name ASC'.
+ def reorder(*args)
+ check_if_method_has_arguments!(:reorder, args)
+ spawn.reorder!(*args)
+ end
+
+ def reorder!(*args) # :nodoc:
+ preprocess_order_args(args)
+
+ self.reordering_value = true
+ self.order_values = args
+ self
+ end
+
+ VALID_UNSCOPING_VALUES = Set.new([:where, :select, :group, :order, :lock,
+ :limit, :offset, :joins, :includes, :from,
+ :readonly, :having])
+
+ # Removes an unwanted relation that is already defined on a chain of relations.
+ # This is useful when passing around chains of relations and you would like to
+ # modify the relations without reconstructing the entire chain.
+ #
+ # User.order('email DESC').unscope(:order) == User.all
+ #
+ # The method arguments are symbols which correspond to the names of the methods
+ # which should be unscoped. The valid arguments are given in VALID_UNSCOPING_VALUES.
+ # The method can also be called with multiple arguments. For example:
+ #
+ # User.order('email DESC').select('id').where(name: "John")
+ # .unscope(:order, :select, :where) == User.all
+ #
+ # One can additionally pass a hash as an argument to unscope specific +:where+ values.
+ # This is done by passing a hash with a single key-value pair. The key should be
+ # +:where+ and the value should be the where value to unscope. For example:
+ #
+ # User.where(name: "John", active: true).unscope(where: :name)
+ # == User.where(active: true)
+ #
+ # This method is similar to #except, but unlike
+ # #except, it persists across merges:
+ #
+ # User.order('email').merge(User.except(:order))
+ # == User.order('email')
+ #
+ # User.order('email').merge(User.unscope(:order))
+ # == User.all
+ #
+ # This means it can be used in association definitions:
+ #
+ # has_many :comments, -> { unscope(where: :trashed) }
+ #
+ def unscope(*args)
+ check_if_method_has_arguments!(:unscope, args)
+ spawn.unscope!(*args)
+ end
+
+ def unscope!(*args) # :nodoc:
+ args.flatten!
+ self.unscope_values += args
+
+ args.each do |scope|
+ case scope
+ when Symbol
+ if !VALID_UNSCOPING_VALUES.include?(scope)
+ raise ArgumentError, "Called unscope() with invalid unscoping argument ':#{scope}'. Valid arguments are :#{VALID_UNSCOPING_VALUES.to_a.join(", :")}."
+ end
+ set_value(scope, nil)
+ when Hash
+ scope.each do |key, target_value|
+ if key != :where
+ raise ArgumentError, "Hash arguments in .unscope(*args) must have :where as the key."
+ end
+
+ target_values = Array(target_value).map(&:to_s)
+ self.where_clause = where_clause.except(*target_values)
+ end
+ else
+ raise ArgumentError, "Unrecognized scoping: #{args.inspect}. Use .unscope(where: :attribute_name) or .unscope(:order), for example."
+ end
+ end
+
+ self
+ end
+
+ # Performs JOINs on +args+. The given symbol(s) should match the name of
+ # the association(s).
+ #
+ # User.joins(:posts)
+ # # SELECT "users".*
+ # # FROM "users"
+ # # INNER JOIN "posts" ON "posts"."user_id" = "users"."id"
+ #
+ # Multiple joins:
+ #
+ # User.joins(:posts, :account)
+ # # SELECT "users".*
+ # # FROM "users"
+ # # INNER JOIN "posts" ON "posts"."user_id" = "users"."id"
+ # # INNER JOIN "accounts" ON "accounts"."id" = "users"."account_id"
+ #
+ # Nested joins:
+ #
+ # User.joins(posts: [:comments])
+ # # SELECT "users".*
+ # # FROM "users"
+ # # INNER JOIN "posts" ON "posts"."user_id" = "users"."id"
+ # # INNER JOIN "comments" "comments_posts"
+ # # ON "comments_posts"."post_id" = "posts"."id"
+ #
+ # You can use strings in order to customize your joins:
+ #
+ # User.joins("LEFT JOIN bookmarks ON bookmarks.bookmarkable_type = 'Post' AND bookmarks.user_id = users.id")
+ # # SELECT "users".* FROM "users" LEFT JOIN bookmarks ON bookmarks.bookmarkable_type = 'Post' AND bookmarks.user_id = users.id
+ def joins(*args)
+ check_if_method_has_arguments!(:joins, args)
+ spawn.joins!(*args)
+ end
+
+ def joins!(*args) # :nodoc:
+ args.compact!
+ args.flatten!
+ self.joins_values += args
+ self
+ end
+
+ # Performs a LEFT OUTER JOIN on +args+:
+ #
+ # User.left_outer_joins(:posts)
+ # => SELECT "users".* FROM "users" LEFT OUTER JOIN "posts" ON "posts"."user_id" = "users"."id"
+ #
+ def left_outer_joins(*args)
+ check_if_method_has_arguments!(:left_outer_joins, args)
+
+ args.compact!
+ args.flatten!
+
+ spawn.left_outer_joins!(*args)
+ end
+ alias :left_joins :left_outer_joins
+
+ def left_outer_joins!(*args) # :nodoc:
+ self.left_outer_joins_values += args
+ self
+ end
+
+ # Returns a new relation, which is the result of filtering the current relation
+ # according to the conditions in the arguments.
+ #
+ # #where accepts conditions in one of several formats. In the examples below, the resulting
+ # SQL is given as an illustration; the actual query generated may be different depending
+ # on the database adapter.
+ #
+ # === string
+ #
+ # A single string, without additional arguments, is passed to the query
+ # constructor as an SQL fragment, and used in the where clause of the query.
+ #
+ # Client.where("orders_count = '2'")
+ # # SELECT * from clients where orders_count = '2';
+ #
+ # Note that building your own string from user input may expose your application
+ # to injection attacks if not done properly. As an alternative, it is recommended
+ # to use one of the following methods.
+ #
+ # === array
+ #
+ # If an array is passed, then the first element of the array is treated as a template, and
+ # the remaining elements are inserted into the template to generate the condition.
+ # Active Record takes care of building the query to avoid injection attacks, and will
+ # convert from the ruby type to the database type where needed. Elements are inserted
+ # into the string in the order in which they appear.
+ #
+ # User.where(["name = ? and email = ?", "Joe", "joe@example.com"])
+ # # SELECT * FROM users WHERE name = 'Joe' AND email = 'joe@example.com';
+ #
+ # Alternatively, you can use named placeholders in the template, and pass a hash as the
+ # second element of the array. The names in the template are replaced with the corresponding
+ # values from the hash.
+ #
+ # User.where(["name = :name and email = :email", { name: "Joe", email: "joe@example.com" }])
+ # # SELECT * FROM users WHERE name = 'Joe' AND email = 'joe@example.com';
+ #
+ # This can make for more readable code in complex queries.
+ #
+ # Lastly, you can use sprintf-style % escapes in the template. This works slightly differently
+ # than the previous methods; you are responsible for ensuring that the values in the template
+ # are properly quoted. The values are passed to the connector for quoting, but the caller
+ # is responsible for ensuring they are enclosed in quotes in the resulting SQL. After quoting,
+ # the values are inserted using the same escapes as the Ruby core method +Kernel::sprintf+.
+ #
+ # User.where(["name = '%s' and email = '%s'", "Joe", "joe@example.com"])
+ # # SELECT * FROM users WHERE name = 'Joe' AND email = 'joe@example.com';
+ #
+ # If #where is called with multiple arguments, these are treated as if they were passed as
+ # the elements of a single array.
+ #
+ # User.where("name = :name and email = :email", { name: "Joe", email: "joe@example.com" })
+ # # SELECT * FROM users WHERE name = 'Joe' AND email = 'joe@example.com';
+ #
+ # When using strings to specify conditions, you can use any operator available from
+ # the database. While this provides the most flexibility, you can also unintentionally introduce
+ # dependencies on the underlying database. If your code is intended for general consumption,
+ # test with multiple database backends.
+ #
+ # === hash
+ #
+ # #where will also accept a hash condition, in which the keys are fields and the values
+ # are values to be searched for.
+ #
+ # Fields can be symbols or strings. Values can be single values, arrays, or ranges.
+ #
+ # User.where({ name: "Joe", email: "joe@example.com" })
+ # # SELECT * FROM users WHERE name = 'Joe' AND email = 'joe@example.com'
+ #
+ # User.where({ name: ["Alice", "Bob"]})
+ # # SELECT * FROM users WHERE name IN ('Alice', 'Bob')
+ #
+ # User.where({ created_at: (Time.now.midnight - 1.day)..Time.now.midnight })
+ # # SELECT * FROM users WHERE (created_at BETWEEN '2012-06-09 07:00:00.000000' AND '2012-06-10 07:00:00.000000')
+ #
+ # In the case of a belongs_to relationship, an association key can be used
+ # to specify the model if an ActiveRecord object is used as the value.
+ #
+ # author = Author.find(1)
+ #
+ # # The following queries will be equivalent:
+ # Post.where(author: author)
+ # Post.where(author_id: author)
+ #
+ # This also works with polymorphic belongs_to relationships:
+ #
+ # treasure = Treasure.create(name: 'gold coins')
+ # treasure.price_estimates << PriceEstimate.create(price: 125)
+ #
+ # # The following queries will be equivalent:
+ # PriceEstimate.where(estimate_of: treasure)
+ # PriceEstimate.where(estimate_of_type: 'Treasure', estimate_of_id: treasure)
+ #
+ # === Joins
+ #
+ # If the relation is the result of a join, you may create a condition which uses any of the
+ # tables in the join. For string and array conditions, use the table name in the condition.
+ #
+ # User.joins(:posts).where("posts.created_at < ?", Time.now)
+ #
+ # For hash conditions, you can either use the table name in the key, or use a sub-hash.
+ #
+ # User.joins(:posts).where({ "posts.published" => true })
+ # User.joins(:posts).where({ posts: { published: true } })
+ #
+ # === no argument
+ #
+ # If no argument is passed, #where returns a new instance of WhereChain, that
+ # can be chained with #not to return a new relation that negates the where clause.
+ #
+ # User.where.not(name: "Jon")
+ # # SELECT * FROM users WHERE name != 'Jon'
+ #
+ # See WhereChain for more details on #not.
+ #
+ # === blank condition
+ #
+ # If the condition is any blank-ish object, then #where is a no-op and returns
+ # the current relation.
+ def where(opts = :chain, *rest)
+ if :chain == opts
+ WhereChain.new(spawn)
+ elsif opts.blank?
+ self
+ else
+ spawn.where!(opts, *rest)
+ end
+ end
+
+ def where!(opts, *rest) # :nodoc:
+ opts = sanitize_forbidden_attributes(opts)
+ references!(PredicateBuilder.references(opts)) if Hash === opts
+ self.where_clause += where_clause_factory.build(opts, rest)
+ self
+ end
+
+ # Allows you to change a previously set where condition for a given attribute, instead of appending to that condition.
+ #
+ # Post.where(trashed: true).where(trashed: false)
+ # # WHERE `trashed` = 1 AND `trashed` = 0
+ #
+ # Post.where(trashed: true).rewhere(trashed: false)
+ # # WHERE `trashed` = 0
+ #
+ # Post.where(active: true).where(trashed: true).rewhere(trashed: false)
+ # # WHERE `active` = 1 AND `trashed` = 0
+ #
+ # This is short-hand for <tt>unscope(where: conditions.keys).where(conditions)</tt>.
+ # Note that unlike reorder, we're only unscoping the named conditions -- not the entire where statement.
+ def rewhere(conditions)
+ unscope(where: conditions.keys).where(conditions)
+ end
+
+ # Returns a new relation, which is the logical union of this relation and the one passed as an
+ # argument.
+ #
+ # The two relations must be structurally compatible: they must be scoping the same model, and
+ # they must differ only by #where (if no #group has been defined) or #having (if a #group is
+ # present). Neither relation may have a #limit, #offset, or #distinct set.
+ #
+ # Post.where("id = 1").or(Post.where("author_id = 3"))
+ # # SELECT `posts`.* FROM `posts` WHERE ((id = 1) OR (author_id = 3))
+ #
+ def or(other)
+ unless other.is_a? Relation
+ raise ArgumentError, "You have passed #{other.class.name} object to #or. Pass an ActiveRecord::Relation object instead."
+ end
+
+ spawn.or!(other)
+ end
+
+ def or!(other) # :nodoc:
+ incompatible_values = structurally_incompatible_values_for_or(other)
+
+ unless incompatible_values.empty?
+ raise ArgumentError, "Relation passed to #or must be structurally compatible. Incompatible values: #{incompatible_values}"
+ end
+
+ self.where_clause = self.where_clause.or(other.where_clause)
+ self.having_clause = having_clause.or(other.having_clause)
+ self.references_values += other.references_values
+
+ self
+ end
+
+ # Allows you to specify a HAVING clause. Note that you can't use HAVING
+ # without also specifying a GROUP clause.
+ #
+ # Order.having('SUM(price) > 30').group('user_id')
+ def having(opts, *rest)
+ opts.blank? ? self : spawn.having!(opts, *rest)
+ end
+
+ def having!(opts, *rest) # :nodoc:
+ opts = sanitize_forbidden_attributes(opts)
+ references!(PredicateBuilder.references(opts)) if Hash === opts
+
+ self.having_clause += having_clause_factory.build(opts, rest)
+ self
+ end
+
+ # Specifies a limit for the number of records to retrieve.
+ #
+ # User.limit(10) # generated SQL has 'LIMIT 10'
+ #
+ # User.limit(10).limit(20) # generated SQL has 'LIMIT 20'
+ def limit(value)
+ spawn.limit!(value)
+ end
+
+ def limit!(value) # :nodoc:
+ self.limit_value = value
+ self
+ end
+
+ # Specifies the number of rows to skip before returning rows.
+ #
+ # User.offset(10) # generated SQL has "OFFSET 10"
+ #
+ # Should be used with order.
+ #
+ # User.offset(10).order("name ASC")
+ def offset(value)
+ spawn.offset!(value)
+ end
+
+ def offset!(value) # :nodoc:
+ self.offset_value = value
+ self
+ end
+
+ # Specifies locking settings (defaults to +true+). For more information
+ # on locking, please see ActiveRecord::Locking.
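+ #
+ # Account.lock.find(1) # adds an exclusive lock clause, e.g. FOR UPDATE
+ # Account.lock("LOCK IN SHARE MODE").find(1) # uses the given SQL fragment
+ #
+ # (Example SQL is illustrative and adapter-dependent.)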
+ def lock(locks = true)
+ spawn.lock!(locks)
+ end
+
+ def lock!(locks = true) # :nodoc:
+ case locks
+ when String, TrueClass, NilClass
+ self.lock_value = locks || true
+ else
+ self.lock_value = false
+ end
+
+ self
+ end
+
+ # Returns a chainable relation with zero records.
+ #
+ # The returned relation implements the Null Object pattern. It is an
+ # object with defined null behavior and always returns an empty array of
+ # records without querying the database.
+ #
+ # Any subsequent condition chained to the returned relation will continue
+ # generating an empty relation and will not fire any query to the database.
+ #
+ # Used in cases where a method or scope could return zero records but the
+ # result needs to be chainable.
+ #
+ # For example:
+ #
+ # @posts = current_user.visible_posts.where(name: params[:name])
+ # # the visible_posts method is expected to return a chainable Relation
+ #
+ # def visible_posts
+ # case role
+ # when 'Country Manager'
+ # Post.where(country: country)
+ # when 'Reviewer'
+ # Post.published
+ # when 'Bad User'
+ # Post.none # It can't be chained if [] is returned.
+ # end
+ # end
+ #
+ def none
+ spawn.none!
+ end
+
+ def none! # :nodoc:
+ where!("1=0").extending!(NullRelation)
+ end
+
+ # Sets readonly attributes for the returned relation. If value is
+ # true (default), attempting to update a record will result in an error.
+ #
+ # users = User.readonly
+ # users.first.save
+ # => ActiveRecord::ReadOnlyRecord: User is marked as readonly
+ def readonly(value = true)
+ spawn.readonly!(value)
+ end
+
+ def readonly!(value = true) # :nodoc:
+ self.readonly_value = value
+ self
+ end
+
+ # Sets attributes to be used when creating new records from a
+ # relation object.
+ #
+ # users = User.where(name: 'Oscar')
+ # users.new.name # => 'Oscar'
+ #
+ # users = users.create_with(name: 'DHH')
+ # users.new.name # => 'DHH'
+ #
+ # You can pass +nil+ to #create_with to reset attributes:
+ #
+ # users = users.create_with(nil)
+ # users.new.name # => 'Oscar'
+ def create_with(value)
+ spawn.create_with!(value)
+ end
+
+ def create_with!(value) # :nodoc:
+ if value
+ value = sanitize_forbidden_attributes(value)
+ self.create_with_value = create_with_value.merge(value)
+ else
+ self.create_with_value = FROZEN_EMPTY_HASH
+ end
+
+ self
+ end
+
+ # Specifies the table from which the records will be fetched. For example:
+ #
+ # Topic.select('title').from('posts')
+ # # SELECT title FROM posts
+ #
+ # Can accept other relation objects. For example:
+ #
+ # Topic.select('title').from(Topic.approved)
+ # # SELECT title FROM (SELECT * FROM topics WHERE approved = 't') subquery
+ #
+ # Topic.select('a.title').from(Topic.approved, :a)
+ # # SELECT a.title FROM (SELECT * FROM topics WHERE approved = 't') a
+ #
+ def from(value, subquery_name = nil)
+ spawn.from!(value, subquery_name)
+ end
+
+ def from!(value, subquery_name = nil) # :nodoc:
+ self.from_clause = Relation::FromClause.new(value, subquery_name)
+ self
+ end
+
+ # Specifies whether the records should be unique or not. For example:
+ #
+ # User.select(:name)
+ # # Might return two records with the same name
+ #
+ # User.select(:name).distinct
+ # # Returns 1 record per distinct name
+ #
+ # User.select(:name).distinct.distinct(false)
+ # # You can also remove the uniqueness
+ def distinct(value = true)
+ spawn.distinct!(value)
+ end
+
+ # Like #distinct, but modifies relation in place.
+ def distinct!(value = true) # :nodoc:
+ self.distinct_value = value
+ self
+ end
+
+ # Used to extend a scope with additional methods, either through
+ # a module or through a block provided.
+ #
+ # The object returned is a relation, which can be further extended.
+ #
+ # === Using a module
+ #
+ # module Pagination
+ # def page(number)
+ # # pagination code goes here
+ # end
+ # end
+ #
+ # scope = Model.all.extending(Pagination)
+ # scope.page(params[:page])
+ #
+ # You can also pass a list of modules:
+ #
+ # scope = Model.all.extending(Pagination, SomethingElse)
+ #
+ # === Using a block
+ #
+ # scope = Model.all.extending do
+ # def page(number)
+ # # pagination code goes here
+ # end
+ # end
+ # scope.page(params[:page])
+ #
+ # You can also use a block and a module list:
+ #
+ # scope = Model.all.extending(Pagination) do
+ # def per_page(number)
+ # # pagination code goes here
+ # end
+ # end
+ def extending(*modules, &block)
+ if modules.any? || block
+ spawn.extending!(*modules, &block)
+ else
+ self
+ end
+ end
+
+ def extending!(*modules, &block) # :nodoc:
+ modules << Module.new(&block) if block
+ modules.flatten!
+
+ self.extending_values += modules
+ extend(*extending_values) if extending_values.any?
+
+ self
+ end
+
+ # Reverse the existing order clause on the relation.
+ #
+ # User.order('name ASC').reverse_order # generated SQL has 'ORDER BY name DESC'
+ def reverse_order
+ spawn.reverse_order!
+ end
+
+ def reverse_order! # :nodoc:
+ orders = order_values.uniq
+ orders.reject!(&:blank?)
+ self.order_values = reverse_sql_order(orders)
+ self
+ end
+
+ def skip_query_cache! # :nodoc:
+ self.skip_query_cache_value = true
+ self
+ end
+
+ # Returns the Arel object associated with the relation.
+ def arel # :nodoc:
+ @arel ||= build_arel
+ end
+
+ # Returns the relation value with the given name.
+ def get_value(name) # :nodoc:
+ @values[name] || default_value_for(name)
+ end
+
+ # Sets the relation value with the given name.
+ def set_value(name, value) # :nodoc:
+ assert_mutability!
+ @values[name] = value
+ end
+
+ private
+
+ def assert_mutability!
+ raise ImmutableRelation if @loaded
+ raise ImmutableRelation if defined?(@arel) && @arel
+ end
+
+ def build_arel
+ arel = Arel::SelectManager.new(table)
+
+ build_joins(arel, joins_values.flatten) unless joins_values.empty?
+ build_left_outer_joins(arel, left_outer_joins_values.flatten) unless left_outer_joins_values.empty?
+
+ arel.where(where_clause.ast) unless where_clause.empty?
+ arel.having(having_clause.ast) unless having_clause.empty?
+ if limit_value
+ limit_attribute = Attribute.with_cast_value(
+ "LIMIT".freeze,
+ connection.sanitize_limit(limit_value),
+ Type.default_value,
+ )
+ arel.take(Arel::Nodes::BindParam.new(limit_attribute))
+ end
+ if offset_value
+ offset_attribute = Attribute.with_cast_value(
+ "OFFSET".freeze,
+ offset_value.to_i,
+ Type.default_value,
+ )
+ arel.skip(Arel::Nodes::BindParam.new(offset_attribute))
+ end
+ arel.group(*arel_columns(group_values.uniq.reject(&:blank?))) unless group_values.empty?
+
+ build_order(arel)
+
+ build_select(arel)
+
+ arel.distinct(distinct_value)
+ arel.from(build_from) unless from_clause.empty?
+ arel.lock(lock_value) if lock_value
+
+ arel
+ end
+
+ def build_from
+ opts = from_clause.value
+ name = from_clause.name
+ case opts
+ when Relation
+ name ||= "subquery"
+ opts.arel.as(name.to_s)
+ else
+ opts
+ end
+ end
+
+ def build_left_outer_joins(manager, outer_joins)
+ buckets = outer_joins.group_by do |join|
+ case join
+ when Hash, Symbol, Array
+ :association_join
+ else
+ raise ArgumentError, "only Hash, Symbol and Array are allowed"
+ end
+ end
+
+ build_join_query(manager, buckets, Arel::Nodes::OuterJoin)
+ end
+
+ def build_joins(manager, joins)
+ buckets = joins.group_by do |join|
+ case join
+ when String
+ :string_join
+ when Hash, Symbol, Array
+ :association_join
+ when ActiveRecord::Associations::JoinDependency
+ :stashed_join
+ when Arel::Nodes::Join
+ :join_node
+ else
+ raise "unknown class: %s" % join.class.name
+ end
+ end
+
+ build_join_query(manager, buckets, Arel::Nodes::InnerJoin)
+ end
+
+ def build_join_query(manager, buckets, join_type)
+ buckets.default = []
+
+ association_joins = buckets[:association_join]
+ stashed_association_joins = buckets[:stashed_join]
+ join_nodes = buckets[:join_node].uniq
+ string_joins = buckets[:string_join].map(&:strip).uniq
+
+ join_list = join_nodes + convert_join_strings_to_ast(manager, string_joins)
+
+ join_dependency = ActiveRecord::Associations::JoinDependency.new(
+ klass, table, association_joins, join_list
+ )
+
+ joins = join_dependency.join_constraints(stashed_association_joins, join_type)
+ joins.each { |join| manager.from(join) }
+
+ manager.join_sources.concat(join_list)
+
+ manager
+ end
+
+ def convert_join_strings_to_ast(table, joins)
+ joins
+ .flatten
+ .reject(&:blank?)
+ .map { |join| table.create_string_join(Arel.sql(join)) }
+ end
+
+ def build_select(arel)
+ if select_values.any?
+ arel.project(*arel_columns(select_values.uniq))
+ else
+ arel.project(table[Arel.star])
+ end
+ end
+
+ def arel_columns(columns)
+ columns.map do |field|
+ if (Symbol === field || String === field) && (klass.has_attribute?(field) || klass.attribute_alias?(field)) && !from_clause.value
+ arel_attribute(field)
+ elsif Symbol === field
+ connection.quote_table_name(field.to_s)
+ else
+ field
+ end
+ end
+ end
+
+ def reverse_sql_order(order_query)
+ if order_query.empty?
+ return [arel_attribute(primary_key).desc] if primary_key
+ raise IrreversibleOrderError,
+ "Relation has no current order and table has no primary key to be used as default order"
+ end
+
+ order_query.flat_map do |o|
+ case o
+ when Arel::Attribute
+ o.desc
+ when Arel::Nodes::Ordering
+ o.reverse
+ when String
+ if does_not_support_reverse?(o)
+ raise IrreversibleOrderError, "Order #{o.inspect} can not be reversed automatically"
+ end
+ o.split(",").map! do |s|
+ s.strip!
+ s.gsub!(/\sasc\Z/i, " DESC") || s.gsub!(/\sdesc\Z/i, " ASC") || s.concat(" DESC")
+ end
+ else
+ o
+ end
+ end
+ end
+
+ def does_not_support_reverse?(order)
+ # Uses SQL function with multiple arguments.
+ (order.include?(",") && order.split(",").find { |section| section.count("(") != section.count(")") }) ||
+ # Uses "nulls first" like construction.
+ /nulls (first|last)\Z/i.match?(order)
+ end
+
+ def build_order(arel)
+ orders = order_values.uniq
+ orders.reject!(&:blank?)
+
+ arel.order(*orders) unless orders.empty?
+ end
+
+ VALID_DIRECTIONS = [:asc, :desc, :ASC, :DESC,
+ "asc", "desc", "ASC", "DESC"].to_set # :nodoc:
+
+ def validate_order_args(args)
+ args.each do |arg|
+ next unless arg.is_a?(Hash)
+ arg.each do |_key, value|
+ unless VALID_DIRECTIONS.include?(value)
+ raise ArgumentError,
+ "Direction \"#{value}\" is invalid. Valid directions are: #{VALID_DIRECTIONS.to_a.inspect}"
+ end
+ end
+ end
+ end
+
+ def preprocess_order_args(order_args)
+ order_args.map! do |arg|
+ klass.send(:sanitize_sql_for_order, arg)
+ end
+ order_args.flatten!
+ validate_order_args(order_args)
+
+ references = order_args.grep(String)
+ references.map! { |arg| arg =~ /^\W?(\w+)\W?\./ && $1 }.compact!
+ references!(references) if references.any?
+
+ # if a symbol is given we prepend the quoted table name
+ order_args.map! do |arg|
+ case arg
+ when Symbol
+ arel_attribute(arg).asc
+ when Hash
+ arg.map { |field, dir|
+ case field
+ when Arel::Nodes::SqlLiteral
+ field.send(dir.downcase)
+ else
+ arel_attribute(field).send(dir.downcase)
+ end
+ }
+ else
+ arg
+ end
+ end.flatten!
+ end
+
+ # Checks to make sure that the arguments are not blank. Note that if some
+ # blank-like object were initially passed into the query method, then this
+ # method will not raise an error.
+ #
+ # Example:
+ #
+ # Post.references() # raises an error
+ # Post.references([]) # does not raise an error
+ #
+ # This particular method should be called with a method_name and the args
+ # passed into that method as an input. For example:
+ #
+ # def references(*args)
+ # check_if_method_has_arguments!("references", args)
+ # ...
+ # end
+ def check_if_method_has_arguments!(method_name, args)
+ if args.blank?
+ raise ArgumentError, "The method .#{method_name}() must contain arguments."
+ end
+ end
+
+ STRUCTURAL_OR_METHODS = Relation::VALUE_METHODS - [:extending, :where, :having, :unscope, :references]
+ def structurally_incompatible_values_for_or(other)
+ STRUCTURAL_OR_METHODS.reject do |method|
+ get_value(method) == other.get_value(method)
+ end
+ end
+
+ def where_clause_factory
+ @where_clause_factory ||= Relation::WhereClauseFactory.new(klass, predicate_builder)
+ end
+ alias having_clause_factory where_clause_factory
+
+ def default_value_for(name)
+ case name
+ when :create_with
+ FROZEN_EMPTY_HASH
+ when :readonly
+ false
+ when :where, :having
+ Relation::WhereClause.empty
+ when :from
+ Relation::FromClause.empty
+ when *Relation::MULTI_VALUE_METHODS
+ FROZEN_EMPTY_ARRAY
+ when *Relation::SINGLE_VALUE_METHODS
+ nil
+ else
+ raise ArgumentError, "unknown relation value #{name.inspect}"
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/relation/record_fetch_warning.rb b/activerecord/lib/active_record/relation/record_fetch_warning.rb
new file mode 100644
index 0000000000..a7d07d23e1
--- /dev/null
+++ b/activerecord/lib/active_record/relation/record_fetch_warning.rb
@@ -0,0 +1,51 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ class Relation
+ module RecordFetchWarning
+ # When this module is prepended to ActiveRecord::Relation and
+ # +config.active_record.warn_on_records_fetched_greater_than+ is
+ # set to an integer, if the number of records a query returns is
+ # greater than the value of +warn_on_records_fetched_greater_than+,
+ # a warning is logged. This allows for the detection of queries that
+ # return a large number of records, which could cause memory bloat.
+ #
+      # In most cases, fetching a large number of records can be performed
+ # efficiently using the ActiveRecord::Batches methods.
+ # See ActiveRecord::Batches for more information.
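+      #
+      # A minimal sketch of the configuration this module relies on (typically
+      # placed in config/application.rb or an environment file):
+      #
+      #   config.active_record.warn_on_records_fetched_greater_than = 1_500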
+ def exec_queries
+ QueryRegistry.reset
+
+ super.tap do
+ if logger && warn_on_records_fetched_greater_than
+ if @records.length > warn_on_records_fetched_greater_than
+ logger.warn "Query fetched #{@records.size} #{@klass} records: #{QueryRegistry.queries.join(";")}"
+ end
+ end
+ end
+ end
+
+ # :stopdoc:
+ ActiveSupport::Notifications.subscribe("sql.active_record") do |*, payload|
+ QueryRegistry.queries << payload[:sql]
+ end
+ # :startdoc:
+
+ class QueryRegistry # :nodoc:
+ extend ActiveSupport::PerThreadRegistry
+
+ attr_reader :queries
+
+ def initialize
+ @queries = []
+ end
+
+ def reset
+ @queries.clear
+ end
+ end
+ end
+ end
+end
+
+ActiveRecord::Relation.prepend ActiveRecord::Relation::RecordFetchWarning
diff --git a/activerecord/lib/active_record/relation/spawn_methods.rb b/activerecord/lib/active_record/relation/spawn_methods.rb
new file mode 100644
index 0000000000..424894f835
--- /dev/null
+++ b/activerecord/lib/active_record/relation/spawn_methods.rb
@@ -0,0 +1,77 @@
+# frozen_string_literal: true
+
+require "active_support/core_ext/hash/except"
+require "active_support/core_ext/hash/slice"
+require_relative "merger"
+
+module ActiveRecord
+ module SpawnMethods
+ # This is overridden by Associations::CollectionProxy
+ def spawn #:nodoc:
+ clone
+ end
+
+ # Merges in the conditions from <tt>other</tt>, if <tt>other</tt> is an ActiveRecord::Relation.
+ # Returns an array representing the intersection of the resulting records with <tt>other</tt>, if <tt>other</tt> is an array.
+ #
+ # Post.where(published: true).joins(:comments).merge( Comment.where(spam: false) )
+ # # Performs a single join query with both where conditions.
+ #
+ # recent_posts = Post.order('created_at DESC').first(5)
+ # Post.where(published: true).merge(recent_posts)
+ # # Returns the intersection of all published posts with the 5 most recently created posts.
+ # # (This is just an example. You'd probably want to do this with a single query!)
+ #
+ # Procs will be evaluated by merge:
+ #
+ # Post.where(published: true).merge(-> { joins(:comments) })
+ # # => Post.where(published: true).joins(:comments)
+ #
+ # This is mainly intended for sharing common conditions between multiple associations.
+ def merge(other)
+ if other.is_a?(Array)
+ records & other
+ elsif other
+ spawn.merge!(other)
+ else
+ raise ArgumentError, "invalid argument: #{other.inspect}."
+ end
+ end
+
+ def merge!(other) # :nodoc:
+ if other.is_a?(Hash)
+ Relation::HashMerger.new(self, other).merge
+ elsif other.is_a?(Relation)
+ Relation::Merger.new(self, other).merge
+ elsif other.respond_to?(:to_proc)
+ instance_exec(&other)
+ else
+ raise ArgumentError, "#{other.inspect} is not an ActiveRecord::Relation"
+ end
+ end
+
+ # Removes from the query the condition(s) specified in +skips+.
+ #
+ # Post.order('id asc').except(:order) # discards the order condition
+ # Post.where('id > 10').order('id asc').except(:where) # discards the where condition but keeps the order
+ def except(*skips)
+ relation_with values.except(*skips)
+ end
+
+ # Removes any condition from the query other than the one(s) specified in +onlies+.
+ #
+ # Post.order('id asc').only(:where) # discards the order condition
+ # Post.order('id asc').only(:where, :order) # uses the specified order
+ def only(*onlies)
+ relation_with values.slice(*onlies)
+ end
+
+ private
+
+ def relation_with(values)
+ result = Relation.create(klass, table, predicate_builder, values)
+ result.extend(*extending_values) if extending_values.any?
+ result
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/relation/where_clause.rb b/activerecord/lib/active_record/relation/where_clause.rb
new file mode 100644
index 0000000000..752bb38481
--- /dev/null
+++ b/activerecord/lib/active_record/relation/where_clause.rb
@@ -0,0 +1,172 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ class Relation
+ class WhereClause # :nodoc:
+ delegate :any?, :empty?, to: :predicates
+
+ def initialize(predicates)
+ @predicates = predicates
+ end
+
+ def +(other)
+ WhereClause.new(
+ predicates + other.predicates,
+ )
+ end
+
+ def -(other)
+ WhereClause.new(
+ predicates - other.predicates,
+ )
+ end
+
+ def merge(other)
+ WhereClause.new(
+ predicates_unreferenced_by(other) + other.predicates,
+ )
+ end
+
+ def except(*columns)
+ WhereClause.new(except_predicates(columns))
+ end
+
+ def or(other)
+ left = self - other
+ common = self - left
+ right = other - common
+
+ if left.empty? || right.empty?
+ common
+ else
+ or_clause = WhereClause.new(
+ [left.ast.or(right.ast)],
+ )
+ common + or_clause
+ end
+ end
+
+ def to_h(table_name = nil)
+ equalities = predicates.grep(Arel::Nodes::Equality)
+ if table_name
+ equalities = equalities.select do |node|
+ node.left.relation.name == table_name
+ end
+ end
+
+ equalities.map { |node|
+ name = node.left.name.to_s
+ value = extract_node_value(node.right)
+ [name, value]
+ }.to_h
+ end
+
+ def ast
+ Arel::Nodes::And.new(predicates_with_wrapped_sql_literals)
+ end
+
+ def ==(other)
+ other.is_a?(WhereClause) &&
+ predicates == other.predicates
+ end
+
+ def invert
+ WhereClause.new(inverted_predicates)
+ end
+
+ def self.empty
+ @empty ||= new([])
+ end
+
+ protected
+
+ attr_reader :predicates
+
+ def referenced_columns
+ @referenced_columns ||= begin
+ equality_nodes = predicates.select { |n| equality_node?(n) }
+ Set.new(equality_nodes, &:left)
+ end
+ end
+
+ private
+
+ def predicates_unreferenced_by(other)
+ predicates.reject do |n|
+ equality_node?(n) && other.referenced_columns.include?(n.left)
+ end
+ end
+
+ def equality_node?(node)
+ node.respond_to?(:operator) && node.operator == :==
+ end
+
+ def inverted_predicates
+ predicates.map { |node| invert_predicate(node) }
+ end
+
+ def invert_predicate(node)
+ case node
+ when NilClass
+ raise ArgumentError, "Invalid argument for .where.not(), got nil."
+ when Arel::Nodes::In
+ Arel::Nodes::NotIn.new(node.left, node.right)
+ when Arel::Nodes::Equality
+ Arel::Nodes::NotEqual.new(node.left, node.right)
+ when String
+ Arel::Nodes::Not.new(Arel::Nodes::SqlLiteral.new(node))
+ else
+ Arel::Nodes::Not.new(node)
+ end
+ end
+
+ def except_predicates(columns)
+ self.predicates.reject do |node|
+ case node
+          when Arel::Nodes::Between, Arel::Nodes::In, Arel::Nodes::NotIn, Arel::Nodes::Equality,
+               Arel::Nodes::NotEqual, Arel::Nodes::LessThan, Arel::Nodes::LessThanOrEqual,
+               Arel::Nodes::GreaterThan, Arel::Nodes::GreaterThanOrEqual
+ subrelation = (node.left.kind_of?(Arel::Attributes::Attribute) ? node.left : node.right)
+ columns.include?(subrelation.name.to_s)
+ end
+ end
+ end
+
+ def predicates_with_wrapped_sql_literals
+ non_empty_predicates.map do |node|
+ case node
+ when Arel::Nodes::SqlLiteral, ::String
+ wrap_sql_literal(node)
+ else node
+ end
+ end
+ end
+
+ ARRAY_WITH_EMPTY_STRING = [""]
+ def non_empty_predicates
+ predicates - ARRAY_WITH_EMPTY_STRING
+ end
+
+ def wrap_sql_literal(node)
+ if ::String === node
+ node = Arel.sql(node)
+ end
+ Arel::Nodes::Grouping.new(node)
+ end
+
+ def extract_node_value(node)
+ case node
+ when Array
+ node.map { |v| extract_node_value(v) }
+ when Arel::Nodes::Casted, Arel::Nodes::Quoted
+ node.val
+ when Arel::Nodes::BindParam
+ value = node.value
+ if value.respond_to?(:value_before_type_cast)
+ value.value_before_type_cast
+ else
+ value
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/relation/where_clause_factory.rb b/activerecord/lib/active_record/relation/where_clause_factory.rb
new file mode 100644
index 0000000000..1374785354
--- /dev/null
+++ b/activerecord/lib/active_record/relation/where_clause_factory.rb
@@ -0,0 +1,35 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ class Relation
+ class WhereClauseFactory # :nodoc:
+ def initialize(klass, predicate_builder)
+ @klass = klass
+ @predicate_builder = predicate_builder
+ end
+
+ def build(opts, other)
+ case opts
+ when String, Array
+ parts = [klass.send(:sanitize_sql, other.empty? ? opts : ([opts] + other))]
+ when Hash
+ attributes = predicate_builder.resolve_column_aliases(opts)
+ attributes = klass.send(:expand_hash_conditions_for_aggregates, attributes)
+ attributes.stringify_keys!
+
+ parts = predicate_builder.build_from_hash(attributes)
+ when Arel::Nodes::Node
+ parts = [opts]
+ else
+ raise ArgumentError, "Unsupported argument type: #{opts} (#{opts.class})"
+ end
+
+ WhereClause.new(parts)
+ end
+
+ protected
+
+ attr_reader :klass, :predicate_builder
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/result.rb b/activerecord/lib/active_record/result.rb
new file mode 100644
index 0000000000..e54e8086dd
--- /dev/null
+++ b/activerecord/lib/active_record/result.rb
@@ -0,0 +1,149 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ ###
+ # This class encapsulates a result returned from calling
+ # {#exec_query}[rdoc-ref:ConnectionAdapters::DatabaseStatements#exec_query]
+ # on any database connection adapter. For example:
+ #
+ # result = ActiveRecord::Base.connection.exec_query('SELECT id, title, body FROM posts')
+ # result # => #<ActiveRecord::Result:0xdeadbeef>
+ #
+ # # Get the column names of the result:
+ # result.columns
+ # # => ["id", "title", "body"]
+ #
+ # # Get the record values of the result:
+ # result.rows
+ # # => [[1, "title_1", "body_1"],
+ # [2, "title_2", "body_2"],
+ # ...
+ # ]
+ #
+ # # Get an array of hashes representing the result (column => value):
+ # result.to_hash
+ # # => [{"id" => 1, "title" => "title_1", "body" => "body_1"},
+ # {"id" => 2, "title" => "title_2", "body" => "body_2"},
+ # ...
+ # ]
+ #
+ # # ActiveRecord::Result also includes Enumerable.
+ # result.each do |row|
+ # puts row['title'] + " " + row['body']
+ # end
+ class Result
+ include Enumerable
+
+ attr_reader :columns, :rows, :column_types
+
+ def initialize(columns, rows, column_types = {})
+ @columns = columns
+ @rows = rows
+ @hash_rows = nil
+ @column_types = column_types
+ end
+
+ # Returns the number of elements in the rows array.
+ def length
+ @rows.length
+ end
+
+ # Calls the given block once for each element in row collection, passing
+ # row as parameter.
+ #
+ # Returns an +Enumerator+ if no block is given.
+ def each
+ if block_given?
+ hash_rows.each { |row| yield row }
+ else
+ hash_rows.to_enum { @rows.size }
+ end
+ end
+
+ # Returns an array of hashes representing each row record.
+ def to_hash
+ hash_rows
+ end
+
+ alias :map! :map
+ alias :collect! :map
+
+ # Returns true if there are no records, otherwise false.
+ def empty?
+ rows.empty?
+ end
+
+ # Returns an array of hashes representing each row record.
+ def to_ary
+ hash_rows
+ end
+
+ def [](idx)
+ hash_rows[idx]
+ end
+
+ # Returns the first record from the rows collection.
+ # If the rows collection is empty, returns +nil+.
+ def first
+ return nil if @rows.empty?
+ Hash[@columns.zip(@rows.first)]
+ end
+
+ # Returns the last record from the rows collection.
+ # If the rows collection is empty, returns +nil+.
+ def last
+ return nil if @rows.empty?
+ Hash[@columns.zip(@rows.last)]
+ end
+
+ def cast_values(type_overrides = {}) # :nodoc:
+ types = columns.map { |name| column_type(name, type_overrides) }
+ result = rows.map do |values|
+ types.zip(values).map { |type, value| type.deserialize(value) }
+ end
+
+ columns.one? ? result.map!(&:first) : result
+ end
+
+ def initialize_copy(other)
+ @columns = columns.dup
+ @rows = rows.dup
+ @column_types = column_types.dup
+ @hash_rows = nil
+ end
+
+ private
+
+ def column_type(name, type_overrides = {})
+ type_overrides.fetch(name) do
+ column_types.fetch(name, Type.default_value)
+ end
+ end
+
+ def hash_rows
+ @hash_rows ||=
+ begin
+ # We freeze the strings to prevent them getting duped when
+ # used as keys in ActiveRecord::Base's @attributes hash
+ columns = @columns.map { |c| c.dup.freeze }
+ @rows.map { |row|
+              # In the past we used Hash[columns.zip(row)];
+              # though elegant, the verbose way is much more efficient,
+              # both time- and memory-wise, because it avoids a big array
+              # allocation. This method is called a lot and needs to be
+              # micro-optimised.
+ hash = {}
+
+ index = 0
+ length = columns.length
+
+ while index < length
+ hash[columns[index]] = row[index]
+ index += 1
+ end
+
+ hash
+ }
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/runtime_registry.rb b/activerecord/lib/active_record/runtime_registry.rb
new file mode 100644
index 0000000000..4975cb8967
--- /dev/null
+++ b/activerecord/lib/active_record/runtime_registry.rb
@@ -0,0 +1,24 @@
+# frozen_string_literal: true
+
+require "active_support/per_thread_registry"
+
+module ActiveRecord
+ # This is a thread locals registry for Active Record. For example:
+ #
+ # ActiveRecord::RuntimeRegistry.connection_handler
+ #
+ # returns the connection handler local to the current thread.
+ #
+ # See the documentation of ActiveSupport::PerThreadRegistry
+ # for further details.
+ class RuntimeRegistry # :nodoc:
+ extend ActiveSupport::PerThreadRegistry
+
+ attr_accessor :connection_handler, :sql_runtime
+
+ [:connection_handler, :sql_runtime].each do |val|
+ class_eval %{ def self.#{val}; instance.#{val}; end }, __FILE__, __LINE__
+ class_eval %{ def self.#{val}=(x); instance.#{val}=x; end }, __FILE__, __LINE__
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/sanitization.rb b/activerecord/lib/active_record/sanitization.rb
new file mode 100644
index 0000000000..91a4f1fad6
--- /dev/null
+++ b/activerecord/lib/active_record/sanitization.rb
@@ -0,0 +1,216 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ module Sanitization
+ extend ActiveSupport::Concern
+
+ module ClassMethods
+ private
+
+ # Accepts an array or string of SQL conditions and sanitizes
+ # them into a valid SQL fragment for a WHERE clause.
+ #
+ # sanitize_sql_for_conditions(["name=? and group_id=?", "foo'bar", 4])
+ # # => "name='foo''bar' and group_id=4"
+ #
+ # sanitize_sql_for_conditions(["name=:name and group_id=:group_id", name: "foo'bar", group_id: 4])
+ # # => "name='foo''bar' and group_id='4'"
+ #
+ # sanitize_sql_for_conditions(["name='%s' and group_id='%s'", "foo'bar", 4])
+ # # => "name='foo''bar' and group_id='4'"
+ #
+ # sanitize_sql_for_conditions("name='foo''bar' and group_id='4'")
+ # # => "name='foo''bar' and group_id='4'"
+ def sanitize_sql_for_conditions(condition) # :doc:
+ return nil if condition.blank?
+
+ case condition
+ when Array; sanitize_sql_array(condition)
+ else condition
+ end
+ end
+ alias :sanitize_sql :sanitize_sql_for_conditions
+ alias :sanitize_conditions :sanitize_sql
+ deprecate sanitize_conditions: :sanitize_sql
+
+ # Accepts an array, hash, or string of SQL conditions and sanitizes
+ # them into a valid SQL fragment for a SET clause.
+ #
+ # sanitize_sql_for_assignment(["name=? and group_id=?", nil, 4])
+ # # => "name=NULL and group_id=4"
+ #
+ # sanitize_sql_for_assignment(["name=:name and group_id=:group_id", name: nil, group_id: 4])
+ # # => "name=NULL and group_id=4"
+ #
+ # Post.send(:sanitize_sql_for_assignment, { name: nil, group_id: 4 })
+ # # => "`posts`.`name` = NULL, `posts`.`group_id` = 4"
+ #
+ # sanitize_sql_for_assignment("name=NULL and group_id='4'")
+ # # => "name=NULL and group_id='4'"
+ def sanitize_sql_for_assignment(assignments, default_table_name = table_name) # :doc:
+ case assignments
+ when Array; sanitize_sql_array(assignments)
+ when Hash; sanitize_sql_hash_for_assignment(assignments, default_table_name)
+ else assignments
+ end
+ end
+
+      # Accepts an array or string of SQL conditions and sanitizes
+ # them into a valid SQL fragment for an ORDER clause.
+ #
+ # sanitize_sql_for_order(["field(id, ?)", [1,3,2]])
+ # # => "field(id, 1,3,2)"
+ #
+ # sanitize_sql_for_order("id ASC")
+ # # => "id ASC"
+ def sanitize_sql_for_order(condition) # :doc:
+ if condition.is_a?(Array) && condition.first.to_s.include?("?")
+ sanitize_sql_array(condition)
+ else
+ condition
+ end
+ end
+
+ # Accepts a hash of SQL conditions and replaces those attributes
+ # that correspond to a {#composed_of}[rdoc-ref:Aggregations::ClassMethods#composed_of]
+ # relationship with their expanded aggregate attribute values.
+ #
+ # Given:
+ #
+ # class Person < ActiveRecord::Base
+ # composed_of :address, class_name: "Address",
+ # mapping: [%w(address_street street), %w(address_city city)]
+ # end
+ #
+ # Then:
+ #
+ # { address: Address.new("813 abc st.", "chicago") }
+ # # => { address_street: "813 abc st.", address_city: "chicago" }
+ def expand_hash_conditions_for_aggregates(attrs) # :doc:
+ expanded_attrs = {}
+ attrs.each do |attr, value|
+ if aggregation = reflect_on_aggregation(attr.to_sym)
+ mapping = aggregation.mapping
+ mapping.each do |field_attr, aggregate_attr|
+ if mapping.size == 1 && !value.respond_to?(aggregate_attr)
+ expanded_attrs[field_attr] = value
+ else
+ expanded_attrs[field_attr] = value.send(aggregate_attr)
+ end
+ end
+ else
+ expanded_attrs[attr] = value
+ end
+ end
+ expanded_attrs
+ end
+
+ # Sanitizes a hash of attribute/value pairs into SQL conditions for a SET clause.
+ #
+ # sanitize_sql_hash_for_assignment({ status: nil, group_id: 1 }, "posts")
+ # # => "`posts`.`status` = NULL, `posts`.`group_id` = 1"
+ def sanitize_sql_hash_for_assignment(attrs, table) # :doc:
+ c = connection
+ attrs.map do |attr, value|
+ value = type_for_attribute(attr.to_s).serialize(value)
+ "#{c.quote_table_name_for_assignment(table, attr)} = #{c.quote(value)}"
+ end.join(", ")
+ end
+
+ # Sanitizes a +string+ so that it is safe to use within an SQL
+ # LIKE statement. This method uses +escape_character+ to escape all occurrences of "\", "_" and "%".
+ #
+ # sanitize_sql_like("100%")
+ # # => "100\\%"
+ #
+ # sanitize_sql_like("snake_cased_string")
+ # # => "snake\\_cased\\_string"
+ #
+ # sanitize_sql_like("100%", "!")
+ # # => "100!%"
+ #
+ # sanitize_sql_like("snake_cased_string", "!")
+ # # => "snake!_cased!_string"
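+      #
+      # A typical use, sketched from inside a class method on the model (this
+      # helper is private), is escaping user input before interpolating it
+      # into a LIKE pattern; +query+ below stands for arbitrary user input:
+      #
+      #   where("name LIKE ?", "%#{sanitize_sql_like(query)}%")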
+ def sanitize_sql_like(string, escape_character = "\\") # :doc:
+ pattern = Regexp.union(escape_character, "%", "_")
+ string.gsub(pattern) { |x| [escape_character, x].join }
+ end
+
+ # Accepts an array of conditions. The array has each value
+ # sanitized and interpolated into the SQL statement.
+ #
+ # sanitize_sql_array(["name=? and group_id=?", "foo'bar", 4])
+ # # => "name='foo''bar' and group_id=4"
+ #
+ # sanitize_sql_array(["name=:name and group_id=:group_id", name: "foo'bar", group_id: 4])
+ # # => "name='foo''bar' and group_id=4"
+ #
+ # sanitize_sql_array(["name='%s' and group_id='%s'", "foo'bar", 4])
+ # # => "name='foo''bar' and group_id='4'"
+ def sanitize_sql_array(ary) # :doc:
+ statement, *values = ary
+ if values.first.is_a?(Hash) && /:\w+/.match?(statement)
+ replace_named_bind_variables(statement, values.first)
+ elsif statement.include?("?")
+ replace_bind_variables(statement, values)
+ elsif statement.blank?
+ statement
+ else
+ statement % values.collect { |value| connection.quote_string(value.to_s) }
+ end
+ end
+
+ def replace_bind_variables(statement, values)
+ raise_if_bind_arity_mismatch(statement, statement.count("?"), values.size)
+ bound = values.dup
+ c = connection
+ statement.gsub(/\?/) do
+ replace_bind_variable(bound.shift, c)
+ end
+ end
+
+ def replace_bind_variable(value, c = connection)
+ if ActiveRecord::Relation === value
+ value.to_sql
+ else
+ quote_bound_value(value, c)
+ end
+ end
+
+ def replace_named_bind_variables(statement, bind_vars)
+ statement.gsub(/(:?):([a-zA-Z]\w*)/) do |match|
+ if $1 == ":" # skip postgresql casts
+ match # return the whole match
+ elsif bind_vars.include?(match = $2.to_sym)
+ replace_bind_variable(bind_vars[match])
+ else
+ raise PreparedStatementInvalid, "missing value for :#{match} in #{statement}"
+ end
+ end
+ end
+
+ def quote_bound_value(value, c = connection)
+ if value.respond_to?(:map) && !value.acts_like?(:string)
+ if value.respond_to?(:empty?) && value.empty?
+ c.quote(nil)
+ else
+ value.map { |v| c.quote(v) }.join(",")
+ end
+ else
+ c.quote(value)
+ end
+ end
+
+ def raise_if_bind_arity_mismatch(statement, expected, provided)
+ unless expected == provided
+ raise PreparedStatementInvalid, "wrong number of bind variables (#{provided} for #{expected}) in: #{statement}"
+ end
+ end
+ end
+
+ def quoted_id # :nodoc:
+ self.class.connection.quote(@attributes[self.class.primary_key].value_for_database)
+ end
+ deprecate :quoted_id
+ end
+end
diff --git a/activerecord/lib/active_record/schema.rb b/activerecord/lib/active_record/schema.rb
new file mode 100644
index 0000000000..f166c27fd9
--- /dev/null
+++ b/activerecord/lib/active_record/schema.rb
@@ -0,0 +1,70 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ # = Active Record \Schema
+ #
+ # Allows programmers to programmatically define a schema in a portable
+ # DSL. This means you can define tables, indexes, etc. without using SQL
+ # directly, so your applications can more easily support multiple
+ # databases.
+ #
+ # Usage:
+ #
+ # ActiveRecord::Schema.define do
+ # create_table :authors do |t|
+ # t.string :name, null: false
+ # end
+ #
+  #   add_index :authors, :name, unique: true
+ #
+ # create_table :posts do |t|
+ # t.integer :author_id, null: false
+ # t.string :subject
+ # t.text :body
+ # t.boolean :private, default: false
+ # end
+ #
+ # add_index :posts, :author_id
+ # end
+ #
+ # ActiveRecord::Schema is only supported by database adapters that also
+ # support migrations, the two features being very similar.
+ class Schema < Migration::Current
+ # Eval the given block. All methods available to the current connection
+ # adapter are available within the block, so you can easily use the
+ # database definition DSL to build up your schema (
+ # {create_table}[rdoc-ref:ConnectionAdapters::SchemaStatements#create_table],
+ # {add_index}[rdoc-ref:ConnectionAdapters::SchemaStatements#add_index], etc.).
+ #
+ # The +info+ hash is optional, and if given is used to define metadata
+ # about the current schema (currently, only the schema's version):
+ #
+ # ActiveRecord::Schema.define(version: 20380119000001) do
+ # ...
+ # end
+ def self.define(info = {}, &block)
+ new.define(info, &block)
+ end
+
+ def define(info, &block) # :nodoc:
+ instance_eval(&block)
+
+ if info[:version].present?
+ ActiveRecord::SchemaMigration.create_table
+ connection.assume_migrated_upto_version(info[:version], migrations_paths)
+ end
+
+ ActiveRecord::InternalMetadata.create_table
+ ActiveRecord::InternalMetadata[:environment] = ActiveRecord::Migrator.current_environment
+ end
+
+ private
+ # Returns the migrations paths.
+ #
+ # ActiveRecord::Schema.new.migrations_paths
+ # # => ["db/migrate"] # Rails migration path by default.
+ def migrations_paths
+ ActiveRecord::Migrator.migrations_paths
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/schema_dumper.rb b/activerecord/lib/active_record/schema_dumper.rb
new file mode 100644
index 0000000000..27a1c89bd1
--- /dev/null
+++ b/activerecord/lib/active_record/schema_dumper.rb
@@ -0,0 +1,255 @@
+# frozen_string_literal: true
+
+require "stringio"
+
+module ActiveRecord
+ # = Active Record Schema Dumper
+ #
+ # This class is used to dump the database schema for some connection to some
+ # output format (i.e., ActiveRecord::Schema).
+ class SchemaDumper #:nodoc:
+ private_class_method :new
+
+ ##
+ # :singleton-method:
+ # A list of tables which should not be dumped to the schema.
+ # Acceptable values are strings as well as regexp if ActiveRecord::Base.schema_format == :ruby.
+ # Only strings are accepted if ActiveRecord::Base.schema_format == :sql.
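+    #
+    # A brief illustration with hypothetical table names, typically placed in
+    # an initializer:
+    #
+    #   ActiveRecord::SchemaDumper.ignore_tables = ["legacy_audit_rows", /\Atmp_/]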
+ cattr_accessor :ignore_tables, default: []
+
+ class << self
+ def dump(connection = ActiveRecord::Base.connection, stream = STDOUT, config = ActiveRecord::Base)
+ new(connection, generate_options(config)).dump(stream)
+ stream
+ end
+
+ private
+ def generate_options(config)
+ {
+ table_name_prefix: config.table_name_prefix,
+ table_name_suffix: config.table_name_suffix
+ }
+ end
+ end
+
+ def dump(stream)
+ header(stream)
+ extensions(stream)
+ tables(stream)
+ trailer(stream)
+ stream
+ end
+
+ private
+
+ def initialize(connection, options = {})
+ @connection = connection
+ @version = Migrator::current_version rescue nil
+ @options = options
+ end
+
+ # turns 20170404131909 into "2017_04_04_131909"
+ def formatted_version
+ stringified = @version.to_s
+ return stringified unless stringified.length == 14
+ stringified.insert(4, "_").insert(7, "_").insert(10, "_")
+ end
+
+ def define_params
+ @version ? "version: #{formatted_version}" : ""
+ end
+
+ def header(stream)
+ stream.puts <<HEADER
+# This file is auto-generated from the current state of the database. Instead
+# of editing this file, please use the migrations feature of Active Record to
+# incrementally modify your database, and then regenerate this schema definition.
+#
+# Note that this schema.rb definition is the authoritative source for your
+# database schema. If you need to create the application database on another
+# system, you should be using db:schema:load, not running all the migrations
+# from scratch. The latter is a flawed and unsustainable approach (the more migrations
+# you'll amass, the slower it'll run and the greater likelihood for issues).
+#
+# It's strongly recommended that you check this file into your version control system.
+
+ActiveRecord::Schema.define(#{define_params}) do
+
+HEADER
+ end
+
+ def trailer(stream)
+ stream.puts "end"
+ end
+
+ def extensions(stream)
+ return unless @connection.supports_extensions?
+ extensions = @connection.extensions
+ if extensions.any?
+ stream.puts " # These are extensions that must be enabled in order to support this database"
+ extensions.sort.each do |extension|
+ stream.puts " enable_extension #{extension.inspect}"
+ end
+ stream.puts
+ end
+ end
+
+ def tables(stream)
+ sorted_tables = @connection.tables.sort
+
+ sorted_tables.each do |table_name|
+ table(table_name, stream) unless ignored?(table_name)
+ end
+
+ # dump foreign keys at the end to make sure all dependent tables exist.
+ if @connection.supports_foreign_keys?
+ sorted_tables.each do |tbl|
+ foreign_keys(tbl, stream) unless ignored?(tbl)
+ end
+ end
+ end
+
+ def table(table, stream)
+ columns = @connection.columns(table)
+ begin
+ tbl = StringIO.new
+
+ # first dump primary key column
+ pk = @connection.primary_key(table)
+
+ tbl.print " create_table #{remove_prefix_and_suffix(table).inspect}"
+
+ case pk
+ when String
+ tbl.print ", primary_key: #{pk.inspect}" unless pk == "id"
+ pkcol = columns.detect { |c| c.name == pk }
+ pkcolspec = @connection.column_spec_for_primary_key(pkcol)
+ if pkcolspec.present?
+ tbl.print ", #{format_colspec(pkcolspec)}"
+ end
+ when Array
+ tbl.print ", primary_key: #{pk.inspect}"
+ else
+ tbl.print ", id: false"
+ end
+ tbl.print ", force: :cascade"
+
+ table_options = @connection.table_options(table)
+ if table_options.present?
+ tbl.print ", #{format_options(table_options)}"
+ end
+
+ tbl.puts " do |t|"
+
+ # then dump all non-primary key columns
+ columns.each do |column|
+ raise StandardError, "Unknown type '#{column.sql_type}' for column '#{column.name}'" unless @connection.valid_type?(column.type)
+ next if column.name == pk
+ type, colspec = @connection.column_spec(column)
+ tbl.print " t.#{type} #{column.name.inspect}"
+ tbl.print ", #{format_colspec(colspec)}" if colspec.present?
+ tbl.puts
+ end
+
+ indexes_in_create(table, tbl)
+
+ tbl.puts " end"
+ tbl.puts
+
+ tbl.rewind
+ stream.print tbl.read
+ rescue => e
+ stream.puts "# Could not dump table #{table.inspect} because of following #{e.class}"
+ stream.puts "# #{e.message}"
+ stream.puts
+ end
+
+ stream
+ end
+
+ # Keep it for indexing materialized views
+ def indexes(table, stream)
+ if (indexes = @connection.indexes(table)).any?
+ add_index_statements = indexes.map do |index|
+ table_name = remove_prefix_and_suffix(index.table).inspect
+ " add_index #{([table_name] + index_parts(index)).join(', ')}"
+ end
+
+ stream.puts add_index_statements.sort.join("\n")
+ stream.puts
+ end
+ end
+
+ def indexes_in_create(table, stream)
+ if (indexes = @connection.indexes(table)).any?
+ index_statements = indexes.map do |index|
+ " t.index #{index_parts(index).join(', ')}"
+ end
+ stream.puts index_statements.sort.join("\n")
+ end
+ end
+
+ def index_parts(index)
+ index_parts = [
+ index.columns.inspect,
+ "name: #{index.name.inspect}",
+ ]
+ index_parts << "unique: true" if index.unique
+ index_parts << "length: { #{format_options(index.lengths)} }" if index.lengths.present?
+ index_parts << "order: { #{format_options(index.orders)} }" if index.orders.present?
+ index_parts << "where: #{index.where.inspect}" if index.where
+ index_parts << "using: #{index.using.inspect}" if !@connection.default_index_type?(index)
+ index_parts << "type: #{index.type.inspect}" if index.type
+ index_parts << "comment: #{index.comment.inspect}" if index.comment
+ index_parts
+ end
+
+ def foreign_keys(table, stream)
+ if (foreign_keys = @connection.foreign_keys(table)).any?
+ add_foreign_key_statements = foreign_keys.map do |foreign_key|
+ parts = [
+ "add_foreign_key #{remove_prefix_and_suffix(foreign_key.from_table).inspect}",
+ remove_prefix_and_suffix(foreign_key.to_table).inspect,
+ ]
+
+ if foreign_key.column != @connection.foreign_key_column_for(foreign_key.to_table)
+ parts << "column: #{foreign_key.column.inspect}"
+ end
+
+ if foreign_key.custom_primary_key?
+ parts << "primary_key: #{foreign_key.primary_key.inspect}"
+ end
+
+ if foreign_key.name !~ /^fk_rails_[0-9a-f]{10}$/
+ parts << "name: #{foreign_key.name.inspect}"
+ end
+
+ parts << "on_update: #{foreign_key.on_update.inspect}" if foreign_key.on_update
+ parts << "on_delete: #{foreign_key.on_delete.inspect}" if foreign_key.on_delete
+
+ " #{parts.join(', ')}"
+ end
+
+ stream.puts add_foreign_key_statements.sort.join("\n")
+ end
+ end
+
+ def format_colspec(colspec)
+ colspec.map { |key, value| "#{key}: #{value}" }.join(", ")
+ end
+
+ def format_options(options)
+ options.map { |key, value| "#{key}: #{value.inspect}" }.join(", ")
+ end
+
+ def remove_prefix_and_suffix(table)
+ table.gsub(/^(#{@options[:table_name_prefix]})(.+)(#{@options[:table_name_suffix]})$/, "\\2")
+ end
+
+ def ignored?(table_name)
+ [ActiveRecord::Base.schema_migrations_table_name, ActiveRecord::Base.internal_metadata_table_name, ignore_tables].flatten.any? do |ignored|
+ ignored === remove_prefix_and_suffix(table_name)
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/schema_migration.rb b/activerecord/lib/active_record/schema_migration.rb
new file mode 100644
index 0000000000..339a5334a8
--- /dev/null
+++ b/activerecord/lib/active_record/schema_migration.rb
@@ -0,0 +1,56 @@
+# frozen_string_literal: true
+
+require_relative "scoping/default"
+require_relative "scoping/named"
+
+module ActiveRecord
+ # This class is used to create a table that keeps track of which migrations
+  # have been applied to a given database. When a migration is run, its version
+  # number is inserted into the `SchemaMigration.table_name` so it doesn't need
+ # to be executed the next time.
+ class SchemaMigration < ActiveRecord::Base # :nodoc:
+ class << self
+ def primary_key
+ "version"
+ end
+
+ def table_name
+ "#{table_name_prefix}#{ActiveRecord::Base.schema_migrations_table_name}#{table_name_suffix}"
+ end
+
+ def table_exists?
+ connection.table_exists?(table_name)
+ end
+
+ def create_table
+ unless table_exists?
+ version_options = connection.internal_string_options_for_primary_key
+
+ connection.create_table(table_name, id: false) do |t|
+ t.string :version, version_options
+ end
+ end
+ end
+
+ def drop_table
+ connection.drop_table table_name, if_exists: true
+ end
+
+ def normalize_migration_number(number)
+ "%.3d" % number.to_i
+ end
+
+ def normalized_versions
+ all_versions.map { |v| normalize_migration_number v }
+ end
+
+ def all_versions
+ order(:version).pluck(:version)
+ end
+ end
+
+ def version
+ super.to_i
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/scoping.rb b/activerecord/lib/active_record/scoping.rb
new file mode 100644
index 0000000000..da585a9562
--- /dev/null
+++ b/activerecord/lib/active_record/scoping.rb
@@ -0,0 +1,106 @@
+# frozen_string_literal: true
+
+require "active_support/per_thread_registry"
+
+module ActiveRecord
+ module Scoping
+ extend ActiveSupport::Concern
+
+ included do
+ include Default
+ include Named
+ end
+
+ module ClassMethods
+ def current_scope(skip_inherited_scope = false) # :nodoc:
+ ScopeRegistry.value_for(:current_scope, self, skip_inherited_scope)
+ end
+
+ def current_scope=(scope) #:nodoc:
+ ScopeRegistry.set_value_for(:current_scope, self, scope)
+ end
+
+ # Collects attributes from scopes that should be applied when creating
+ # an AR instance for the particular class this is called on.
+ def scope_attributes # :nodoc:
+ all.scope_for_create
+ end
+
+ # Are there attributes associated with this scope?
+ def scope_attributes? # :nodoc:
+ current_scope
+ end
+ end
+
+ def populate_with_current_scope_attributes # :nodoc:
+ return unless self.class.scope_attributes?
+
+ attributes = self.class.scope_attributes
+ _assign_attributes(attributes) if attributes.any?
+ end
+
+ def initialize_internals_callback # :nodoc:
+ super
+ populate_with_current_scope_attributes
+ end
+
+ # This class stores the +:current_scope+ and +:ignore_default_scope+ values
+ # for different classes. The registry is stored as a thread local, which is
+ # accessed through +ScopeRegistry.current+.
+ #
+ # This class allows you to store and get the scope values on different
+ # classes and different types of scopes. For example, if you are attempting
+ # to get the current_scope for the +Board+ model, then you would use the
+ # following code:
+ #
+ # registry = ActiveRecord::Scoping::ScopeRegistry
+ # registry.set_value_for(:current_scope, Board, some_new_scope)
+ #
+ # Now when you run:
+ #
+ # registry.value_for(:current_scope, Board)
+ #
+ # You will obtain whatever was defined in +some_new_scope+. The #value_for
+ # and #set_value_for methods are delegated to the current ScopeRegistry
+ # object, so the above example code can also be called as:
+ #
+ # ActiveRecord::Scoping::ScopeRegistry.set_value_for(:current_scope,
+ # Board, some_new_scope)
+ class ScopeRegistry # :nodoc:
+ extend ActiveSupport::PerThreadRegistry
+
+ VALID_SCOPE_TYPES = [:current_scope, :ignore_default_scope]
+
+ def initialize
+ @registry = Hash.new { |hash, key| hash[key] = {} }
+ end
+
+ # Obtains the value for a given +scope_type+ and +model+.
+ def value_for(scope_type, model, skip_inherited_scope = false)
+ raise_invalid_scope_type!(scope_type)
+ return @registry[scope_type][model.name] if skip_inherited_scope
+ klass = model
+ base = model.base_class
+ while klass <= base
+ value = @registry[scope_type][klass.name]
+ return value if value
+ klass = klass.superclass
+ end
+ end
+
+ # Sets the +value+ for a given +scope_type+ and +model+.
+ def set_value_for(scope_type, model, value)
+ raise_invalid_scope_type!(scope_type)
+ @registry[scope_type][model.name] = value
+ end
+
+ private
+
+ def raise_invalid_scope_type!(scope_type)
+ if !VALID_SCOPE_TYPES.include?(scope_type)
+ raise ArgumentError, "Invalid scope type '#{scope_type}' sent to the registry. Scope types must be included in VALID_SCOPE_TYPES"
+ end
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/scoping/default.rb b/activerecord/lib/active_record/scoping/default.rb
new file mode 100644
index 0000000000..86ae374318
--- /dev/null
+++ b/activerecord/lib/active_record/scoping/default.rb
@@ -0,0 +1,152 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ module Scoping
+ module Default
+ extend ActiveSupport::Concern
+
+ included do
+ # Stores the default scope for the class.
+ class_attribute :default_scopes, instance_writer: false, instance_predicate: false, default: []
+ class_attribute :default_scope_override, instance_writer: false, instance_predicate: false, default: nil
+ end
+
+ module ClassMethods
+ # Returns a scope for the model without the previously set scopes.
+ #
+ # class Post < ActiveRecord::Base
+ # def self.default_scope
+ # where(published: true)
+ # end
+ # end
+ #
+ # Post.all # Fires "SELECT * FROM posts WHERE published = true"
+ # Post.unscoped.all # Fires "SELECT * FROM posts"
+ # Post.where(published: false).unscoped.all # Fires "SELECT * FROM posts"
+ #
+ # This method also accepts a block. All queries inside the block will
+ # not use the previously set scopes.
+ #
+ # Post.unscoped {
+ # Post.limit(10) # Fires "SELECT * FROM posts LIMIT 10"
+ # }
+ def unscoped
+ block_given? ? relation.scoping { yield } : relation
+ end
+
+ # Are there attributes associated with this scope?
+ def scope_attributes? # :nodoc:
+ super || default_scopes.any? || respond_to?(:default_scope)
+ end
+
+ def before_remove_const #:nodoc:
+ self.current_scope = nil
+ end
+
+ private
+
+ # Use this macro in your model to set a default scope for all operations on
+ # the model.
+ #
+ # class Article < ActiveRecord::Base
+ # default_scope { where(published: true) }
+ # end
+ #
+ # Article.all # => SELECT * FROM articles WHERE published = true
+ #
+ # The #default_scope is also applied while creating/building a record.
+ # It is not applied while updating a record.
+ #
+ # Article.new.published # => true
+ # Article.create.published # => true
+ #
+ # (You can also pass any object which responds to +call+ to the
+ # +default_scope+ macro, and it will be called when building the
+ # default scope.)
+ #
+ # If you use multiple #default_scope declarations in your model then
+ # they will be merged together:
+ #
+ # class Article < ActiveRecord::Base
+ # default_scope { where(published: true) }
+ # default_scope { where(rating: 'G') }
+ # end
+ #
+ # Article.all # => SELECT * FROM articles WHERE published = true AND rating = 'G'
+ #
+ # This is also the case with inheritance and module includes where the
+ # parent or module defines a #default_scope and the child or including
+ # class defines a second one.
+ #
+ # If you need to do more complex things with a default scope, you can
+ # alternatively define it as a class method:
+ #
+ # class Article < ActiveRecord::Base
+ # def self.default_scope
+ # # Should return a scope, you can call 'super' here etc.
+ # end
+ # end
+ def default_scope(scope = nil) # :doc:
+ scope = Proc.new if block_given?
+
+ if scope.is_a?(Relation) || !scope.respond_to?(:call)
+ raise ArgumentError,
+ "Support for calling #default_scope without a block is removed. For example instead " \
+ "of `default_scope where(color: 'red')`, please use " \
+ "`default_scope { where(color: 'red') }`. (Alternatively you can just redefine " \
+ "self.default_scope.)"
+ end
+
+ self.default_scopes += [scope]
+ end
+
+ def build_default_scope(base_rel = nil)
+ return if abstract_class?
+
+ if default_scope_override.nil?
+ self.default_scope_override = !Base.is_a?(method(:default_scope).owner)
+ end
+
+ if default_scope_override
+ # The user has defined their own default scope method, so call that
+ evaluate_default_scope do
+ if scope = default_scope
+ (base_rel ||= relation).merge(scope)
+ end
+ end
+ elsif default_scopes.any?
+ base_rel ||= relation
+ evaluate_default_scope do
+ default_scopes.inject(base_rel) do |default_scope, scope|
+ scope = scope.respond_to?(:to_proc) ? scope : scope.method(:call)
+ default_scope.merge(base_rel.instance_exec(&scope))
+ end
+ end
+ end
+ end
+
+ def ignore_default_scope?
+ ScopeRegistry.value_for(:ignore_default_scope, base_class)
+ end
+
+ def ignore_default_scope=(ignore)
+ ScopeRegistry.set_value_for(:ignore_default_scope, base_class, ignore)
+ end
+
+ # The ignore_default_scope flag is used to prevent an infinite recursion
+ # situation where a default scope references a scope which has a default
+ # scope which references a scope...
+ def evaluate_default_scope
+ return if ignore_default_scope?
+
+ begin
+ self.ignore_default_scope = true
+ yield
+ ensure
+ self.ignore_default_scope = false
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/scoping/named.rb b/activerecord/lib/active_record/scoping/named.rb
new file mode 100644
index 0000000000..43cce19c1f
--- /dev/null
+++ b/activerecord/lib/active_record/scoping/named.rb
@@ -0,0 +1,189 @@
+# frozen_string_literal: true
+
+require "active_support/core_ext/array"
+require "active_support/core_ext/hash/except"
+require "active_support/core_ext/kernel/singleton_class"
+
+module ActiveRecord
+ # = Active Record \Named \Scopes
+ module Scoping
+ module Named
+ extend ActiveSupport::Concern
+
+ module ClassMethods
+ # Returns an ActiveRecord::Relation scope object.
+ #
+ # posts = Post.all
+ # posts.size # Fires "select count(*) from posts" and returns the count
+ # posts.each {|p| puts p.name } # Fires "select * from posts" and loads post objects
+ #
+ # fruits = Fruit.all
+ # fruits = fruits.where(color: 'red') if options[:red_only]
+ # fruits = fruits.limit(10) if limited?
+ #
+ # You can define a scope that applies to all finders using
+ # {default_scope}[rdoc-ref:Scoping::Default::ClassMethods#default_scope].
+ def all
+ if current_scope
+ current_scope.clone
+ else
+ default_scoped
+ end
+ end
+
+ def default_scoped(scope = relation) # :nodoc:
+ build_default_scope(scope) || scope
+ end
+
+ def default_extensions # :nodoc:
+ if scope = current_scope || build_default_scope
+ scope.extensions
+ else
+ []
+ end
+ end
+
+ # Adds a class method for retrieving and querying objects.
+ # The method is intended to return an ActiveRecord::Relation
+ # object, which is composable with other scopes.
+ # If it returns +nil+ or +false+, an
+ # {all}[rdoc-ref:Scoping::Named::ClassMethods#all] scope is returned instead.
+ #
+ # A \scope represents a narrowing of a database query, such as
+ # <tt>where(color: :red).select('shirts.*').includes(:washing_instructions)</tt>.
+ #
+ # class Shirt < ActiveRecord::Base
+ # scope :red, -> { where(color: 'red') }
+ # scope :dry_clean_only, -> { joins(:washing_instructions).where('washing_instructions.dry_clean_only = ?', true) }
+ # end
+ #
+ # The above calls to #scope define class methods <tt>Shirt.red</tt> and
+ # <tt>Shirt.dry_clean_only</tt>. <tt>Shirt.red</tt>, in effect,
+ # represents the query <tt>Shirt.where(color: 'red')</tt>.
+ #
+ # You should always pass a callable object to the scopes defined
+ # with #scope. This ensures that the scope is re-evaluated each
+ # time it is called.
+ #
+ # Note that this is simply 'syntactic sugar' for defining an actual
+ # class method:
+ #
+ # class Shirt < ActiveRecord::Base
+ # def self.red
+ # where(color: 'red')
+ # end
+ # end
+ #
+ # Unlike <tt>Shirt.find(...)</tt>, however, the object returned by
+ # <tt>Shirt.red</tt> is not an Array but an ActiveRecord::Relation,
+ # which is composable with other scopes; it resembles the association object
+ # constructed by a {has_many}[rdoc-ref:Associations::ClassMethods#has_many]
+ # declaration. For instance, you can invoke <tt>Shirt.red.first</tt>, <tt>Shirt.red.count</tt>,
+ # <tt>Shirt.red.where(size: 'small')</tt>. Also, just as with the
+ # association objects, named \scopes act like an Array, implementing
+ # Enumerable; <tt>Shirt.red.each(&block)</tt>, <tt>Shirt.red.first</tt>,
+ # and <tt>Shirt.red.inject(memo, &block)</tt> all behave as if
+ # <tt>Shirt.red</tt> really was an array.
+ #
+ # These named \scopes are composable. For instance,
+ # <tt>Shirt.red.dry_clean_only</tt> will produce all shirts that are
+ # both red and dry clean only. Nested finds and calculations also work
+ # with these compositions: <tt>Shirt.red.dry_clean_only.count</tt>
+ # returns the number of garments for which these criteria obtain.
+ # Similarly with <tt>Shirt.red.dry_clean_only.average(:thread_count)</tt>.
+ #
+ # All scopes are available as class methods on the ActiveRecord::Base
+ # descendant upon which the \scopes were defined. But they are also
+ # available to {has_many}[rdoc-ref:Associations::ClassMethods#has_many]
+ # associations. If,
+ #
+ # class Person < ActiveRecord::Base
+ # has_many :shirts
+ # end
+ #
+ # then <tt>elton.shirts.red.dry_clean_only</tt> will return all of
+ # Elton's red, dry clean only shirts.
+ #
+ # \Named scopes can also have extensions, just as with
+ # {has_many}[rdoc-ref:Associations::ClassMethods#has_many] declarations:
+ #
+ # class Shirt < ActiveRecord::Base
+ # scope :red, -> { where(color: 'red') } do
+ # def dom_id
+ # 'red_shirts'
+ # end
+ # end
+ # end
+ #
+ # Scopes can also be used while creating/building a record.
+ #
+ # class Article < ActiveRecord::Base
+ # scope :published, -> { where(published: true) }
+ # end
+ #
+ # Article.published.new.published # => true
+ # Article.published.create.published # => true
+ #
+ # \Class methods on your model are automatically available
+ # on scopes. Assuming the following setup:
+ #
+ # class Article < ActiveRecord::Base
+ # scope :published, -> { where(published: true) }
+ # scope :featured, -> { where(featured: true) }
+ #
+ # def self.latest_article
+ # order('published_at desc').first
+ # end
+ #
+ # def self.titles
+ # pluck(:title)
+ # end
+ # end
+ #
+ # We are able to call the methods like this:
+ #
+ # Article.published.featured.latest_article
+ # Article.featured.titles
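+      #
+      # Scopes may also take arguments, which are forwarded to the callable.
+      # A brief sketch, assuming the model has a +color+ column:
+      #
+      #   class Shirt < ActiveRecord::Base
+      #     scope :colored, ->(color) { where(color: color) }
+      #   end
+      #
+      #   Shirt.colored('red') # behaves like Shirt.where(color: 'red')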
+ def scope(name, body, &block)
+ unless body.respond_to?(:call)
+ raise ArgumentError, "The scope body needs to be callable."
+ end
+
+ if dangerous_class_method?(name)
+ raise ArgumentError, "You tried to define a scope named \"#{name}\" " \
+ "on the model \"#{self.name}\", but Active Record already defined " \
+ "a class method with the same name."
+ end
+
+ valid_scope_name?(name)
+ extension = Module.new(&block) if block
+
+ if body.respond_to?(:to_proc)
+ singleton_class.send(:define_method, name) do |*args|
+ scope = all
+ scope = scope.instance_exec(*args, &body) || scope
+ scope = scope.extending(extension) if extension
+ scope
+ end
+ else
+ singleton_class.send(:define_method, name) do |*args|
+ scope = all
+ scope = scope.scoping { body.call(*args) || scope }
+ scope = scope.extending(extension) if extension
+ scope
+ end
+ end
+ end
+
+ private
+
+ def valid_scope_name?(name)
+ if respond_to?(name, true) && logger
+ logger.warn "Creating scope :#{name}. " \
+ "Overwriting existing method #{self.name}.#{name}."
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/secure_token.rb b/activerecord/lib/active_record/secure_token.rb
new file mode 100644
index 0000000000..bcdb33901b
--- /dev/null
+++ b/activerecord/lib/active_record/secure_token.rb
@@ -0,0 +1,40 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ module SecureToken
+ extend ActiveSupport::Concern
+
+ module ClassMethods
+ # Example using #has_secure_token
+ #
+ # # Schema: User(token:string, auth_token:string)
+ # class User < ActiveRecord::Base
+ # has_secure_token
+ # has_secure_token :auth_token
+ # end
+ #
+ # user = User.new
+ # user.save
+ # user.token # => "pX27zsMN2ViQKta1bGfLmVJE"
+ # user.auth_token # => "77TMHrHJFvFDwodq8w7Ev2m7"
+ # user.regenerate_token # => true
+ # user.regenerate_auth_token # => true
+ #
+ # <tt>SecureRandom::base58</tt> is used to generate the 24-character unique token, so collisions are highly unlikely.
+ #
+ # Note that it's still possible to generate a race condition in the database in the same way that
+ # {validates_uniqueness_of}[rdoc-ref:Validations::ClassMethods#validates_uniqueness_of] can.
+ # You're encouraged to add a unique index in the database to deal with this even more unlikely scenario.
+ def has_secure_token(attribute = :token)
+ # Load securerandom only when has_secure_token is used.
+ require "active_support/core_ext/securerandom"
+ define_method("regenerate_#{attribute}") { update! attribute => self.class.generate_unique_secure_token }
+ before_create { send("#{attribute}=", self.class.generate_unique_secure_token) unless send("#{attribute}?") }
+ end
+
+ def generate_unique_secure_token
+ SecureRandom.base58(24)
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/serialization.rb b/activerecord/lib/active_record/serialization.rb
new file mode 100644
index 0000000000..741fea43ce
--- /dev/null
+++ b/activerecord/lib/active_record/serialization.rb
@@ -0,0 +1,22 @@
+# frozen_string_literal: true
+
+module ActiveRecord #:nodoc:
+ # = Active Record \Serialization
+ module Serialization
+ extend ActiveSupport::Concern
+ include ActiveModel::Serializers::JSON
+
+ included do
+ self.include_root_in_json = false
+ end
+
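+    # Builds the hash of attributes used by the serializers, always excluding
+    # the model's inheritance column. A hedged example, assuming a +User+
+    # model with a +password_digest+ column:
+    #
+    #   user.serializable_hash(except: :password_digest)
+    #   # => { "id" => 1, "name" => "Ada", ... }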
+ def serializable_hash(options = nil)
+ options = options.try(:dup) || {}
+
+ options[:except] = Array(options[:except]).map(&:to_s)
+ options[:except] |= Array(self.class.inheritance_column)
+
+ super(options)
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/statement_cache.rb b/activerecord/lib/active_record/statement_cache.rb
new file mode 100644
index 0000000000..64657089b5
--- /dev/null
+++ b/activerecord/lib/active_record/statement_cache.rb
@@ -0,0 +1,113 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+  # The statement cache is used to cache a single statement in order to avoid creating the AST again.
+ # Initializing the cache is done by passing the statement in the create block:
+ #
+ # cache = StatementCache.create(Book.connection) do |params|
+ # Book.where(name: "my book").where("author_id > 3")
+ # end
+ #
+ # The cached statement is executed by using the
+ # {connection.execute}[rdoc-ref:ConnectionAdapters::DatabaseStatements#execute] method:
+ #
+ # cache.execute([], Book, Book.connection)
+ #
+ # The relation returned by the block is cached, and for each
+ # {execute}[rdoc-ref:ConnectionAdapters::DatabaseStatements#execute]
+  # call the cached relation gets duped. The database is queried when +to_a+ is called on the relation.
+ #
+  # If you want to cache the statement without the values, you can use the +bind+ method of the
+ # block parameter.
+ #
+ # cache = StatementCache.create(Book.connection) do |params|
+ # Book.where(name: params.bind)
+ # end
+ #
+  # Then pass the bind values as the first argument of the +execute+ call.
+ #
+ # cache.execute(["my book"], Book, Book.connection)
+ class StatementCache # :nodoc:
+ class Substitute; end # :nodoc:
+
+ class Query # :nodoc:
+ def initialize(sql)
+ @sql = sql
+ end
+
+ def sql_for(binds, connection)
+ @sql
+ end
+ end
+
+ class PartialQuery < Query # :nodoc:
+ def initialize(values)
+ @values = values
+ @indexes = values.each_with_index.find_all { |thing, i|
+ Arel::Nodes::BindParam === thing
+ }.map(&:last)
+ end
+
+ def sql_for(binds, connection)
+ val = @values.dup
+ casted_binds = binds.map(&:value_for_database)
+ @indexes.each { |i| val[i] = connection.quote(casted_binds.shift) }
+ val.join
+ end
+ end
+
+ def self.query(sql)
+ Query.new(sql)
+ end
+
+ def self.partial_query(values)
+ PartialQuery.new(values)
+ end
+
+ class Params # :nodoc:
+ def bind; Substitute.new; end
+ end
+
+ class BindMap # :nodoc:
+ def initialize(bound_attributes)
+ @indexes = []
+ @bound_attributes = bound_attributes
+
+ bound_attributes.each_with_index do |attr, i|
+ if Substitute === attr.value
+ @indexes << i
+ end
+ end
+ end
+
+ def bind(values)
+ bas = @bound_attributes.dup
+ @indexes.each_with_index { |offset, i| bas[offset] = bas[offset].with_cast_value(values[i]) }
+ bas
+ end
+ end
+
+ attr_reader :bind_map, :query_builder
+
+ def self.create(connection, block = Proc.new)
+ relation = block.call Params.new
+ query_builder, binds = connection.cacheable_query(self, relation.arel)
+ bind_map = BindMap.new(binds)
+ new query_builder, bind_map
+ end
+
+ def initialize(query_builder, bind_map)
+ @query_builder = query_builder
+ @bind_map = bind_map
+ end
+
+ def execute(params, klass, connection, &block)
+ bind_values = bind_map.bind params
+
+ sql = query_builder.sql_for bind_values, connection
+
+ klass.find_by_sql(sql, bind_values, preparable: true, &block)
+ end
+ alias :call :execute
+ end
+end
diff --git a/activerecord/lib/active_record/store.rb b/activerecord/lib/active_record/store.rb
new file mode 100644
index 0000000000..202b82fa61
--- /dev/null
+++ b/activerecord/lib/active_record/store.rb
@@ -0,0 +1,211 @@
+# frozen_string_literal: true
+
+require "active_support/core_ext/hash/indifferent_access"
+
+module ActiveRecord
+ # Store gives you a thin wrapper around serialize for the purpose of storing hashes in a single column.
+ # It's like a simple key/value store baked into your record when you don't care about being able to
+ # query that store outside the context of a single record.
+ #
+  # You can then declare accessors to this store that are accessible just like any other attribute
+  # of the model. This is very helpful for exposing store keys to a form or anywhere else that's
+  # already built around accessing attributes on the model.
+ #
+  # Make sure that you declare the database column used for the serialized store as a text column, so there's
+ # plenty of room.
+ #
+  # You can set a custom coder to encode/decode your serialized attributes to/from different formats.
+  # JSON, YAML, and Marshal are supported out of the box. In general, any coder that provides +load+ and +dump+ will work.
+ #
+ # NOTE: If you are using PostgreSQL specific columns like +hstore+ or +json+ there is no need for
+  # the serialization provided by {.store}[rdoc-ref:ClassMethods#store].
+ # Simply use {.store_accessor}[rdoc-ref:ClassMethods#store_accessor] instead to generate
+ # the accessor methods. Be aware that these columns use a string keyed hash and do not allow access
+ # using a symbol.
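+  #
+  # A sketch for a PostgreSQL +jsonb+ column (the model and column names are illustrative;
+  # note that no +store+ call is needed):
+  #
+  #   class Account < ActiveRecord::Base
+  #     store_accessor :preferences, :two_factor_enabled
+  #   end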
+ #
+  # NOTE: The default validations will work, with the exception of +uniqueness+.
+ # For example, if you want to check for +uniqueness+ with +hstore+ you will
+ # need to use a custom validation to handle it.
+ #
+ # Examples:
+ #
+ # class User < ActiveRecord::Base
+ # store :settings, accessors: [ :color, :homepage ], coder: JSON
+ # end
+ #
+ # u = User.new(color: 'black', homepage: '37signals.com')
+ # u.color # Accessor stored attribute
+ # u.settings[:country] = 'Denmark' # Any attribute, even if not specified with an accessor
+ #
+ # # There is no difference between strings and symbols for accessing custom attributes
+ # u.settings[:country] # => 'Denmark'
+ # u.settings['country'] # => 'Denmark'
+ #
+ # # Add additional accessors to an existing store through store_accessor
+ # class SuperUser < User
+ # store_accessor :settings, :privileges, :servants
+ # end
+ #
+  # The stored attribute names can be retrieved using {.stored_attributes}[rdoc-ref:ClassMethods#stored_attributes].
+ #
+ # User.stored_attributes[:settings] # [:color, :homepage]
+ #
+ # == Overwriting default accessors
+ #
+ # All stored values are automatically available through accessors on the Active Record
+ # object, but sometimes you want to specialize this behavior. This can be done by overwriting
+ # the default accessors (using the same name as the attribute) and calling <tt>super</tt>
+ # to actually change things.
+ #
+ # class Song < ActiveRecord::Base
+ # # Uses a stored integer to hold the volume adjustment of the song
+ # store :settings, accessors: [:volume_adjustment]
+ #
+ # def volume_adjustment=(decibels)
+ # super(decibels.to_i)
+ # end
+ #
+ # def volume_adjustment
+ # super.to_i
+ # end
+ # end
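+  #
+  # With the overrides above, values are coerced on the way in and out (a sketch):
+  #
+  #   song.volume_adjustment = "5"
+  #   song.volume_adjustment # => 5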
+ module Store
+ extend ActiveSupport::Concern
+
+ included do
+ class << self
+ attr_accessor :local_stored_attributes
+ end
+ end
+
+ module ClassMethods
+ def store(store_attribute, options = {})
+ serialize store_attribute, IndifferentCoder.new(store_attribute, options[:coder])
+ store_accessor(store_attribute, options[:accessors]) if options.has_key? :accessors
+ end
+
+ def store_accessor(store_attribute, *keys)
+ keys = keys.flatten
+
+ _store_accessors_module.module_eval do
+ keys.each do |key|
+ define_method("#{key}=") do |value|
+ write_store_attribute(store_attribute, key, value)
+ end
+
+ define_method(key) do
+ read_store_attribute(store_attribute, key)
+ end
+ end
+ end
+
+ # assign new store attribute and create new hash to ensure that each class in the hierarchy
+ # has its own hash of stored attributes.
+ self.local_stored_attributes ||= {}
+ self.local_stored_attributes[store_attribute] ||= []
+ self.local_stored_attributes[store_attribute] |= keys
+ end
+
+ def _store_accessors_module # :nodoc:
+ @_store_accessors_module ||= begin
+ mod = Module.new
+ include mod
+ mod
+ end
+ end
+
+ def stored_attributes
+ parent = superclass.respond_to?(:stored_attributes) ? superclass.stored_attributes : {}
+ if local_stored_attributes
+ parent.merge!(local_stored_attributes) { |k, a, b| a | b }
+ end
+ parent
+ end
+ end
+
+ private
+ def read_store_attribute(store_attribute, key) # :doc:
+ accessor = store_accessor_for(store_attribute)
+ accessor.read(self, store_attribute, key)
+ end
+
+ def write_store_attribute(store_attribute, key, value) # :doc:
+ accessor = store_accessor_for(store_attribute)
+ accessor.write(self, store_attribute, key, value)
+ end
+
+ def store_accessor_for(store_attribute)
+ type_for_attribute(store_attribute.to_s).accessor
+ end
+
+ class HashAccessor # :nodoc:
+ def self.read(object, attribute, key)
+ prepare(object, attribute)
+ object.public_send(attribute)[key]
+ end
+
+ def self.write(object, attribute, key, value)
+ prepare(object, attribute)
+ if value != read(object, attribute, key)
+ object.public_send :"#{attribute}_will_change!"
+ object.public_send(attribute)[key] = value
+ end
+ end
+
+ def self.prepare(object, attribute)
+ object.public_send :"#{attribute}=", {} unless object.send(attribute)
+ end
+ end
+
+ class StringKeyedHashAccessor < HashAccessor # :nodoc:
+ def self.read(object, attribute, key)
+ super object, attribute, key.to_s
+ end
+
+ def self.write(object, attribute, key, value)
+ super object, attribute, key.to_s, value
+ end
+ end
+
+ class IndifferentHashAccessor < ActiveRecord::Store::HashAccessor # :nodoc:
+ def self.prepare(object, store_attribute)
+ attribute = object.send(store_attribute)
+ unless attribute.is_a?(ActiveSupport::HashWithIndifferentAccess)
+ attribute = IndifferentCoder.as_indifferent_hash(attribute)
+ object.send :"#{store_attribute}=", attribute
+ end
+ attribute
+ end
+ end
+
+ class IndifferentCoder # :nodoc:
+ def initialize(attr_name, coder_or_class_name)
+ @coder =
+ if coder_or_class_name.respond_to?(:load) && coder_or_class_name.respond_to?(:dump)
+ coder_or_class_name
+ else
+ ActiveRecord::Coders::YAMLColumn.new(attr_name, coder_or_class_name || Object)
+ end
+ end
+
+ def dump(obj)
+ @coder.dump self.class.as_indifferent_hash(obj)
+ end
+
+ def load(yaml)
+ self.class.as_indifferent_hash(@coder.load(yaml || ""))
+ end
+
+ def self.as_indifferent_hash(obj)
+ case obj
+ when ActiveSupport::HashWithIndifferentAccess
+ obj
+ when Hash
+ obj.with_indifferent_access
+ else
+ ActiveSupport::HashWithIndifferentAccess.new
+ end
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/suppressor.rb b/activerecord/lib/active_record/suppressor.rb
new file mode 100644
index 0000000000..8cdb8e0765
--- /dev/null
+++ b/activerecord/lib/active_record/suppressor.rb
@@ -0,0 +1,61 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ # ActiveRecord::Suppressor prevents the receiver from being saved during
+ # a given block.
+ #
+ # For example, here's a pattern of creating notifications when new comments
+ # are posted. (The notification may in turn trigger an email, a push
+ # notification, or just appear in the UI somewhere):
+ #
+ # class Comment < ActiveRecord::Base
+ # belongs_to :commentable, polymorphic: true
+ # after_create -> { Notification.create! comment: self,
+ # recipients: commentable.recipients }
+ # end
+ #
+  # That's what you want the bulk of the time. A new comment creates a new
+  # Notification. But there may well be edge cases, like copying a commentable
+  # and its comments, where you don't want that. So you'd have a concern
+ # something like this:
+ #
+ # module Copyable
+ # def copy_to(destination)
+ # Notification.suppress do
+ # # Copy logic that creates new comments that we do not want
+ # # triggering notifications.
+ # end
+ # end
+ # end
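+  #
+  # Inside such a block, +save+ and +save!+ on the suppressed class return +true+
+  # without writing to the database. A sketch reusing the models above (attribute
+  # names are illustrative):
+  #
+  #   Notification.suppress do
+  #     Notification.create!(comment: comment) # built, but never persisted
+  #   end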
+ module Suppressor
+ extend ActiveSupport::Concern
+
+ module ClassMethods
+ def suppress(&block)
+ previous_state = SuppressorRegistry.suppressed[name]
+ SuppressorRegistry.suppressed[name] = true
+ yield
+ ensure
+ SuppressorRegistry.suppressed[name] = previous_state
+ end
+ end
+
+ def save(*) # :nodoc:
+ SuppressorRegistry.suppressed[self.class.name] ? true : super
+ end
+
+ def save!(*) # :nodoc:
+ SuppressorRegistry.suppressed[self.class.name] ? true : super
+ end
+ end
+
+ class SuppressorRegistry # :nodoc:
+ extend ActiveSupport::PerThreadRegistry
+
+ attr_reader :suppressed
+
+ def initialize
+ @suppressed = {}
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/table_metadata.rb b/activerecord/lib/active_record/table_metadata.rb
new file mode 100644
index 0000000000..71351449e1
--- /dev/null
+++ b/activerecord/lib/active_record/table_metadata.rb
@@ -0,0 +1,75 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ class TableMetadata # :nodoc:
+ delegate :foreign_type, :foreign_key, to: :association, prefix: true
+ delegate :association_primary_key, to: :association
+
+ def initialize(klass, arel_table, association = nil)
+ @klass = klass
+ @arel_table = arel_table
+ @association = association
+ end
+
+ def resolve_column_aliases(hash)
+ new_hash = hash.dup
+ hash.each do |key, _|
+ if (key.is_a?(Symbol)) && klass.attribute_alias?(key)
+ new_hash[klass.attribute_alias(key)] = new_hash.delete(key)
+ end
+ end
+ new_hash
+ end
+
+ def arel_attribute(column_name)
+ if klass
+ klass.arel_attribute(column_name, arel_table)
+ else
+ arel_table[column_name]
+ end
+ end
+
+ def type(column_name)
+ if klass
+ klass.type_for_attribute(column_name.to_s)
+ else
+ Type.default_value
+ end
+ end
+
+ def has_column?(column_name)
+ klass && klass.columns_hash.key?(column_name.to_s)
+ end
+
+ def associated_with?(association_name)
+ klass && klass._reflect_on_association(association_name)
+ end
+
+ def associated_table(table_name)
+ association = klass._reflect_on_association(table_name) || klass._reflect_on_association(table_name.to_s.singularize)
+
+ if !association && table_name == arel_table.name
+ return self
+ elsif association && !association.polymorphic?
+ association_klass = association.klass
+ arel_table = association_klass.arel_table.alias(table_name)
+ else
+ type_caster = TypeCaster::Connection.new(klass, table_name)
+ association_klass = nil
+ arel_table = Arel::Table.new(table_name, type_caster: type_caster)
+ end
+
+ TableMetadata.new(association_klass, arel_table, association)
+ end
+
+ def polymorphic_association?
+ association && association.polymorphic?
+ end
+
+ # TODO Change this to private once we've dropped Ruby 2.2 support.
+ # Workaround for Ruby 2.2 "private attribute?" warning.
+ protected
+
+ attr_reader :klass, :arel_table, :association
+ end
+end
diff --git a/activerecord/lib/active_record/tasks/database_tasks.rb b/activerecord/lib/active_record/tasks/database_tasks.rb
new file mode 100644
index 0000000000..0f3f84ca08
--- /dev/null
+++ b/activerecord/lib/active_record/tasks/database_tasks.rb
@@ -0,0 +1,327 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ module Tasks # :nodoc:
+ class DatabaseAlreadyExists < StandardError; end # :nodoc:
+ class DatabaseNotSupported < StandardError; end # :nodoc:
+
+ # ActiveRecord::Tasks::DatabaseTasks is a utility class, which encapsulates
+    # logic behind common tasks used to manage databases and migrations.
+ #
+ # The tasks defined here are used with Rake tasks provided by Active Record.
+ #
+ # In order to use DatabaseTasks, a few config values need to be set. All the needed
+    # config values are set by Rails already, so you only need to set them if you
+    # want to change the defaults or use Active Record outside of Rails
+    # (in that case, after configuring the database tasks, you can also use the rake tasks
+ # defined in Active Record).
+ #
+ # The possible config values are:
+ #
+ # * +env+: current environment (like Rails.env).
+ # * +database_configuration+: configuration of your databases (as in +config/database.yml+).
+ # * +db_dir+: your +db+ directory.
+    # * +fixtures_path+: a path to the fixtures directory.
+ # * +migrations_paths+: a list of paths to directories with migrations.
+    # * +seed_loader+: an object which will load seeds; it needs to respond to the +load_seed+ method.
+ # * +root+: a path to the root of the application.
+ #
+    # Example usage of DatabaseTasks outside Rails could look like this:
+ #
+ # include ActiveRecord::Tasks
+ # DatabaseTasks.database_configuration = YAML.load_file('my_database_config.yml')
+ # DatabaseTasks.db_dir = 'db'
+ # # other settings...
+ #
+ # DatabaseTasks.create_current('production')
+ module DatabaseTasks
+ ##
+ # :singleton-method:
+      # Extra flags passed to the database CLI tool (mysqldump/pg_dump) when calling db:structure:dump
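+      #
+      # For example (the flag shown is illustrative and depends on your database CLI):
+      #
+      #   ActiveRecord::Tasks::DatabaseTasks.structure_dump_flags = ['--no-comments']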
+ mattr_accessor :structure_dump_flags, instance_accessor: false
+
+ ##
+ # :singleton-method:
+      # Extra flags passed to the database CLI tool when calling db:structure:load
+ mattr_accessor :structure_load_flags, instance_accessor: false
+
+ extend self
+
+ attr_writer :current_config, :db_dir, :migrations_paths, :fixtures_path, :root, :env, :seed_loader
+ attr_accessor :database_configuration
+
+ LOCAL_HOSTS = ["127.0.0.1", "localhost"]
+
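+      # Guards destructive tasks: raises ActiveRecord::ProtectedEnvironmentError when the
+      # environment stored in the database is protected (production by default), and
+      # ActiveRecord::EnvironmentMismatchError when it differs from the current environment.
+      # The check is skipped entirely when the +DISABLE_DATABASE_ENVIRONMENT_CHECK+
+      # environment variable is set.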
+ def check_protected_environments!
+ unless ENV["DISABLE_DATABASE_ENVIRONMENT_CHECK"]
+ current = ActiveRecord::Migrator.current_environment
+ stored = ActiveRecord::Migrator.last_stored_environment
+
+ if ActiveRecord::Migrator.protected_environment?
+ raise ActiveRecord::ProtectedEnvironmentError.new(stored)
+ end
+
+ if stored && stored != current
+ raise ActiveRecord::EnvironmentMismatchError.new(current: current, stored: stored)
+ end
+ end
+ rescue ActiveRecord::NoDatabaseError
+ end
+
+ def register_task(pattern, task)
+ @tasks ||= {}
+ @tasks[pattern] = task
+ end
+
+ register_task(/mysql/, "ActiveRecord::Tasks::MySQLDatabaseTasks")
+ register_task(/postgresql/, "ActiveRecord::Tasks::PostgreSQLDatabaseTasks")
+ register_task(/sqlite/, "ActiveRecord::Tasks::SQLiteDatabaseTasks")
+
+ def db_dir
+ @db_dir ||= Rails.application.config.paths["db"].first
+ end
+
+ def migrations_paths
+ @migrations_paths ||= Rails.application.paths["db/migrate"].to_a
+ end
+
+ def fixtures_path
+ @fixtures_path ||= if ENV["FIXTURES_PATH"]
+ File.join(root, ENV["FIXTURES_PATH"])
+ else
+ File.join(root, "test", "fixtures")
+ end
+ end
+
+ def root
+ @root ||= Rails.root
+ end
+
+ def env
+ @env ||= Rails.env
+ end
+
+ def seed_loader
+ @seed_loader ||= Rails.application
+ end
+
+ def current_config(options = {})
+ options.reverse_merge! env: env
+ if options.has_key?(:config)
+ @current_config = options[:config]
+ else
+ @current_config ||= ActiveRecord::Base.configurations[options[:env]]
+ end
+ end
+
+ def create(*arguments)
+ configuration = arguments.first
+ class_for_adapter(configuration["adapter"]).new(*arguments).create
+ $stdout.puts "Created database '#{configuration['database']}'"
+ rescue DatabaseAlreadyExists
+ $stderr.puts "Database '#{configuration['database']}' already exists"
+ rescue Exception => error
+ $stderr.puts error
+ $stderr.puts "Couldn't create database for #{configuration.inspect}"
+ raise
+ end
+
+ def create_all
+ old_pool = ActiveRecord::Base.connection_handler.retrieve_connection_pool(ActiveRecord::Base.connection_specification_name)
+ each_local_configuration { |configuration| create configuration }
+ if old_pool
+ ActiveRecord::Base.connection_handler.establish_connection(old_pool.spec.to_hash)
+ end
+ end
+
+ def create_current(environment = env)
+ each_current_configuration(environment) { |configuration|
+ create configuration
+ }
+ ActiveRecord::Base.establish_connection(environment.to_sym)
+ end
+
+ def drop(*arguments)
+ configuration = arguments.first
+ class_for_adapter(configuration["adapter"]).new(*arguments).drop
+ $stdout.puts "Dropped database '#{configuration['database']}'"
+ rescue ActiveRecord::NoDatabaseError
+ $stderr.puts "Database '#{configuration['database']}' does not exist"
+ rescue Exception => error
+ $stderr.puts error
+ $stderr.puts "Couldn't drop database '#{configuration['database']}'"
+ raise
+ end
+
+ def drop_all
+ each_local_configuration { |configuration| drop configuration }
+ end
+
+ def drop_current(environment = env)
+ each_current_configuration(environment) { |configuration|
+ drop configuration
+ }
+ end
+
+ def migrate
+ raise "Empty VERSION provided" if ENV["VERSION"] && ENV["VERSION"].empty?
+
+ verbose = ENV["VERBOSE"] ? ENV["VERBOSE"] != "false" : true
+ version = ENV["VERSION"] ? ENV["VERSION"].to_i : nil
+ scope = ENV["SCOPE"]
+ verbose_was, Migration.verbose = Migration.verbose, verbose
+ Migrator.migrate(migrations_paths, version) do |migration|
+ scope.blank? || scope == migration.scope
+ end
+ ActiveRecord::Base.clear_cache!
+ ensure
+ Migration.verbose = verbose_was
+ end
+
+ def charset_current(environment = env)
+ charset ActiveRecord::Base.configurations[environment]
+ end
+
+ def charset(*arguments)
+ configuration = arguments.first
+ class_for_adapter(configuration["adapter"]).new(*arguments).charset
+ end
+
+ def collation_current(environment = env)
+ collation ActiveRecord::Base.configurations[environment]
+ end
+
+ def collation(*arguments)
+ configuration = arguments.first
+ class_for_adapter(configuration["adapter"]).new(*arguments).collation
+ end
+
+ def purge(configuration)
+ class_for_adapter(configuration["adapter"]).new(configuration).purge
+ end
+
+ def purge_all
+ each_local_configuration { |configuration|
+ purge configuration
+ }
+ end
+
+ def purge_current(environment = env)
+ each_current_configuration(environment) { |configuration|
+ purge configuration
+ }
+ ActiveRecord::Base.establish_connection(environment.to_sym)
+ end
+
+ def structure_dump(*arguments)
+ configuration = arguments.first
+ filename = arguments.delete_at 1
+ class_for_adapter(configuration["adapter"]).new(*arguments).structure_dump(filename, structure_dump_flags)
+ end
+
+ def structure_load(*arguments)
+ configuration = arguments.first
+ filename = arguments.delete_at 1
+ class_for_adapter(configuration["adapter"]).new(*arguments).structure_load(filename, structure_load_flags)
+ end
+
+ def load_schema(configuration, format = ActiveRecord::Base.schema_format, file = nil) # :nodoc:
+ file ||= schema_file(format)
+
+ case format
+ when :ruby
+ check_schema_file(file)
+ ActiveRecord::Base.establish_connection(configuration)
+ load(file)
+ when :sql
+ check_schema_file(file)
+ structure_load(configuration, file)
+ else
+ raise ArgumentError, "unknown format #{format.inspect}"
+ end
+ ActiveRecord::InternalMetadata.create_table
+ ActiveRecord::InternalMetadata[:environment] = ActiveRecord::Migrator.current_environment
+ end
+
+ def schema_file(format = ActiveRecord::Base.schema_format)
+ case format
+ when :ruby
+ File.join(db_dir, "schema.rb")
+ when :sql
+ File.join(db_dir, "structure.sql")
+ end
+ end
+
+ def load_schema_current(format = ActiveRecord::Base.schema_format, file = nil, environment = env)
+ each_current_configuration(environment) { |configuration|
+ load_schema configuration, format, file
+ }
+ ActiveRecord::Base.establish_connection(environment.to_sym)
+ end
+
+ def check_schema_file(filename)
+ unless File.exist?(filename)
+ message = %{#{filename} doesn't exist yet. Run `rails db:migrate` to create it, then try again.}.dup
+ message << %{ If you do not intend to use a database, you should instead alter #{Rails.root}/config/application.rb to limit the frameworks that will be loaded.} if defined?(::Rails.root)
+ Kernel.abort message
+ end
+ end
+
+ def load_seed
+ if seed_loader
+ seed_loader.load_seed
+ else
+ raise "You tried to load seed data, but no seed loader is specified. Please specify seed " \
+ "loader with ActiveRecord::Tasks::DatabaseTasks.seed_loader = your_seed_loader\n" \
+ "Seed loader should respond to load_seed method"
+ end
+ end
+
+ # Dumps the schema cache in YAML format for the connection into the file
+ #
+ # ==== Examples:
+ # ActiveRecord::Tasks::DatabaseTasks.dump_schema_cache(ActiveRecord::Base.connection, "tmp/schema_dump.yaml")
+ def dump_schema_cache(conn, filename)
+ conn.schema_cache.clear!
+ conn.data_sources.each { |table| conn.schema_cache.add(table) }
+ open(filename, "wb") { |f| f.write(YAML.dump(conn.schema_cache)) }
+ end
+
+ private
+
+ def class_for_adapter(adapter)
+ _key, task = @tasks.each_pair.detect { |pattern, _task| adapter[pattern] }
+ unless task
+ raise DatabaseNotSupported, "Rake tasks not supported by '#{adapter}' adapter"
+ end
+ task.is_a?(String) ? task.constantize : task
+ end
+
+ def each_current_configuration(environment)
+ environments = [environment]
+ environments << "test" if environment == "development"
+
+ configurations = ActiveRecord::Base.configurations.values_at(*environments)
+ configurations.compact.each do |configuration|
+ yield configuration unless configuration["database"].blank?
+ end
+ end
+
+ def each_local_configuration
+ ActiveRecord::Base.configurations.each_value do |configuration|
+ next unless configuration["database"]
+
+ if local_database?(configuration)
+ yield configuration
+ else
+ $stderr.puts "This task only modifies local databases. #{configuration['database']} is on a remote host."
+ end
+ end
+ end
+
+ def local_database?(configuration)
+ configuration["host"].blank? || LOCAL_HOSTS.include?(configuration["host"])
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/tasks/mysql_database_tasks.rb b/activerecord/lib/active_record/tasks/mysql_database_tasks.rb
new file mode 100644
index 0000000000..84265aa9e3
--- /dev/null
+++ b/activerecord/lib/active_record/tasks/mysql_database_tasks.rb
@@ -0,0 +1,162 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ module Tasks # :nodoc:
+ class MySQLDatabaseTasks # :nodoc:
+ ACCESS_DENIED_ERROR = 1045
+
+ delegate :connection, :establish_connection, to: ActiveRecord::Base
+
+ def initialize(configuration)
+ @configuration = configuration
+ end
+
+ def create
+ establish_connection configuration_without_database
+ connection.create_database configuration["database"], creation_options
+ establish_connection configuration
+ rescue ActiveRecord::StatementInvalid => error
+ if error.message.include?("database exists")
+ raise DatabaseAlreadyExists
+ else
+ raise
+ end
+ rescue error_class => error
+ if error.respond_to?(:errno) && error.errno == ACCESS_DENIED_ERROR
+ $stdout.print error.message
+ establish_connection root_configuration_without_database
+ connection.create_database configuration["database"], creation_options
+ if configuration["username"] != "root"
+ connection.execute grant_statement.gsub(/\s+/, " ").strip
+ end
+ establish_connection configuration
+ else
+ $stderr.puts error.inspect
+ $stderr.puts "Couldn't create database for #{configuration.inspect}, #{creation_options.inspect}"
+ $stderr.puts "(If you set the charset manually, make sure you have a matching collation)" if configuration["encoding"]
+ end
+ end
+
+ def drop
+ establish_connection configuration
+ connection.drop_database configuration["database"]
+ end
+
+ def purge
+ establish_connection configuration
+ connection.recreate_database configuration["database"], creation_options
+ end
+
+ def charset
+ connection.charset
+ end
+
+ def collation
+ connection.collation
+ end
+
+ def structure_dump(filename, extra_flags)
+ args = prepare_command_options
+ args.concat(["--result-file", "#{filename}"])
+ args.concat(["--no-data"])
+ args.concat(["--routines"])
+ args.concat(["--skip-comments"])
+
+ ignore_tables = ActiveRecord::SchemaDumper.ignore_tables
+ if ignore_tables.any?
+ args += ignore_tables.map { |table| "--ignore-table=#{configuration['database']}.#{table}" }
+ end
+
+ args.concat(["#{configuration['database']}"])
+ args.unshift(*extra_flags) if extra_flags
+
+ run_cmd("mysqldump", args, "dumping")
+ end
+
+ def structure_load(filename, extra_flags)
+ args = prepare_command_options
+ args.concat(["--execute", %{SET FOREIGN_KEY_CHECKS = 0; SOURCE #{filename}; SET FOREIGN_KEY_CHECKS = 1}])
+ args.concat(["--database", "#{configuration['database']}"])
+ args.unshift(*extra_flags) if extra_flags
+
+ run_cmd("mysql", args, "loading")
+ end
+
+ private
+
+ def configuration
+ @configuration
+ end
+
+ def configuration_without_database
+ configuration.merge("database" => nil)
+ end
+
+ def creation_options
+ Hash.new.tap do |options|
+ options[:charset] = configuration["encoding"] if configuration.include? "encoding"
+ options[:collation] = configuration["collation"] if configuration.include? "collation"
+ end
+ end
+
+ def error_class
+ if configuration["adapter"].include?("jdbc")
+ require_relative "../railties/jdbcmysql_error"
+ ArJdbcMySQL::Error
+ elsif defined?(Mysql2)
+ Mysql2::Error
+ else
+ StandardError
+ end
+ end
+
+ def grant_statement
+ <<-SQL
+GRANT ALL PRIVILEGES ON `#{configuration['database']}`.*
+ TO '#{configuration['username']}'@'localhost'
+IDENTIFIED BY '#{configuration['password']}' WITH GRANT OPTION;
+ SQL
+ end
+
+ def root_configuration_without_database
+ configuration_without_database.merge(
+ "username" => "root",
+ "password" => root_password
+ )
+ end
+
+ def root_password
+ $stdout.print "Please provide the root password for your MySQL installation\n>"
+ $stdin.gets.strip
+ end
+
+ def prepare_command_options
+ args = {
+ "host" => "--host",
+ "port" => "--port",
+ "socket" => "--socket",
+ "username" => "--user",
+ "password" => "--password",
+ "encoding" => "--default-character-set",
+ "sslca" => "--ssl-ca",
+ "sslcert" => "--ssl-cert",
+ "sslcapath" => "--ssl-capath",
+ "sslcipher" => "--ssl-cipher",
+ "sslkey" => "--ssl-key"
+ }.map { |opt, arg| "#{arg}=#{configuration[opt]}" if configuration[opt] }.compact
+
+ args
+ end
+
+ def run_cmd(cmd, args, action)
+ fail run_cmd_error(cmd, args, action) unless Kernel.system(cmd, *args)
+ end
+
+ def run_cmd_error(cmd, args, action)
+ msg = "failed to execute: `#{cmd}`\n".dup
+ msg << "Please check the output above for any errors and make sure that `#{cmd}` is installed in your PATH and has proper permissions.\n\n"
+ msg
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/tasks/postgresql_database_tasks.rb b/activerecord/lib/active_record/tasks/postgresql_database_tasks.rb
new file mode 100644
index 0000000000..a2e74efc2b
--- /dev/null
+++ b/activerecord/lib/active_record/tasks/postgresql_database_tasks.rb
@@ -0,0 +1,143 @@
+# frozen_string_literal: true
+
+require "tempfile"
+
+module ActiveRecord
+ module Tasks # :nodoc:
+ class PostgreSQLDatabaseTasks # :nodoc:
+ DEFAULT_ENCODING = ENV["CHARSET"] || "utf8"
+ ON_ERROR_STOP_1 = "ON_ERROR_STOP=1".freeze
+ SQL_COMMENT_BEGIN = "--".freeze
+
+ delegate :connection, :establish_connection, :clear_active_connections!,
+ to: ActiveRecord::Base
+
+ def initialize(configuration)
+ @configuration = configuration
+ end
+
+ def create(master_established = false)
+ establish_master_connection unless master_established
+ connection.create_database configuration["database"],
+ configuration.merge("encoding" => encoding)
+ establish_connection configuration
+ rescue ActiveRecord::StatementInvalid => error
+ if /database .* already exists/.match?(error.message)
+ raise DatabaseAlreadyExists
+ else
+ raise
+ end
+ end
+
+ def drop
+ establish_master_connection
+ connection.drop_database configuration["database"]
+ end
+
+ def charset
+ connection.encoding
+ end
+
+ def collation
+ connection.collation
+ end
+
+ def purge
+ clear_active_connections!
+ drop
+ create true
+ end
+
+ def structure_dump(filename, extra_flags)
+ set_psql_env
+
+ search_path = \
+ case ActiveRecord::Base.dump_schemas
+ when :schema_search_path
+ configuration["schema_search_path"]
+ when :all
+ nil
+ when String
+ ActiveRecord::Base.dump_schemas
+ end
+
+ args = ["-s", "-x", "-O", "-f", filename]
+ args.concat(Array(extra_flags)) if extra_flags
+ unless search_path.blank?
+ args += search_path.split(",").map do |part|
+ "--schema=#{part.strip}"
+ end
+ end
+
+ ignore_tables = ActiveRecord::SchemaDumper.ignore_tables
+ if ignore_tables.any?
+ args += ignore_tables.flat_map { |table| ["-T", table] }
+ end
+
+ args << configuration["database"]
+ run_cmd("pg_dump", args, "dumping")
+ remove_sql_header_comments(filename)
+ File.open(filename, "a") { |f| f << "SET search_path TO #{connection.schema_search_path};\n\n" }
+ end
+
+ def structure_load(filename, extra_flags)
+ set_psql_env
+ args = ["-v", ON_ERROR_STOP_1, "-q", "-f", filename]
+ args.concat(Array(extra_flags)) if extra_flags
+ args << configuration["database"]
+ run_cmd("psql", args, "loading")
+ end
+
+ private
+
+ def configuration
+ @configuration
+ end
+
+ def encoding
+ configuration["encoding"] || DEFAULT_ENCODING
+ end
+
+ def establish_master_connection
+ establish_connection configuration.merge(
+ "database" => "postgres",
+ "schema_search_path" => "public"
+ )
+ end
+
+ def set_psql_env
+ ENV["PGHOST"] = configuration["host"] if configuration["host"]
+ ENV["PGPORT"] = configuration["port"].to_s if configuration["port"]
+ ENV["PGPASSWORD"] = configuration["password"].to_s if configuration["password"]
+ ENV["PGUSER"] = configuration["username"].to_s if configuration["username"]
+ end
+
+ def run_cmd(cmd, args, action)
+ fail run_cmd_error(cmd, args, action) unless Kernel.system(cmd, *args)
+ end
+
+ def run_cmd_error(cmd, args, action)
+ msg = "failed to execute:\n"
+ msg << "#{cmd} #{args.join(' ')}\n\n"
+ msg << "Please check the output above for any errors and make sure that `#{cmd}` is installed in your PATH and has proper permissions.\n\n"
+ msg
+ end
+
+ def remove_sql_header_comments(filename)
+ removing_comments = true
+ tempfile = Tempfile.open("uncommented_structure.sql")
+ begin
+ File.foreach(filename) do |line|
+ unless removing_comments && (line.start_with?(SQL_COMMENT_BEGIN) || line.blank?)
+ tempfile << line
+ removing_comments = false
+ end
+ end
+ ensure
+ tempfile.close
+ end
+ FileUtils.mv(tempfile.path, filename)
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/tasks/sqlite_database_tasks.rb b/activerecord/lib/active_record/tasks/sqlite_database_tasks.rb
new file mode 100644
index 0000000000..abdd6db64a
--- /dev/null
+++ b/activerecord/lib/active_record/tasks/sqlite_database_tasks.rb
@@ -0,0 +1,83 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ module Tasks # :nodoc:
+ class SQLiteDatabaseTasks # :nodoc:
+ delegate :connection, :establish_connection, to: ActiveRecord::Base
+
+ def initialize(configuration, root = ActiveRecord::Tasks::DatabaseTasks.root)
+ @configuration, @root = configuration, root
+ end
+
+ def create
+ raise DatabaseAlreadyExists if File.exist?(configuration["database"])
+
+ establish_connection configuration
+ connection
+ end
+
+ def drop
+ require "pathname"
+ path = Pathname.new configuration["database"]
+ file = path.absolute? ? path.to_s : File.join(root, path)
+
+ FileUtils.rm(file)
+ rescue Errno::ENOENT => error
+ raise NoDatabaseError.new(error.message)
+ end
+
+ def purge
+ drop
+ rescue NoDatabaseError
+ ensure
+ create
+ end
+
+ def charset
+ connection.encoding
+ end
+
+ def structure_dump(filename, extra_flags)
+ args = []
+ args.concat(Array(extra_flags)) if extra_flags
+ args << configuration["database"]
+
+ ignore_tables = ActiveRecord::SchemaDumper.ignore_tables
+ if ignore_tables.any?
+ condition = ignore_tables.map { |table| connection.quote(table) }.join(", ")
+ args << "SELECT sql FROM sqlite_master WHERE tbl_name NOT IN (#{condition}) ORDER BY tbl_name, type DESC, name"
+ else
+ args << ".schema"
+ end
+ run_cmd("sqlite3", args, filename)
+ end
+
+ def structure_load(filename, extra_flags)
+ dbfile = configuration["database"]
+ flags = extra_flags.join(" ") if extra_flags
+ `sqlite3 #{flags} #{dbfile} < "#{filename}"`
+ end
+
+ private
+
+ def configuration
+ @configuration
+ end
+
+ def root
+ @root
+ end
+
+ def run_cmd(cmd, args, out)
+ fail run_cmd_error(cmd, args) unless Kernel.system(cmd, *args, out: out)
+ end
+
+ def run_cmd_error(cmd, args)
+ msg = "failed to execute:\n"
+ msg << "#{cmd} #{args.join(' ')}\n\n"
+ msg << "Please check the output above for any errors and make sure that `#{cmd}` is installed in your PATH and has proper permissions.\n\n"
+ msg
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/timestamp.rb b/activerecord/lib/active_record/timestamp.rb
new file mode 100644
index 0000000000..5da3759e5a
--- /dev/null
+++ b/activerecord/lib/active_record/timestamp.rb
@@ -0,0 +1,146 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ # = Active Record \Timestamp
+ #
+ # Active Record automatically timestamps create and update operations if the
+ # table has fields named <tt>created_at/created_on</tt> or
+ # <tt>updated_at/updated_on</tt>.
+ #
+ # Timestamping can be turned off by setting:
+ #
+ # config.active_record.record_timestamps = false
+ #
+ # Timestamps are in UTC by default but you can use the local timezone by setting:
+ #
+ # config.active_record.default_timezone = :local
+ #
+ # == Time Zone aware attributes
+ #
+ # Active Record keeps all the <tt>datetime</tt> and <tt>time</tt> columns
+ # timezone aware. By default, these values are stored in the database as UTC
+ # and converted back to the current <tt>Time.zone</tt> when pulled from the database.
+ #
+ # This feature can be turned off completely by setting:
+ #
+ # config.active_record.time_zone_aware_attributes = false
+ #
+ # You can also specify that only <tt>datetime</tt> columns should be time-zone
+ # aware (while <tt>time</tt> should not) by setting:
+ #
+ # ActiveRecord::Base.time_zone_aware_types = [:datetime]
+ #
+ # You can also add database specific timezone aware types. For example, for PostgreSQL:
+ #
+ # ActiveRecord::Base.time_zone_aware_types += [:tsrange, :tstzrange]
+ #
+ # Finally, you can indicate specific attributes of a model for which time zone
+  # conversion should not be applied, for instance by setting:
+ #
+ # class Topic < ActiveRecord::Base
+ # self.skip_time_zone_conversion_for_attributes = [:written_on]
+ # end
+ module Timestamp
+ extend ActiveSupport::Concern
+
+ included do
+ class_attribute :record_timestamps, default: true
+ end
+
+ def initialize_dup(other) # :nodoc:
+ super
+ clear_timestamp_attributes
+ end
+
+ class_methods do
+ private
+ def timestamp_attributes_for_create_in_model
+ timestamp_attributes_for_create.select { |c| column_names.include?(c) }
+ end
+
+ def timestamp_attributes_for_update_in_model
+ timestamp_attributes_for_update.select { |c| column_names.include?(c) }
+ end
+
+ def all_timestamp_attributes_in_model
+ timestamp_attributes_for_create_in_model + timestamp_attributes_for_update_in_model
+ end
+
+ def timestamp_attributes_for_create
+ ["created_at", "created_on"]
+ end
+
+ def timestamp_attributes_for_update
+ ["updated_at", "updated_on"]
+ end
+
+ def current_time_from_proper_timezone
+ default_timezone == :utc ? Time.now.utc : Time.now
+ end
+ end
+
+ private
+
+ def _create_record
+ if record_timestamps
+ current_time = current_time_from_proper_timezone
+
+ all_timestamp_attributes_in_model.each do |column|
+ if !attribute_present?(column)
+ _write_attribute(column, current_time)
+ end
+ end
+ end
+
+ super
+ end
+
+ def _update_record(*args, touch: true, **options)
+ if touch && should_record_timestamps?
+ current_time = current_time_from_proper_timezone
+
+ timestamp_attributes_for_update_in_model.each do |column|
+ next if will_save_change_to_attribute?(column)
+ _write_attribute(column, current_time)
+ end
+ end
+ super(*args)
+ end
+
+ def should_record_timestamps?
+ record_timestamps && (!partial_writes? || has_changes_to_save?)
+ end
+
+ def timestamp_attributes_for_create_in_model
+ self.class.send(:timestamp_attributes_for_create_in_model)
+ end
+
+ def timestamp_attributes_for_update_in_model
+ self.class.send(:timestamp_attributes_for_update_in_model)
+ end
+
+ def all_timestamp_attributes_in_model
+ self.class.send(:all_timestamp_attributes_in_model)
+ end
+
+ def current_time_from_proper_timezone
+ self.class.send(:current_time_from_proper_timezone)
+ end
+
+ def max_updated_column_timestamp(timestamp_names = timestamp_attributes_for_update_in_model)
+ timestamp_names
+ .map { |attr| self[attr] }
+ .compact
+ .map(&:to_time)
+ .max
+ end
+
+ # Clear attributes and changed_attributes
+ def clear_timestamp_attributes
+ all_timestamp_attributes_in_model.each do |attribute_name|
+ self[attribute_name] = nil
+ clear_attribute_changes([attribute_name])
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/touch_later.rb b/activerecord/lib/active_record/touch_later.rb
new file mode 100644
index 0000000000..f70b7c50a2
--- /dev/null
+++ b/activerecord/lib/active_record/touch_later.rb
@@ -0,0 +1,64 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ # = Active Record Touch Later
+ module TouchLater
+ extend ActiveSupport::Concern
+
+ included do
+ before_commit_without_transaction_enrollment :touch_deferred_attributes
+ end
+
+ def touch_later(*names) # :nodoc:
+ unless persisted?
+ raise ActiveRecordError, <<-MSG.squish
+ cannot touch on a new or destroyed record object. Consider using
+ persisted?, new_record?, or destroyed? before touching
+ MSG
+ end
+
+ @_defer_touch_attrs ||= timestamp_attributes_for_update_in_model
+ @_defer_touch_attrs |= names
+ @_touch_time = current_time_from_proper_timezone
+
+ surreptitiously_touch @_defer_touch_attrs
+ self.class.connection.add_transaction_record self
+
+ # touch the parents as we are not calling the after_save callbacks
+ self.class.reflect_on_all_associations(:belongs_to).each do |r|
+ if touch = r.options[:touch]
+ ActiveRecord::Associations::Builder::BelongsTo.touch_record(self, changes_to_save, r.foreign_key, r.name, touch, :touch_later)
+ end
+ end
+ end
+
+ def touch(*names, time: nil) # :nodoc:
+ if has_defer_touch_attrs?
+ names |= @_defer_touch_attrs
+ end
+ super(*names, time: time)
+ end
+
+ private
+
+ def surreptitiously_touch(attrs)
+ attrs.each { |attr| write_attribute attr, @_touch_time }
+ clear_attribute_changes attrs
+ end
+
+ def touch_deferred_attributes
+ if has_defer_touch_attrs? && persisted?
+ touch(*@_defer_touch_attrs, time: @_touch_time)
+ @_defer_touch_attrs, @_touch_time = nil, nil
+ end
+ end
+
+ def has_defer_touch_attrs?
+ defined?(@_defer_touch_attrs) && @_defer_touch_attrs.present?
+ end
+
+ def belongs_to_touch_method
+ :touch_later
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/transactions.rb b/activerecord/lib/active_record/transactions.rb
new file mode 100644
index 0000000000..3e8a0789df
--- /dev/null
+++ b/activerecord/lib/active_record/transactions.rb
@@ -0,0 +1,499 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ # See ActiveRecord::Transactions::ClassMethods for documentation.
+ module Transactions
+ extend ActiveSupport::Concern
+ #:nodoc:
+ ACTIONS = [:create, :destroy, :update]
+
+ included do
+ define_callbacks :commit, :rollback,
+ :before_commit,
+ :before_commit_without_transaction_enrollment,
+ :commit_without_transaction_enrollment,
+ :rollback_without_transaction_enrollment,
+ scope: [:kind, :name]
+ end
+
+ # = Active Record Transactions
+ #
+ # \Transactions are protective blocks where SQL statements are only permanent
+ # if they can all succeed as one atomic action. The classic example is a
+ # transfer between two accounts where you can only have a deposit if the
+ # withdrawal succeeded and vice versa. \Transactions enforce the integrity of
+ # the database and guard the data against program errors or database
+ # break-downs. So basically you should use transaction blocks whenever you
+ # have a number of statements that must be executed together or not at all.
+ #
+ # For example:
+ #
+ # ActiveRecord::Base.transaction do
+ # david.withdrawal(100)
+ # mary.deposit(100)
+ # end
+ #
+ # This example will only take money from David and give it to Mary if neither
+ # +withdrawal+ nor +deposit+ raise an exception. Exceptions will force a
+ # ROLLBACK that returns the database to the state before the transaction
+ # began. Be aware, though, that the objects will _not_ have their instance
+ # data returned to their pre-transactional state.
+ #
+ # == Different Active Record classes in a single transaction
+ #
+ # Though the #transaction class method is called on some Active Record class,
+ # the objects within the transaction block need not all be instances of
+ # that class. This is because transactions are per-database connection, not
+ # per-model.
+ #
+ # In this example a +balance+ record is transactionally saved even
+ # though #transaction is called on the +Account+ class:
+ #
+ # Account.transaction do
+ # balance.save!
+ # account.save!
+ # end
+ #
+ # The #transaction method is also available as a model instance method.
+ # For example, you can also do this:
+ #
+ # balance.transaction do
+ # balance.save!
+ # account.save!
+ # end
+ #
+ # == Transactions are not distributed across database connections
+ #
+ # A transaction acts on a single database connection. If you have
+ # multiple class-specific databases, the transaction will not protect
+ # interaction among them. One workaround is to begin a transaction
+ # on each class whose models you alter:
+ #
+ # Student.transaction do
+ # Course.transaction do
+ # course.enroll(student)
+ # student.units += course.units
+ # end
+ # end
+ #
+ # This is a poor solution, but fully distributed transactions are beyond
+ # the scope of Active Record.
+ #
+ # == +save+ and +destroy+ are automatically wrapped in a transaction
+ #
+ # Both {#save}[rdoc-ref:Persistence#save] and
+ # {#destroy}[rdoc-ref:Persistence#destroy] come wrapped in a transaction that ensures
+ # that whatever you do in validations or callbacks will happen under its
+ # protected cover. So you can use validations to check for values that
+ # the transaction depends on or you can raise exceptions in the callbacks
+    # to roll back, including <tt>after_*</tt> callbacks.
+ #
+ # As a consequence changes to the database are not seen outside your connection
+ # until the operation is complete. For example, if you try to update the index
+ # of a search engine in +after_save+ the indexer won't see the updated record.
+ # The #after_commit callback is the only one that is triggered once the update
+ # is committed. See below.
+ #
+ # == Exception handling and rolling back
+ #
+    # Also keep in mind that exceptions thrown within a transaction block will
+ # be propagated (after triggering the ROLLBACK), so you should be ready to
+ # catch those in your application code.
+ #
+ # One exception is the ActiveRecord::Rollback exception, which will trigger
+ # a ROLLBACK when raised, but not be re-raised by the transaction block.
+ #
+ # *Warning*: one should not catch ActiveRecord::StatementInvalid exceptions
+ # inside a transaction block. ActiveRecord::StatementInvalid exceptions indicate that an
+ # error occurred at the database level, for example when a unique constraint
+ # is violated. On some database systems, such as PostgreSQL, database errors
+ # inside a transaction cause the entire transaction to become unusable
+ # until it's restarted from the beginning. Here is an example which
+ # demonstrates the problem:
+ #
+ # # Suppose that we have a Number model with a unique column called 'i'.
+ # Number.transaction do
+ # Number.create(i: 0)
+ # begin
+ # # This will raise a unique constraint error...
+ # Number.create(i: 0)
+ # rescue ActiveRecord::StatementInvalid
+ # # ...which we ignore.
+ # end
+ #
+ # # On PostgreSQL, the transaction is now unusable. The following
+ # # statement will cause a PostgreSQL error, even though the unique
+ # # constraint is no longer violated:
+ # Number.create(i: 1)
+ # # => "PG::Error: ERROR: current transaction is aborted, commands
+ # # ignored until end of transaction block"
+ # end
+ #
+ # One should restart the entire transaction if an
+ # ActiveRecord::StatementInvalid occurred.
+ #
+ # == Nested transactions
+ #
+ # #transaction calls can be nested. By default, this makes all database
+ # statements in the nested transaction block become part of the parent
+ # transaction. For example, the following behavior may be surprising:
+ #
+ # User.transaction do
+ # User.create(username: 'Kotori')
+ # User.transaction do
+ # User.create(username: 'Nemu')
+ # raise ActiveRecord::Rollback
+ # end
+ # end
+ #
+    # creates both "Kotori" and "Nemu". The reason is that the ActiveRecord::Rollback
+    # exception in the nested block does not issue a ROLLBACK. Since these exceptions
+    # are captured in transaction blocks, the parent block does not see them and the
+ # real transaction is committed.
+ #
+ # In order to get a ROLLBACK for the nested transaction you may ask for a real
+ # sub-transaction by passing <tt>requires_new: true</tt>. If anything goes wrong,
+ # the database rolls back to the beginning of the sub-transaction without rolling
+ # back the parent transaction. If we add it to the previous example:
+ #
+ # User.transaction do
+ # User.create(username: 'Kotori')
+ # User.transaction(requires_new: true) do
+ # User.create(username: 'Nemu')
+ # raise ActiveRecord::Rollback
+ # end
+ # end
+ #
+ # only "Kotori" is created. This works on MySQL and PostgreSQL. SQLite3 version >= '3.6.8' also supports it.
+ #
+ # Most databases don't support true nested transactions. At the time of
+ # writing, the only database that we're aware of that supports true nested
+    # transactions is MS-SQL. Because of this, Active Record emulates nested
+ # transactions by using savepoints on MySQL and PostgreSQL. See
+ # http://dev.mysql.com/doc/refman/5.7/en/savepoint.html
+ # for more information about savepoints.
+ #
+ # === \Callbacks
+ #
+ # There are two types of callbacks associated with committing and rolling back transactions:
+ # #after_commit and #after_rollback.
+ #
+ # #after_commit callbacks are called on every record saved or destroyed within a
+ # transaction immediately after the transaction is committed. #after_rollback callbacks
+ # are called on every record saved or destroyed within a transaction immediately after the
+ # transaction or savepoint is rolled back.
+ #
+ # These callbacks are useful for interacting with other systems since you will be guaranteed
+ # that the callback is only executed when the database is in a permanent state. For example,
+    # #after_commit is a good spot to put in a hook to clear a cache since clearing it from
+ # within a transaction could trigger the cache to be regenerated before the database is updated.
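+    #
+    # For example (the callback method and cache key are illustrative):
+    #
+    #   after_commit :expire_cache
+    #
+    #   def expire_cache
+    #     Rails.cache.delete(cache_key)
+    #   end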
+ #
+ # === Caveats
+ #
+    # If you're on MySQL, then do not use Data Definition Language (DDL) operations in nested
+    # transaction blocks that are emulated with savepoints. That is, do not execute statements
+ # like 'CREATE TABLE' inside such blocks. This is because MySQL automatically
+ # releases all savepoints upon executing a DDL operation. When +transaction+
+ # is finished and tries to release the savepoint it created earlier, a
+ # database error will occur because the savepoint has already been
+ # automatically released. The following example demonstrates the problem:
+ #
+ # Model.connection.transaction do # BEGIN
+ # Model.connection.transaction(requires_new: true) do # CREATE SAVEPOINT active_record_1
+ # Model.connection.create_table(...) # active_record_1 now automatically released
+ # end # RELEASE SAVEPOINT active_record_1
+ # # ^^^^ BOOM! database error!
+ # end
+ #
+ # Note that "TRUNCATE" is also a MySQL DDL statement!
+ module ClassMethods
+ # See the ConnectionAdapters::DatabaseStatements#transaction API docs.
+ def transaction(options = {}, &block)
+ connection.transaction(options, &block)
+ end
+
+ def before_commit(*args, &block) # :nodoc:
+ set_options_for_callbacks!(args)
+ set_callback(:before_commit, :before, *args, &block)
+ end
+
+ # This callback is called after a record has been created, updated, or destroyed.
+ #
+ # You can specify that the callback should only be fired by a certain action with
+ # the +:on+ option:
+ #
+ # after_commit :do_foo, on: :create
+ # after_commit :do_bar, on: :update
+ # after_commit :do_baz, on: :destroy
+ #
+ # after_commit :do_foo_bar, on: [:create, :update]
+ # after_commit :do_bar_baz, on: [:update, :destroy]
+ #
+ def after_commit(*args, &block)
+ set_options_for_callbacks!(args)
+ set_callback(:commit, :after, *args, &block)
+ end
+
+ # Shortcut for <tt>after_commit :hook, on: :create</tt>.
+ def after_create_commit(*args, &block)
+ set_options_for_callbacks!(args, on: :create)
+ set_callback(:commit, :after, *args, &block)
+ end
+
+ # Shortcut for <tt>after_commit :hook, on: :update</tt>.
+ def after_update_commit(*args, &block)
+ set_options_for_callbacks!(args, on: :update)
+ set_callback(:commit, :after, *args, &block)
+ end
+
+ # Shortcut for <tt>after_commit :hook, on: :destroy</tt>.
+ def after_destroy_commit(*args, &block)
+ set_options_for_callbacks!(args, on: :destroy)
+ set_callback(:commit, :after, *args, &block)
+ end
+
+      # This callback is called after a create, update, or destroy is rolled back.
+ #
+ # Please check the documentation of #after_commit for options.
+ def after_rollback(*args, &block)
+ set_options_for_callbacks!(args)
+ set_callback(:rollback, :after, *args, &block)
+ end
+
+ def before_commit_without_transaction_enrollment(*args, &block) # :nodoc:
+ set_options_for_callbacks!(args)
+ set_callback(:before_commit_without_transaction_enrollment, :before, *args, &block)
+ end
+
+ def after_commit_without_transaction_enrollment(*args, &block) # :nodoc:
+ set_options_for_callbacks!(args)
+ set_callback(:commit_without_transaction_enrollment, :after, *args, &block)
+ end
+
+ def after_rollback_without_transaction_enrollment(*args, &block) # :nodoc:
+ set_options_for_callbacks!(args)
+ set_callback(:rollback_without_transaction_enrollment, :after, *args, &block)
+ end
+
+ private
+
+ def set_options_for_callbacks!(args, enforced_options = {})
+ options = args.extract_options!.merge!(enforced_options)
+ args << options
+
+ if options[:on]
+ fire_on = Array(options[:on])
+ assert_valid_transaction_action(fire_on)
+ options[:if] = Array(options[:if])
+ options[:if].unshift("transaction_include_any_action?(#{fire_on})")
+ end
+ end
+
+ def assert_valid_transaction_action(actions)
+ if (actions - ACTIONS).any?
+ raise ArgumentError, ":on conditions for after_commit and after_rollback callbacks have to be one of #{ACTIONS}"
+ end
+ end
+ end
+
+ # See ActiveRecord::Transactions::ClassMethods for detailed documentation.
+ def transaction(options = {}, &block)
+ self.class.transaction(options, &block)
+ end
+
+ def destroy #:nodoc:
+ with_transaction_returning_status { super }
+ end
+
+ def save(*) #:nodoc:
+ rollback_active_record_state! do
+ with_transaction_returning_status { super }
+ end
+ end
+
+ def save!(*) #:nodoc:
+ with_transaction_returning_status { super }
+ end
+
+ def touch(*) #:nodoc:
+ with_transaction_returning_status { super }
+ end
+
+ # Reset id and @new_record if the transaction rolls back.
+ def rollback_active_record_state!
+ remember_transaction_record_state
+ yield
+ rescue Exception
+ restore_transaction_record_state
+ raise
+ ensure
+ clear_transaction_record_state
+ end
+
+ def before_committed! # :nodoc:
+ _run_before_commit_without_transaction_enrollment_callbacks
+ _run_before_commit_callbacks
+ end
+
+ # Call the #after_commit callbacks.
+ #
+ # Ensure that it is not called if the object was never persisted (failed create),
+ # but call it after the commit of a destroyed object.
+ def committed!(should_run_callbacks: true) #:nodoc:
+ if should_run_callbacks && destroyed? || persisted?
+ _run_commit_without_transaction_enrollment_callbacks
+ _run_commit_callbacks
+ end
+ ensure
+ force_clear_transaction_record_state
+ end
+
+ # Call the #after_rollback callbacks. The +force_restore_state+ argument indicates if the record
+ # state should be rolled back to the beginning or just to the last savepoint.
+ def rolledback!(force_restore_state: false, should_run_callbacks: true) #:nodoc:
+ if should_run_callbacks
+ _run_rollback_callbacks
+ _run_rollback_without_transaction_enrollment_callbacks
+ end
+ ensure
+ restore_transaction_record_state(force_restore_state)
+ clear_transaction_record_state
+ end
+
+ # Add the record to the current transaction so that the #after_rollback and #after_commit callbacks
+ # can be called.
+ def add_to_transaction
+ if has_transactional_callbacks?
+ self.class.connection.add_transaction_record(self)
+ else
+ sync_with_transaction_state
+ set_transaction_state(self.class.connection.transaction_state)
+ end
+ remember_transaction_record_state
+ end
+
+    # Executes the given block within a transaction and captures its return value as a
+    # status flag. If the status is true the transaction is committed, otherwise
+ # a ROLLBACK is issued. In any case the status flag is returned.
+ #
+ # This method is available within the context of an ActiveRecord::Base
+ # instance.
+ def with_transaction_returning_status
+ status = nil
+ self.class.transaction do
+ add_to_transaction
+ begin
+ status = yield
+ rescue ActiveRecord::Rollback
+ clear_transaction_record_state
+ status = nil
+ end
+
+ raise ActiveRecord::Rollback unless status
+ end
+ status
+ ensure
+ if @transaction_state && @transaction_state.committed?
+ clear_transaction_record_state
+ end
+ end
+
+ private
+
+ # Save the new record state and id of a record so it can be restored later if a transaction fails.
+ def remember_transaction_record_state
+ @_start_transaction_state[:id] = id
+ @_start_transaction_state.reverse_merge!(
+ new_record: @new_record,
+ destroyed: @destroyed,
+ frozen?: frozen?,
+ )
+ @_start_transaction_state[:level] = (@_start_transaction_state[:level] || 0) + 1
+ end
+
+ # Clear the new record state and id of a record.
+ def clear_transaction_record_state
+ @_start_transaction_state[:level] = (@_start_transaction_state[:level] || 0) - 1
+ force_clear_transaction_record_state if @_start_transaction_state[:level] < 1
+ end
+
+ # Force to clear the transaction record state.
+ def force_clear_transaction_record_state
+ @_start_transaction_state.clear
+ end
+
+      # Restore the new record state and id of a record that was previously saved by a call to +remember_transaction_record_state+.
+ def restore_transaction_record_state(force = false)
+ unless @_start_transaction_state.empty?
+ transaction_level = (@_start_transaction_state[:level] || 0) - 1
+ if transaction_level < 1 || force
+ restore_state = @_start_transaction_state
+ thaw
+ @new_record = restore_state[:new_record]
+ @destroyed = restore_state[:destroyed]
+ pk = self.class.primary_key
+ if pk && read_attribute(pk) != restore_state[:id]
+ write_attribute(pk, restore_state[:id])
+ end
+ freeze if restore_state[:frozen?]
+ end
+ end
+ end
+
+ # Determine if a record was created or destroyed in a transaction. State should be one of :new_record or :destroyed.
+ def transaction_record_state(state)
+ @_start_transaction_state[state]
+ end
+
+ # Determine if a transaction included an action for :create, :update, or :destroy. Used in filtering callbacks.
+ def transaction_include_any_action?(actions)
+ actions.any? do |action|
+ case action
+ when :create
+ transaction_record_state(:new_record)
+ when :destroy
+ defined?(@_trigger_destroy_callback) && @_trigger_destroy_callback
+ when :update
+ !(transaction_record_state(:new_record) || destroyed?) &&
+ (defined?(@_trigger_update_callback) && @_trigger_update_callback)
+ end
+ end
+ end
+
+ def set_transaction_state(state)
+ @transaction_state = state
+ end
+
+ def has_transactional_callbacks?
+ !_rollback_callbacks.empty? || !_commit_callbacks.empty? || !_before_commit_callbacks.empty?
+ end
+
+ # Updates the attributes on this particular Active Record object so that
+ # if it's associated with a transaction, then the state of the Active Record
+ # object will be updated to reflect the current state of the transaction.
+ #
+ # The +@transaction_state+ variable stores the state of the associated
+ # transaction. This relies on the fact that a transaction can only be rolled
+ # back or committed once (otherwise a list of states would be required).
+ # Each Active Record object inside of a transaction carries that transaction's
+ # TransactionState.
+ #
+ # This method checks to see if the ActiveRecord object's state reflects
+ # the TransactionState, and rolls back or commits the Active Record object
+ # as appropriate.
+ #
+ # Since Active Record objects can be inside multiple transactions, this
+ # method recursively goes through the parent of the TransactionState and
+ # checks whether the Active Record object reflects the state of the transaction.
+ def sync_with_transaction_state
+ update_attributes_from_transaction_state(@transaction_state)
+ end
+
+ def update_attributes_from_transaction_state(transaction_state)
+ if transaction_state && transaction_state.finalized?
+ restore_transaction_record_state if transaction_state.rolledback?
+ clear_transaction_record_state if transaction_state.fully_completed?
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/translation.rb b/activerecord/lib/active_record/translation.rb
new file mode 100644
index 0000000000..3cf70eafb8
--- /dev/null
+++ b/activerecord/lib/active_record/translation.rb
@@ -0,0 +1,24 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ module Translation
+ include ActiveModel::Translation
+
+ # Set the lookup ancestors for ActiveModel.
+ def lookup_ancestors #:nodoc:
+ klass = self
+ classes = [klass]
+ return classes if klass == ActiveRecord::Base
+
+ while klass != klass.base_class
+ classes << klass = klass.superclass
+ end
+ classes
+ end
+
+ # Set the i18n scope to override ActiveModel's default.
+ def i18n_scope #:nodoc:
+ :activerecord
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/type.rb b/activerecord/lib/active_record/type.rb
new file mode 100644
index 0000000000..fa22df92b8
--- /dev/null
+++ b/activerecord/lib/active_record/type.rb
@@ -0,0 +1,79 @@
+# frozen_string_literal: true
+
+require "active_model/type"
+
+require_relative "type/internal/timezone"
+
+require_relative "type/date"
+require_relative "type/date_time"
+require_relative "type/decimal_without_scale"
+require_relative "type/json"
+require_relative "type/time"
+require_relative "type/text"
+require_relative "type/unsigned_integer"
+
+require_relative "type/serialized"
+require_relative "type/adapter_specific_registry"
+
+require_relative "type/type_map"
+require_relative "type/hash_lookup_type_map"
+
+module ActiveRecord
+ module Type
+ @registry = AdapterSpecificRegistry.new
+
+ class << self
+ attr_accessor :registry # :nodoc:
+ delegate :add_modifier, to: :registry
+
+ # Add a new type to the registry, allowing it to be referenced as a
+ # symbol by {ActiveRecord::Base.attribute}[rdoc-ref:Attributes::ClassMethods#attribute].
+ # If your type is only meant to be used with a specific database adapter, you can
+ # restrict it to that adapter by passing <tt>adapter: :postgresql</tt>. If your type has the same
+ # name as a native type for the current adapter, an exception will be
+ # raised unless you specify an +:override+ option. <tt>override: true</tt> will
+ # cause your type to be used instead of the native type. <tt>override:
+ # false</tt> will cause the native type to be used over yours if one exists.
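+ #
+ # An illustrative sketch (+MoneyType+ is a hypothetical
+ # ActiveModel::Type::Value subclass, not defined here):
+ #
+ #   ActiveRecord::Type.register(:money, MoneyType, adapter: :postgresql)
+ #   ActiveRecord::Type.register(:money, MoneyType, override: false)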
+ def register(type_name, klass = nil, **options, &block)
+ registry.register(type_name, klass, **options, &block)
+ end
+
+ def lookup(*args, adapter: current_adapter_name, **kwargs) # :nodoc:
+ registry.lookup(*args, adapter: adapter, **kwargs)
+ end
+
+ def default_value # :nodoc:
+ @default_value ||= Value.new
+ end
+
+ private
+
+ def current_adapter_name
+ ActiveRecord::Base.connection.adapter_name.downcase.to_sym
+ end
+ end
+
+ Helpers = ActiveModel::Type::Helpers
+ BigInteger = ActiveModel::Type::BigInteger
+ Binary = ActiveModel::Type::Binary
+ Boolean = ActiveModel::Type::Boolean
+ Decimal = ActiveModel::Type::Decimal
+ Float = ActiveModel::Type::Float
+ Integer = ActiveModel::Type::Integer
+ String = ActiveModel::Type::String
+ Value = ActiveModel::Type::Value
+
+ register(:big_integer, Type::BigInteger, override: false)
+ register(:binary, Type::Binary, override: false)
+ register(:boolean, Type::Boolean, override: false)
+ register(:date, Type::Date, override: false)
+ register(:datetime, Type::DateTime, override: false)
+ register(:decimal, Type::Decimal, override: false)
+ register(:float, Type::Float, override: false)
+ register(:integer, Type::Integer, override: false)
+ register(:json, Type::Json, override: false)
+ register(:string, Type::String, override: false)
+ register(:text, Type::Text, override: false)
+ register(:time, Type::Time, override: false)
+ end
+end
diff --git a/activerecord/lib/active_record/type/adapter_specific_registry.rb b/activerecord/lib/active_record/type/adapter_specific_registry.rb
new file mode 100644
index 0000000000..e7468aa542
--- /dev/null
+++ b/activerecord/lib/active_record/type/adapter_specific_registry.rb
@@ -0,0 +1,136 @@
+# frozen_string_literal: true
+
+require "active_model/type/registry"
+
+module ActiveRecord
+ # :stopdoc:
+ module Type
+ class AdapterSpecificRegistry < ActiveModel::Type::Registry
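+ # Registers a decorating type for matching lookups. A hedged example of the
+ # call shape (adapter code passes its own options and classes):
+ #
+ #   registry.add_modifier({ array: true }, OID::Array, adapter: :postgresql)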
+ def add_modifier(options, klass, **args)
+ registrations << DecorationRegistration.new(options, klass, **args)
+ end
+
+ private
+
+ def registration_klass
+ Registration
+ end
+
+ def find_registration(symbol, *args)
+ registrations
+ .select { |registration| registration.matches?(symbol, *args) }
+ .max
+ end
+ end
+
+ class Registration
+ def initialize(name, block, adapter: nil, override: nil)
+ @name = name
+ @block = block
+ @adapter = adapter
+ @override = override
+ end
+
+ def call(_registry, *args, adapter: nil, **kwargs)
+ if kwargs.any? # https://bugs.ruby-lang.org/issues/10856
+ block.call(*args, **kwargs)
+ else
+ block.call(*args)
+ end
+ end
+
+ def matches?(type_name, *args, **kwargs)
+ type_name == name && matches_adapter?(**kwargs)
+ end
+
+ def <=>(other)
+ if conflicts_with?(other)
+ raise TypeConflictError.new("Type #{name} was registered for all
+ adapters, but shadows a native type with
+ the same name for #{other.adapter}".squish)
+ end
+ priority <=> other.priority
+ end
+
+ # TODO Change this to private once we've dropped Ruby 2.2 support.
+ # Workaround for Ruby 2.2 "private attribute?" warning.
+ protected
+
+ attr_reader :name, :block, :adapter, :override
+
+ def priority
+ result = 0
+ if adapter
+ result |= 1
+ end
+ if override
+ result |= 2
+ end
+ result
+ end
+
+ def priority_except_adapter
+ priority & 0b111111100
+ end
+
+ private
+
+ def matches_adapter?(adapter: nil, **)
+ (self.adapter.nil? || adapter == self.adapter)
+ end
+
+ def conflicts_with?(other)
+ same_priority_except_adapter?(other) &&
+ has_adapter_conflict?(other)
+ end
+
+ def same_priority_except_adapter?(other)
+ priority_except_adapter == other.priority_except_adapter
+ end
+
+ def has_adapter_conflict?(other)
+ (override.nil? && other.adapter) ||
+ (adapter && other.override.nil?)
+ end
+ end
+
+ class DecorationRegistration < Registration
+ def initialize(options, klass, adapter: nil)
+ @options = options
+ @klass = klass
+ @adapter = adapter
+ end
+
+ def call(registry, *args, **kwargs)
+ subtype = registry.lookup(*args, **kwargs.except(*options.keys))
+ klass.new(subtype)
+ end
+
+ def matches?(*args, **kwargs)
+ matches_adapter?(**kwargs) && matches_options?(**kwargs)
+ end
+
+ def priority
+ super | 4
+ end
+
+ # TODO Change this to private once we've dropped Ruby 2.2 support.
+ # Workaround for Ruby 2.2 "private attribute?" warning.
+ protected
+
+ attr_reader :options, :klass
+
+ private
+
+ def matches_options?(**kwargs)
+ options.all? do |key, value|
+ kwargs[key] == value
+ end
+ end
+ end
+ end
+
+ class TypeConflictError < StandardError
+ end
+ # :startdoc:
+end
diff --git a/activerecord/lib/active_record/type/date.rb b/activerecord/lib/active_record/type/date.rb
new file mode 100644
index 0000000000..8177074a20
--- /dev/null
+++ b/activerecord/lib/active_record/type/date.rb
@@ -0,0 +1,9 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ module Type
+ class Date < ActiveModel::Type::Date
+ include Internal::Timezone
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/type/date_time.rb b/activerecord/lib/active_record/type/date_time.rb
new file mode 100644
index 0000000000..4acde6b9f8
--- /dev/null
+++ b/activerecord/lib/active_record/type/date_time.rb
@@ -0,0 +1,9 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ module Type
+ class DateTime < ActiveModel::Type::DateTime
+ include Internal::Timezone
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/type/decimal_without_scale.rb b/activerecord/lib/active_record/type/decimal_without_scale.rb
new file mode 100644
index 0000000000..a207940dc7
--- /dev/null
+++ b/activerecord/lib/active_record/type/decimal_without_scale.rb
@@ -0,0 +1,15 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ module Type
+ class DecimalWithoutScale < ActiveModel::Type::BigInteger # :nodoc:
+ def type
+ :decimal
+ end
+
+ def type_cast_for_schema(value)
+ value.to_s.inspect
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/type/hash_lookup_type_map.rb b/activerecord/lib/active_record/type/hash_lookup_type_map.rb
new file mode 100644
index 0000000000..db9853fbcc
--- /dev/null
+++ b/activerecord/lib/active_record/type/hash_lookup_type_map.rb
@@ -0,0 +1,25 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ module Type
+ class HashLookupTypeMap < TypeMap # :nodoc:
+ def alias_type(type, alias_type)
+ register_type(type) { |_, *args| lookup(alias_type, *args) }
+ end
+
+ def key?(key)
+ @mapping.key?(key)
+ end
+
+ def keys
+ @mapping.keys
+ end
+
+ private
+
+ def perform_fetch(type, *args, &block)
+ @mapping.fetch(type, block).call(type, *args)
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/type/internal/timezone.rb b/activerecord/lib/active_record/type/internal/timezone.rb
new file mode 100644
index 0000000000..3059755752
--- /dev/null
+++ b/activerecord/lib/active_record/type/internal/timezone.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ module Type
+ module Internal
+ module Timezone
+ def is_utc?
+ ActiveRecord::Base.default_timezone == :utc
+ end
+
+ def default_timezone
+ ActiveRecord::Base.default_timezone
+ end
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/type/json.rb b/activerecord/lib/active_record/type/json.rb
new file mode 100644
index 0000000000..3f9ff22796
--- /dev/null
+++ b/activerecord/lib/active_record/type/json.rb
@@ -0,0 +1,30 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ module Type
+ class Json < ActiveModel::Type::Value
+ include ActiveModel::Type::Helpers::Mutable
+
+ def type
+ :json
+ end
+
+ def deserialize(value)
+ return value unless value.is_a?(::String)
+ ActiveSupport::JSON.decode(value) rescue nil
+ end
+
+ def serialize(value)
+ ActiveSupport::JSON.encode(value) unless value.nil?
+ end
+
+ def changed_in_place?(raw_old_value, new_value)
+ deserialize(raw_old_value) != new_value
+ end
+
+ def accessor
+ ActiveRecord::Store::StringKeyedHashAccessor
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/type/serialized.rb b/activerecord/lib/active_record/type/serialized.rb
new file mode 100644
index 0000000000..e882784691
--- /dev/null
+++ b/activerecord/lib/active_record/type/serialized.rb
@@ -0,0 +1,67 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ module Type
+ class Serialized < DelegateClass(ActiveModel::Type::Value) # :nodoc:
+ undef to_yaml if method_defined?(:to_yaml)
+
+ include ActiveModel::Type::Helpers::Mutable
+
+ attr_reader :subtype, :coder
+
+ def initialize(subtype, coder)
+ @subtype = subtype
+ @coder = coder
+ super(subtype)
+ end
+
+ def deserialize(value)
+ if default_value?(value)
+ value
+ else
+ coder.load(super)
+ end
+ end
+
+ def serialize(value)
+ return if value.nil?
+ unless default_value?(value)
+ super coder.dump(value)
+ end
+ end
+
+ def inspect
+ Kernel.instance_method(:inspect).bind(self).call
+ end
+
+ def changed_in_place?(raw_old_value, value)
+ return false if value.nil?
+ raw_new_value = encoded(value)
+ raw_old_value.nil? != raw_new_value.nil? ||
+ subtype.changed_in_place?(raw_old_value, raw_new_value)
+ end
+
+ def accessor
+ ActiveRecord::Store::IndifferentHashAccessor
+ end
+
+ def assert_valid_value(value)
+ if coder.respond_to?(:assert_valid_value)
+ coder.assert_valid_value(value, action: "serialize")
+ end
+ end
+
+ private
+
+ def default_value?(value)
+ value == coder.load(nil)
+ end
+
+ def encoded(value)
+ unless default_value?(value)
+ coder.dump(value)
+ end
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/type/text.rb b/activerecord/lib/active_record/type/text.rb
new file mode 100644
index 0000000000..6d19696671
--- /dev/null
+++ b/activerecord/lib/active_record/type/text.rb
@@ -0,0 +1,11 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ module Type
+ class Text < ActiveModel::Type::String # :nodoc:
+ def type
+ :text
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/type/time.rb b/activerecord/lib/active_record/type/time.rb
new file mode 100644
index 0000000000..f4da1ecf2c
--- /dev/null
+++ b/activerecord/lib/active_record/type/time.rb
@@ -0,0 +1,21 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ module Type
+ class Time < ActiveModel::Type::Time
+ include Internal::Timezone
+
+ class Value < DelegateClass(::Time) # :nodoc:
+ end
+
+ def serialize(value)
+ case value = super
+ when ::Time
+ Value.new(value)
+ else
+ value
+ end
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/type/type_map.rb b/activerecord/lib/active_record/type/type_map.rb
new file mode 100644
index 0000000000..fc40b460f0
--- /dev/null
+++ b/activerecord/lib/active_record/type/type_map.rb
@@ -0,0 +1,62 @@
+# frozen_string_literal: true
+
+require "concurrent/map"
+
+module ActiveRecord
+ module Type
+ class TypeMap # :nodoc:
+ def initialize
+ @mapping = {}
+ @cache = Concurrent::Map.new do |h, key|
+ h.fetch_or_store(key, Concurrent::Map.new)
+ end
+ end
+
+ def lookup(lookup_key, *args)
+ fetch(lookup_key, *args) { Type.default_value }
+ end
+
+ def fetch(lookup_key, *args, &block)
+ @cache[lookup_key].fetch_or_store(args) do
+ perform_fetch(lookup_key, *args, &block)
+ end
+ end
+
+ def register_type(key, value = nil, &block)
+ raise ::ArgumentError unless value || block
+ @cache.clear
+
+ if block
+ @mapping[key] = block
+ else
+ @mapping[key] = proc { value }
+ end
+ end
+
+ def alias_type(key, target_key)
+ register_type(key) do |sql_type, *args|
+ metadata = sql_type[/\(.*\)/, 0]
+ lookup("#{target_key}#{metadata}", *args)
+ end
+ end
+
+ def clear
+ @mapping.clear
+ end
+
+ private
+
+ def perform_fetch(lookup_key, *args)
+ matching_pair = @mapping.reverse_each.detect do |key, _|
+ key === lookup_key
+ end
+
+ if matching_pair
+ matching_pair.last.call(lookup_key, *args)
+ else
+ yield lookup_key, *args
+ end
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/type/unsigned_integer.rb b/activerecord/lib/active_record/type/unsigned_integer.rb
new file mode 100644
index 0000000000..4619528f81
--- /dev/null
+++ b/activerecord/lib/active_record/type/unsigned_integer.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ module Type
+ class UnsignedInteger < ActiveModel::Type::Integer # :nodoc:
+ private
+
+ def max_value
+ super * 2
+ end
+
+ def min_value
+ 0
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/type_caster.rb b/activerecord/lib/active_record/type_caster.rb
new file mode 100644
index 0000000000..ed2e4fb79c
--- /dev/null
+++ b/activerecord/lib/active_record/type_caster.rb
@@ -0,0 +1,9 @@
+# frozen_string_literal: true
+
+require_relative "type_caster/map"
+require_relative "type_caster/connection"
+
+module ActiveRecord
+ module TypeCaster # :nodoc:
+ end
+end
diff --git a/activerecord/lib/active_record/type_caster/connection.rb b/activerecord/lib/active_record/type_caster/connection.rb
new file mode 100644
index 0000000000..af4e4e37e2
--- /dev/null
+++ b/activerecord/lib/active_record/type_caster/connection.rb
@@ -0,0 +1,33 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ module TypeCaster
+ class Connection # :nodoc:
+ def initialize(klass, table_name)
+ @klass = klass
+ @table_name = table_name
+ end
+
+ def type_cast_for_database(attribute_name, value)
+ return value if value.is_a?(Arel::Nodes::BindParam)
+ column = column_for(attribute_name)
+ connection.type_cast_from_column(column, value)
+ end
+
+ # TODO Change this to private once we've dropped Ruby 2.2 support.
+ # Workaround for Ruby 2.2 "private attribute?" warning.
+ protected
+
+ attr_reader :table_name
+ delegate :connection, to: :@klass
+
+ private
+
+ def column_for(attribute_name)
+ if connection.schema_cache.data_source_exists?(table_name)
+ connection.schema_cache.columns_hash(table_name)[attribute_name.to_s]
+ end
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/type_caster/map.rb b/activerecord/lib/active_record/type_caster/map.rb
new file mode 100644
index 0000000000..d51350ba83
--- /dev/null
+++ b/activerecord/lib/active_record/type_caster/map.rb
@@ -0,0 +1,23 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ module TypeCaster
+ class Map # :nodoc:
+ def initialize(types)
+ @types = types
+ end
+
+ def type_cast_for_database(attr_name, value)
+ return value if value.is_a?(Arel::Nodes::BindParam)
+ type = types.type_for_attribute(attr_name.to_s)
+ type.serialize(value)
+ end
+
+ # TODO Change this to private once we've dropped Ruby 2.2 support.
+ # Workaround for Ruby 2.2 "private attribute?" warning.
+ protected
+
+ attr_reader :types
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/validations.rb b/activerecord/lib/active_record/validations.rb
new file mode 100644
index 0000000000..3f5c879f2f
--- /dev/null
+++ b/activerecord/lib/active_record/validations.rb
@@ -0,0 +1,93 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ # = Active Record \RecordInvalid
+ #
+ # Raised by {ActiveRecord::Base#save!}[rdoc-ref:Persistence#save!] and
+ # {ActiveRecord::Base#create!}[rdoc-ref:Persistence::ClassMethods#create!] when the record is invalid.
+ # Use the #record method to retrieve the record which did not validate.
+ #
+ # begin
+ # complex_operation_that_internally_calls_save!
+ # rescue ActiveRecord::RecordInvalid => invalid
+ # puts invalid.record.errors
+ # end
+ class RecordInvalid < ActiveRecordError
+ attr_reader :record
+
+ def initialize(record = nil)
+ if record
+ @record = record
+ errors = @record.errors.full_messages.join(", ")
+ message = I18n.t(:"#{@record.class.i18n_scope}.errors.messages.record_invalid", errors: errors, default: :"errors.messages.record_invalid")
+ else
+ message = "Record invalid"
+ end
+
+ super(message)
+ end
+ end
+
+ # = Active Record \Validations
+ #
+ # Active Record includes the majority of its validations from ActiveModel::Validations,
+ # all of which accept the <tt>:on</tt> argument to define the context in which the
+ # validations are active. Active Record will always supply either the <tt>:create</tt>
+ # or <tt>:update</tt> context, depending on whether the model is a
+ # {new_record?}[rdoc-ref:Persistence#new_record?].
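+ #
+ # For example, a validation can be limited to one of those contexts
+ # (an illustrative sketch; +Person+ is a hypothetical model):
+ #
+ #   class Person < ActiveRecord::Base
+ #     validates :name, presence: true, on: :create
+ #   end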
+ module Validations
+ extend ActiveSupport::Concern
+ include ActiveModel::Validations
+
+ # The validation process on save can be skipped by passing <tt>validate: false</tt>.
+ # The regular {ActiveRecord::Base#save}[rdoc-ref:Persistence#save] method is replaced
+ # with this when the validations module is mixed in, which it is by default.
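+ #
+ # An illustrative sketch, assuming a hypothetical +Person+ model that
+ # validates the presence of +name+:
+ #
+ #   person = Person.new
+ #   person.save                  # => false, validations run and fail
+ #   person.save(validate: false) # => true, validations are skipped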
+ def save(options = {})
+ perform_validations(options) ? super : false
+ end
+
+ # Attempts to save the record just like {ActiveRecord::Base#save}[rdoc-ref:Base#save] but
+ # will raise an ActiveRecord::RecordInvalid exception instead of returning +false+ if the record is not valid.
+ def save!(options = {})
+ perform_validations(options) ? super : raise_validation_error
+ end
+
+ # Runs all the validations within the specified context. Returns +true+ if
+ # no errors are found, +false+ otherwise.
+ #
+ # Aliased as #validate.
+ #
+ # If the argument is +false+ or +nil+ (the default), the context is set to <tt>:create</tt> if
+ # {new_record?}[rdoc-ref:Persistence#new_record?] is +true+, and to <tt>:update</tt> if it is not.
+ #
+ # \Validations with no <tt>:on</tt> option will run no matter the context. \Validations with
+ # some <tt>:on</tt> option will only run in the specified context.
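+ #
+ # An illustrative sketch, assuming a hypothetical +Person+ model with
+ # <tt>validates :name, presence: true, on: :create</tt>:
+ #
+ #   person = Person.new
+ #   person.valid?          # => false, the default :create context applies
+ #   person.valid?(:update) # => true, the :create-only validation is skipped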
+ def valid?(context = nil)
+ context ||= default_validation_context
+ output = super(context)
+ errors.empty? && output
+ end
+
+ alias_method :validate, :valid?
+
+ private
+
+ def default_validation_context
+ new_record? ? :create : :update
+ end
+
+ def raise_validation_error
+ raise(RecordInvalid.new(self))
+ end
+
+ def perform_validations(options = {})
+ options[:validate] == false || valid?(options[:context])
+ end
+ end
+end
+
+require_relative "validations/associated"
+require_relative "validations/uniqueness"
+require_relative "validations/presence"
+require_relative "validations/absence"
+require_relative "validations/length"
diff --git a/activerecord/lib/active_record/validations/absence.rb b/activerecord/lib/active_record/validations/absence.rb
new file mode 100644
index 0000000000..6afb9eabd2
--- /dev/null
+++ b/activerecord/lib/active_record/validations/absence.rb
@@ -0,0 +1,25 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ module Validations
+ class AbsenceValidator < ActiveModel::Validations::AbsenceValidator # :nodoc:
+ def validate_each(record, attribute, association_or_value)
+ if record.class._reflect_on_association(attribute)
+ association_or_value = Array.wrap(association_or_value).reject(&:marked_for_destruction?)
+ end
+ super
+ end
+ end
+
+ module ClassMethods
+ # Validates that the specified attributes are not present (as defined by
+ # Object#present?). If the attribute is an association, the associated object
+ # is considered absent if it was marked for destruction.
+ #
+ # See ActiveModel::Validations::HelperMethods.validates_absence_of for more information.
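+ #
+ # An illustrative sketch (+Person+ is a hypothetical model):
+ #
+ #   class Person < ActiveRecord::Base
+ #     validates_absence_of :nickname
+ #   end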
+ def validates_absence_of(*attr_names)
+ validates_with AbsenceValidator, _merge_attributes(attr_names)
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/validations/associated.rb b/activerecord/lib/active_record/validations/associated.rb
new file mode 100644
index 0000000000..3538aeec22
--- /dev/null
+++ b/activerecord/lib/active_record/validations/associated.rb
@@ -0,0 +1,60 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ module Validations
+ class AssociatedValidator < ActiveModel::EachValidator #:nodoc:
+ def validate_each(record, attribute, value)
+ if Array(value).reject { |r| valid_object?(r) }.any?
+ record.errors.add(attribute, :invalid, options.merge(value: value))
+ end
+ end
+
+ private
+
+ def valid_object?(record)
+ (record.respond_to?(:marked_for_destruction?) && record.marked_for_destruction?) || record.valid?
+ end
+ end
+
+ module ClassMethods
+ # Validates whether the associated object or objects are all valid.
+ # Works with any kind of association.
+ #
+ # class Book < ActiveRecord::Base
+ # has_many :pages
+ # belongs_to :library
+ #
+ # validates_associated :pages, :library
+ # end
+ #
+ # WARNING: This validation must not be used on both ends of an association.
+ # Doing so will lead to a circular dependency and cause infinite recursion.
+ #
+ # NOTE: This validation will not fail if the association hasn't been
+ # assigned. If you want to ensure that the association is both present and
+ # guaranteed to be valid, you also need to use
+ # {validates_presence_of}[rdoc-ref:Validations::ClassMethods#validates_presence_of].
+ #
+ # Configuration options:
+ #
+ # * <tt>:message</tt> - A custom error message (default is: "is invalid").
+ # * <tt>:on</tt> - Specifies the contexts where this validation is active.
+ # Runs in all validation contexts by default (+nil+). You can pass a symbol
+ # or an array of symbols. (e.g. <tt>on: :create</tt> or
+ # <tt>on: :custom_validation_context</tt> or
+ # <tt>on: [:create, :custom_validation_context]</tt>)
+ # * <tt>:if</tt> - Specifies a method, proc or string to call to determine
+ # if the validation should occur (e.g. <tt>if: :allow_validation</tt>,
+ # or <tt>if: Proc.new { |user| user.signup_step > 2 }</tt>). The method,
+ # proc or string should return or evaluate to a +true+ or +false+ value.
+ # * <tt>:unless</tt> - Specifies a method, proc or string to call to
+ # determine if the validation should not occur (e.g. <tt>unless: :skip_validation</tt>,
+ # or <tt>unless: Proc.new { |user| user.signup_step <= 2 }</tt>). The
+ # method, proc or string should return or evaluate to a +true+ or +false+
+ # value.
+ def validates_associated(*attr_names)
+ validates_with AssociatedValidator, _merge_attributes(attr_names)
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/validations/length.rb b/activerecord/lib/active_record/validations/length.rb
new file mode 100644
index 0000000000..f47b14ae3a
--- /dev/null
+++ b/activerecord/lib/active_record/validations/length.rb
@@ -0,0 +1,26 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ module Validations
+ class LengthValidator < ActiveModel::Validations::LengthValidator # :nodoc:
+ def validate_each(record, attribute, association_or_value)
+ if association_or_value.respond_to?(:loaded?) && association_or_value.loaded?
+ association_or_value = association_or_value.target.reject(&:marked_for_destruction?)
+ end
+ super
+ end
+ end
+
+ module ClassMethods
+ # Validates that the specified attributes match the length restrictions supplied.
+ # If the attribute is an association, records that are marked for destruction are not counted.
+ #
+ # See ActiveModel::Validations::HelperMethods.validates_length_of for more information.
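+ #
+ # An illustrative sketch (+Person+ is a hypothetical model):
+ #
+ #   class Person < ActiveRecord::Base
+ #     has_many :pets
+ #     validates_length_of :pets, maximum: 3
+ #   end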
+ def validates_length_of(*attr_names)
+ validates_with LengthValidator, _merge_attributes(attr_names)
+ end
+
+ alias_method :validates_size_of, :validates_length_of
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/validations/presence.rb b/activerecord/lib/active_record/validations/presence.rb
new file mode 100644
index 0000000000..75e97e1997
--- /dev/null
+++ b/activerecord/lib/active_record/validations/presence.rb
@@ -0,0 +1,68 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ module Validations
+ class PresenceValidator < ActiveModel::Validations::PresenceValidator # :nodoc:
+ def validate_each(record, attribute, association_or_value)
+ if record.class._reflect_on_association(attribute)
+ association_or_value = Array.wrap(association_or_value).reject(&:marked_for_destruction?)
+ end
+ super
+ end
+ end
+
+ module ClassMethods
+ # Validates that the specified attributes are not blank (as defined by
+ # Object#blank?), and, if the attribute is an association, that the
+ # associated object is not marked for destruction. Happens by default
+ # on save.
+ #
+ # class Person < ActiveRecord::Base
+ # has_one :face
+ # validates_presence_of :face
+ # end
+ #
+ # The face attribute must be in the object and it cannot be blank or marked
+ # for destruction.
+ #
+ # If you want to validate the presence of a boolean field (where the real values
+ # are true and false), you will want to use
+ # <tt>validates_inclusion_of :field_name, in: [true, false]</tt>.
+ #
+ # This is due to the way Object#blank? handles boolean values:
+ # <tt>false.blank? # => true</tt>.
+ #
+ # This validator defers to the Active Model validation for presence, adding the
+ # check to see that an associated object is not marked for destruction. This
+ # prevents the parent object from validating successfully and saving, which then
+ # deletes the associated object, thus putting the parent object into an invalid
+ # state.
+ #
+ # NOTE: This validation will not fail while using it with an association
+ # if the latter was assigned but not valid. If you want to ensure that
+ # it is both present and valid, you also need to use
+ # {validates_associated}[rdoc-ref:Validations::ClassMethods#validates_associated].
+ #
+ # Configuration options:
+ # * <tt>:message</tt> - A custom error message (default is: "can't be blank").
+ # * <tt>:on</tt> - Specifies the contexts where this validation is active.
+ # Runs in all validation contexts by default (+nil+). You can pass a symbol
+ # or an array of symbols. (e.g. <tt>on: :create</tt> or
+ # <tt>on: :custom_validation_context</tt> or
+ # <tt>on: [:create, :custom_validation_context]</tt>)
+ # * <tt>:if</tt> - Specifies a method, proc or string to call to determine if
+ # the validation should occur (e.g. <tt>if: :allow_validation</tt>, or
+ # <tt>if: Proc.new { |user| user.signup_step > 2 }</tt>). The method, proc
+ # or string should return or evaluate to a +true+ or +false+ value.
+ # * <tt>:unless</tt> - Specifies a method, proc or string to call to determine
+ # if the validation should not occur (e.g. <tt>unless: :skip_validation</tt>,
+ # or <tt>unless: Proc.new { |user| user.signup_step <= 2 }</tt>). The method,
+ # proc or string should return or evaluate to a +true+ or +false+ value.
+ # * <tt>:strict</tt> - Specifies whether validation should be strict.
+ # See ActiveModel::Validations#validates! for more information.
+ def validates_presence_of(*attr_names)
+ validates_with PresenceValidator, _merge_attributes(attr_names)
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/validations/uniqueness.rb b/activerecord/lib/active_record/validations/uniqueness.rb
new file mode 100644
index 0000000000..2677fade18
--- /dev/null
+++ b/activerecord/lib/active_record/validations/uniqueness.rb
@@ -0,0 +1,236 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ module Validations
+ class UniquenessValidator < ActiveModel::EachValidator # :nodoc:
+ def initialize(options)
+ if options[:conditions] && !options[:conditions].respond_to?(:call)
+ raise ArgumentError, "#{options[:conditions]} was passed as :conditions but is not callable. " \
+ "Pass a callable instead: `conditions: -> { where(approved: true) }`"
+ end
+ super({ case_sensitive: true }.merge!(options))
+ @klass = options[:class]
+ end
+
+ def validate_each(record, attribute, value)
+ finder_class = find_finder_class_for(record)
+ value = map_enum_attribute(finder_class, attribute, value)
+
+ relation = build_relation(finder_class, attribute, value)
+ if record.persisted?
+ if finder_class.primary_key
+ relation = relation.where.not(finder_class.primary_key => record.id_in_database || record.id)
+ else
+ raise UnknownPrimaryKey.new(finder_class, "Can not validate uniqueness for persisted record without primary key.")
+ end
+ end
+ relation = scope_relation(record, relation)
+ relation = relation.merge(options[:conditions]) if options[:conditions]
+
+ if relation.exists?
+ error_options = options.except(:case_sensitive, :scope, :conditions)
+ error_options[:value] = value
+
+ record.errors.add(attribute, :taken, error_options)
+ end
+ end
+
+ private
+ # The check for an existing value should be run from a class that
+ # isn't abstract. This means working down from the current class
+ # (self), to the first non-abstract class. Since classes don't know
+ # their subclasses, we have to build the hierarchy between self and
+ # the record's class.
+ def find_finder_class_for(record)
+ class_hierarchy = [record.class]
+
+ while class_hierarchy.first != @klass
+ class_hierarchy.unshift(class_hierarchy.first.superclass)
+ end
+
+ class_hierarchy.detect { |klass| !klass.abstract_class? }
+ end
+
+ def build_relation(klass, attribute, value)
+ if reflection = klass._reflect_on_association(attribute)
+ attribute = reflection.foreign_key
+ value = value.attributes[reflection.klass.primary_key] unless value.nil?
+ end
+
+ if value.nil?
+ return klass.unscoped.where!(attribute => value)
+ end
+
+ # the attribute may be an aliased attribute
+ if klass.attribute_alias?(attribute)
+ attribute = klass.attribute_alias(attribute)
+ end
+
+ attribute_name = attribute.to_s
+ value = klass.predicate_builder.build_bind_attribute(attribute_name, value)
+
+ table = klass.arel_table
+ column = klass.columns_hash[attribute_name]
+
+ comparison = if !options[:case_sensitive]
+ # will use the SQL LOWER function before comparison, unless it detects a case-insensitive collation
+ klass.connection.case_insensitive_comparison(table, attribute, column, value)
+ else
+ klass.connection.case_sensitive_comparison(table, attribute, column, value)
+ end
+ klass.unscoped.where!(comparison)
+ end
+
+ def scope_relation(record, relation)
+ Array(options[:scope]).each do |scope_item|
+ scope_value = if record.class._reflect_on_association(scope_item)
+ record.association(scope_item).reader
+ else
+ record._read_attribute(scope_item)
+ end
+ relation = relation.where(scope_item => scope_value)
+ end
+
+ relation
+ end
+
+ def map_enum_attribute(klass, attribute, value)
+ mapping = klass.defined_enums[attribute.to_s]
+ value = mapping[value] if value && mapping
+ value
+ end
+ end
+
+ module ClassMethods
+ # Validates whether the value of the specified attributes are unique
+ # across the system. Useful for making sure that only one user
+ # can be named "davidhh".
+ #
+ # class Person < ActiveRecord::Base
+ # validates_uniqueness_of :user_name
+ # end
+ #
+ # It can also validate whether the value of the specified attributes are
+ # unique based on a <tt>:scope</tt> parameter:
+ #
+ # class Person < ActiveRecord::Base
+ # validates_uniqueness_of :user_name, scope: :account_id
+ # end
+ #
+ # Or even multiple scope parameters. For example, making sure that a
+ # teacher can only be on the schedule once per semester for a particular
+ # class.
+ #
+ # class TeacherSchedule < ActiveRecord::Base
+ # validates_uniqueness_of :teacher_id, scope: [:semester_id, :class_id]
+ # end
+ #
+ # It is also possible to limit the uniqueness constraint to a set of
+ # records matching certain conditions. In this example archived articles
+ # are not being taken into consideration when validating uniqueness
+ # of the title attribute:
+ #
+ # class Article < ActiveRecord::Base
+ # validates_uniqueness_of :title, conditions: -> { where.not(status: 'archived') }
+ # end
+ #
+ # When the record is created, a check is performed to make sure that no
+ # record exists in the database with the given value for the specified
+ # attribute (that maps to a column). When the record is updated,
+ # the same check is made but disregarding the record itself.
+ #
+ # Configuration options:
+ #
+ # * <tt>:message</tt> - Specifies a custom error message (default is:
+ # "has already been taken").
+ # * <tt>:scope</tt> - One or more columns by which to limit the scope of
+ # the uniqueness constraint.
+ # * <tt>:conditions</tt> - Specify the conditions to be included as a
+ # <tt>WHERE</tt> SQL fragment to limit the uniqueness constraint lookup
+ # (e.g. <tt>conditions: -> { where(status: 'active') }</tt>).
+ # * <tt>:case_sensitive</tt> - Looks for an exact match. Ignored by
+ # non-text columns (+true+ by default).
+ # * <tt>:allow_nil</tt> - If set to +true+, skips this validation if the
+ # attribute is +nil+ (default is +false+).
+ # * <tt>:allow_blank</tt> - If set to +true+, skips this validation if the
+ # attribute is blank (default is +false+).
+ # * <tt>:if</tt> - Specifies a method, proc or string to call to determine
+ # if the validation should occur (e.g. <tt>if: :allow_validation</tt>,
+ # or <tt>if: Proc.new { |user| user.signup_step > 2 }</tt>). The method,
+ # proc or string should return or evaluate to a +true+ or +false+ value.
+ # * <tt>:unless</tt> - Specifies a method, proc or string to call to
+ # determine if the validation should not occur (e.g. <tt>unless: :skip_validation</tt>,
+ # or <tt>unless: Proc.new { |user| user.signup_step <= 2 }</tt>). The
+ # method, proc or string should return or evaluate to a +true+ or +false+
+ # value.
+ #
+ # === Concurrency and integrity
+ #
+ # Using this validation method in conjunction with
+ # {ActiveRecord::Base#save}[rdoc-ref:Persistence#save]
+ # does not guarantee the absence of duplicate record insertions, because
+ # uniqueness checks on the application level are inherently prone to race
+ # conditions. For example, suppose that two users try to post a Comment at
+ # the same time, and a Comment's title must be unique. At the database-level,
+ # the actions performed by these users could be interleaved in the following manner:
+ #
+ # User 1 | User 2
+ # ------------------------------------+--------------------------------------
+ # # User 1 checks whether there's |
+ # # already a comment with the title |
+ # # 'My Post'. This is not the case. |
+ # SELECT * FROM comments |
+ # WHERE title = 'My Post' |
+ # |
+ # | # User 2 does the same thing and also
+ # | # infers that their title is unique.
+ # | SELECT * FROM comments
+ # | WHERE title = 'My Post'
+ # |
+ # # User 1 inserts their comment. |
+ # INSERT INTO comments |
+ # (title, content) VALUES |
+ # ('My Post', 'hi!') |
+ # |
+ # | # User 2 does the same thing.
+ # | INSERT INTO comments
+ # | (title, content) VALUES
+ # | ('My Post', 'hello!')
+ # |
+ # | # ^^^^^^
+ # | # Boom! We now have a duplicate
+ # | # title!
+ #
+ # This could even happen if you use transactions with the 'serializable'
+ # isolation level. The best way to work around this problem is to add a unique
+ # index to the database table using
+ # {connection.add_index}[rdoc-ref:ConnectionAdapters::SchemaStatements#add_index].
+ # In the rare case that a race condition occurs, the database will guarantee
+ # the field's uniqueness.
+ #
+ # When the database catches such a duplicate insertion,
+ # {ActiveRecord::Base#save}[rdoc-ref:Persistence#save] will raise an ActiveRecord::StatementInvalid
+ # exception. You can either choose to let this error propagate (which
+ # will result in the default Rails exception page being shown), or you
+ # can catch it and restart the transaction (e.g. by telling the user
+ # that the title already exists, and asking them to re-enter the title).
+ # This technique is also known as
+ # {optimistic concurrency control}[http://en.wikipedia.org/wiki/Optimistic_concurrency_control].
+ #
+ # The bundled ActiveRecord::ConnectionAdapters distinguish unique index
+ # constraint errors from other types of database errors by throwing an
+ # ActiveRecord::RecordNotUnique exception. For other adapters you will
+ # have to parse the (database-specific) exception message to detect such
+ # a case.
+ #
+ # The following bundled adapters throw the ActiveRecord::RecordNotUnique exception:
+ #
+ # * ActiveRecord::ConnectionAdapters::Mysql2Adapter.
+ # * ActiveRecord::ConnectionAdapters::SQLite3Adapter.
+ # * ActiveRecord::ConnectionAdapters::PostgreSQLAdapter.
+ def validates_uniqueness_of(*attr_names)
+ validates_with UniquenessValidator, _merge_attributes(attr_names)
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/version.rb b/activerecord/lib/active_record/version.rb
new file mode 100644
index 0000000000..6b0d82d8fc
--- /dev/null
+++ b/activerecord/lib/active_record/version.rb
@@ -0,0 +1,10 @@
+# frozen_string_literal: true
+
+require_relative "gem_version"
+
+module ActiveRecord
+ # Returns the version of the currently loaded ActiveRecord as a <tt>Gem::Version</tt>
+ def self.version
+ gem_version
+ end
+end
diff --git a/activerecord/lib/rails/generators/active_record.rb b/activerecord/lib/rails/generators/active_record.rb
new file mode 100644
index 0000000000..a7e5e373a7
--- /dev/null
+++ b/activerecord/lib/rails/generators/active_record.rb
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+require "rails/generators/named_base"
+require "rails/generators/active_model"
+require "rails/generators/active_record/migration"
+require "active_record"
+
+module ActiveRecord
+ module Generators # :nodoc:
+ class Base < Rails::Generators::NamedBase # :nodoc:
+ include ActiveRecord::Generators::Migration
+
+ # Set the current directory as base for the inherited generators.
+ def self.base_root
+ __dir__
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/rails/generators/active_record/application_record/application_record_generator.rb b/activerecord/lib/rails/generators/active_record/application_record/application_record_generator.rb
new file mode 100644
index 0000000000..d18330f5b2
--- /dev/null
+++ b/activerecord/lib/rails/generators/active_record/application_record/application_record_generator.rb
@@ -0,0 +1,26 @@
+# frozen_string_literal: true
+
+require "rails/generators/active_record"
+
+module ActiveRecord
+ module Generators # :nodoc:
+ class ApplicationRecordGenerator < ::Rails::Generators::Base # :nodoc:
+ source_root File.expand_path("templates", __dir__)
+
+ # FIXME: Change this file to a symlink once RubyGems 2.5.0 is required.
+ def create_application_record
+ template "application_record.rb", application_record_file_name
+ end
+
+ private
+
+ def application_record_file_name
+ @application_record_file_name ||= if namespaced?
+ "app/models/#{namespaced_path}/application_record.rb"
+ else
+ "app/models/application_record.rb"
+ end
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/rails/generators/active_record/application_record/templates/application_record.rb b/activerecord/lib/rails/generators/active_record/application_record/templates/application_record.rb
new file mode 100644
index 0000000000..60050e0bf8
--- /dev/null
+++ b/activerecord/lib/rails/generators/active_record/application_record/templates/application_record.rb
@@ -0,0 +1,5 @@
+<% module_namespacing do -%>
+class ApplicationRecord < ActiveRecord::Base
+ self.abstract_class = true
+end
+<% end -%>
diff --git a/activerecord/lib/rails/generators/active_record/migration.rb b/activerecord/lib/rails/generators/active_record/migration.rb
new file mode 100644
index 0000000000..4ceb502c5d
--- /dev/null
+++ b/activerecord/lib/rails/generators/active_record/migration.rb
@@ -0,0 +1,35 @@
+# frozen_string_literal: true
+
+require "rails/generators/migration"
+
+module ActiveRecord
+ module Generators # :nodoc:
+ module Migration
+ extend ActiveSupport::Concern
+ include Rails::Generators::Migration
+
+ module ClassMethods
+ # Implement the required interface for Rails::Generators::Migration.
+ def next_migration_number(dirname)
+ next_migration_number = current_migration_number(dirname) + 1
+ ActiveRecord::Migration.next_migration_number(next_migration_number)
+ end
+ end
+
+ private
+
+ def primary_key_type
+ key_type = options[:primary_key_type]
+ ", id: :#{key_type}" if key_type
+ end
+
+ def db_migrate_path
+ if defined?(Rails.application) && Rails.application
+ Rails.application.config.paths["db/migrate"].to_ary.first
+ else
+ "db/migrate"
+ end
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/rails/generators/active_record/migration/migration_generator.rb b/activerecord/lib/rails/generators/active_record/migration/migration_generator.rb
new file mode 100644
index 0000000000..0174c7ea31
--- /dev/null
+++ b/activerecord/lib/rails/generators/active_record/migration/migration_generator.rb
@@ -0,0 +1,78 @@
+# frozen_string_literal: true
+
+require "rails/generators/active_record"
+
+module ActiveRecord
+ module Generators # :nodoc:
+ class MigrationGenerator < Base # :nodoc:
+ argument :attributes, type: :array, default: [], banner: "field[:type][:index] field[:type][:index]"
+
+ class_option :primary_key_type, type: :string, desc: "The type for primary key"
+
+ def create_migration_file
+ set_local_assigns!
+ validate_file_name!
+ migration_template @migration_template, File.join(db_migrate_path, "#{file_name}.rb")
+ end
+
+ # TODO Change this to private once we've dropped Ruby 2.2 support.
+ # Workaround for Ruby 2.2 "private attribute?" warning.
+ protected
+ attr_reader :migration_action, :join_tables
+
+ private
+
+ # Sets the default migration template used to generate the migration.
+ # Depending on the command line arguments, the migration template and the table name
+ # instance variables are set up.
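+ #
+ # For example (illustrative, following the patterns matched below):
+ #
+ #   "add_title_to_posts" # => @migration_action = "add", @table_name = "posts"
+ #   "create_comments"    # => @migration_template = "create_table_migration.rb",
+ #                        #    @table_name = "comments"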
+ def set_local_assigns!
+ @migration_template = "migration.rb"
+ case file_name
+ when /^(add|remove)_.*_(?:to|from)_(.*)/
+ @migration_action = $1
+ @table_name = normalize_table_name($2)
+ when /join_table/
+ if attributes.length == 2
+ @migration_action = "join"
+ @join_tables = pluralize_table_names? ? attributes.map(&:plural_name) : attributes.map(&:singular_name)
+
+ set_index_names
+ end
+ when /^create_(.+)/
+ @table_name = normalize_table_name($1)
+ @migration_template = "create_table_migration.rb"
+ end
+ end
+
+ def set_index_names
+ attributes.each_with_index do |attr, i|
+ attr.index_name = [attr, attributes[i - 1]].map { |a| index_name_for(a) }
+ end
+ end
+
+ def index_name_for(attribute)
+ if attribute.foreign_key?
+ attribute.name
+ else
+ attribute.name.singularize.foreign_key
+ end.to_sym
+ end
+
+ def attributes_with_index
+ attributes.select { |a| !a.reference? && a.has_index? }
+ end
+
+ # A migration file name can only contain underscores (_), lowercase characters,
+ # and numbers 0-9. Any other file name will raise an IllegalMigrationNameError.
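+ #
+ # For example, "add_details_to_products" is a valid file name, while
+ # "AddDetailsToProducts" would raise.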
+ def validate_file_name!
+ unless /^[_a-z0-9]+$/.match?(file_name)
+ raise IllegalMigrationNameError.new(file_name)
+ end
+ end
+
+ def normalize_table_name(_table_name)
+ pluralize_table_names? ? _table_name.pluralize : _table_name.singularize
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/rails/generators/active_record/migration/templates/create_table_migration.rb b/activerecord/lib/rails/generators/active_record/migration/templates/create_table_migration.rb
new file mode 100644
index 0000000000..5f7201cfe1
--- /dev/null
+++ b/activerecord/lib/rails/generators/active_record/migration/templates/create_table_migration.rb
@@ -0,0 +1,24 @@
+class <%= migration_class_name %> < ActiveRecord::Migration[<%= ActiveRecord::Migration.current_version %>]
+ def change
+ create_table :<%= table_name %><%= primary_key_type %> do |t|
+<% attributes.each do |attribute| -%>
+<% if attribute.password_digest? -%>
+ t.string :password_digest<%= attribute.inject_options %>
+<% elsif attribute.token? -%>
+ t.string :<%= attribute.name %><%= attribute.inject_options %>
+<% else -%>
+ t.<%= attribute.type %> :<%= attribute.name %><%= attribute.inject_options %>
+<% end -%>
+<% end -%>
+<% if options[:timestamps] %>
+ t.timestamps
+<% end -%>
+ end
+<% attributes.select(&:token?).each do |attribute| -%>
+ add_index :<%= table_name %>, :<%= attribute.index_name %><%= attribute.inject_index_options %>, unique: true
+<% end -%>
+<% attributes_with_index.each do |attribute| -%>
+ add_index :<%= table_name %>, :<%= attribute.index_name %><%= attribute.inject_index_options %>
+<% end -%>
+ end
+end
diff --git a/activerecord/lib/rails/generators/active_record/migration/templates/migration.rb b/activerecord/lib/rails/generators/active_record/migration/templates/migration.rb
new file mode 100644
index 0000000000..481c70201b
--- /dev/null
+++ b/activerecord/lib/rails/generators/active_record/migration/templates/migration.rb
@@ -0,0 +1,46 @@
+class <%= migration_class_name %> < ActiveRecord::Migration[<%= ActiveRecord::Migration.current_version %>]
+<%- if migration_action == 'add' -%>
+ def change
+<% attributes.each do |attribute| -%>
+ <%- if attribute.reference? -%>
+ add_reference :<%= table_name %>, :<%= attribute.name %><%= attribute.inject_options %>
+ <%- elsif attribute.token? -%>
+ add_column :<%= table_name %>, :<%= attribute.name %>, :string<%= attribute.inject_options %>
+ add_index :<%= table_name %>, :<%= attribute.index_name %><%= attribute.inject_index_options %>, unique: true
+ <%- else -%>
+ add_column :<%= table_name %>, :<%= attribute.name %>, :<%= attribute.type %><%= attribute.inject_options %>
+ <%- if attribute.has_index? -%>
+ add_index :<%= table_name %>, :<%= attribute.index_name %><%= attribute.inject_index_options %>
+ <%- end -%>
+ <%- end -%>
+<%- end -%>
+ end
+<%- elsif migration_action == 'join' -%>
+ def change
+ create_join_table :<%= join_tables.first %>, :<%= join_tables.second %> do |t|
+ <%- attributes.each do |attribute| -%>
+ <%- if attribute.reference? -%>
+ t.references :<%= attribute.name %><%= attribute.inject_options %>
+ <%- else -%>
+ <%= '# ' unless attribute.has_index? -%>t.index <%= attribute.index_name %><%= attribute.inject_index_options %>
+ <%- end -%>
+ <%- end -%>
+ end
+ end
+<%- else -%>
+ def change
+<% attributes.each do |attribute| -%>
+<%- if migration_action -%>
+ <%- if attribute.reference? -%>
+ remove_reference :<%= table_name %>, :<%= attribute.name %><%= attribute.inject_options %>
+ <%- else -%>
+ <%- if attribute.has_index? -%>
+ remove_index :<%= table_name %>, :<%= attribute.index_name %><%= attribute.inject_index_options %>
+ <%- end -%>
+ remove_column :<%= table_name %>, :<%= attribute.name %>, :<%= attribute.type %><%= attribute.inject_options %>
+ <%- end -%>
+<%- end -%>
+<%- end -%>
+ end
+<%- end -%>
+end
diff --git a/activerecord/lib/rails/generators/active_record/model/model_generator.rb b/activerecord/lib/rails/generators/active_record/model/model_generator.rb
new file mode 100644
index 0000000000..25e54f3ac8
--- /dev/null
+++ b/activerecord/lib/rails/generators/active_record/model/model_generator.rb
@@ -0,0 +1,48 @@
+# frozen_string_literal: true
+
+require "rails/generators/active_record"
+
+module ActiveRecord
+ module Generators # :nodoc:
+ class ModelGenerator < Base # :nodoc:
+ argument :attributes, type: :array, default: [], banner: "field[:type][:index] field[:type][:index]"
+
+ check_class_collision
+
+ class_option :migration, type: :boolean
+ class_option :timestamps, type: :boolean
+ class_option :parent, type: :string, desc: "The parent class for the generated model"
+ class_option :indexes, type: :boolean, default: true, desc: "Add indexes for references and belongs_to columns"
+ class_option :primary_key_type, type: :string, desc: "The type for primary key"
+
+ # Creates the migration file for the model.
+ def create_migration_file
+ return unless options[:migration] && options[:parent].nil?
+ attributes.each { |a| a.attr_options.delete(:index) if a.reference? && !a.has_index? } if options[:indexes] == false
+ migration_template "../../migration/templates/create_table_migration.rb", File.join(db_migrate_path, "create_#{table_name}.rb")
+ end
+
+ def create_model_file
+ template "model.rb", File.join("app/models", class_path, "#{file_name}.rb")
+ end
+
+ def create_module_file
+ return if regular_class_path.empty?
+ template "module.rb", File.join("app/models", "#{class_path.join('/')}.rb") if behavior == :invoke
+ end
+
+ hook_for :test_framework
+
+ private
+
+ def attributes_with_index
+ attributes.select { |a| !a.reference? && a.has_index? }
+ end
+
+ # Used by the migration template to determine the parent name of the model
+ def parent_class_name
+ options[:parent] || "ApplicationRecord"
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/rails/generators/active_record/model/templates/model.rb b/activerecord/lib/rails/generators/active_record/model/templates/model.rb
new file mode 100644
index 0000000000..55dc65c8ad
--- /dev/null
+++ b/activerecord/lib/rails/generators/active_record/model/templates/model.rb
@@ -0,0 +1,13 @@
+<% module_namespacing do -%>
+class <%= class_name %> < <%= parent_class_name.classify %>
+<% attributes.select(&:reference?).each do |attribute| -%>
+ belongs_to :<%= attribute.name %><%= ', polymorphic: true' if attribute.polymorphic? %><%= ', required: true' if attribute.required? %>
+<% end -%>
+<% attributes.select(&:token?).each do |attribute| -%>
+ has_secure_token<% if attribute.name != "token" %> :<%= attribute.name %><% end %>
+<% end -%>
+<% if attributes.any?(&:password_digest?) -%>
+ has_secure_password
+<% end -%>
+end
+<% end -%>
diff --git a/activerecord/lib/rails/generators/active_record/model/templates/module.rb b/activerecord/lib/rails/generators/active_record/model/templates/module.rb
new file mode 100644
index 0000000000..a3bf1c37b6
--- /dev/null
+++ b/activerecord/lib/rails/generators/active_record/model/templates/module.rb
@@ -0,0 +1,7 @@
+<% module_namespacing do -%>
+module <%= class_path.map(&:camelize).join('::') %>
+ def self.table_name_prefix
+ '<%= namespaced? ? namespaced_class_path.join('_') : class_path.join('_') %>_'
+ end
+end
+<% end -%>