Diffstat (limited to 'activerecord/lib/active_record')
-rw-r--r-- activerecord/lib/active_record/aggregations.rb | 494
-rw-r--r-- activerecord/lib/active_record/association_relation.rb | 15
-rw-r--r-- activerecord/lib/active_record/associations.rb | 3194
-rw-r--r-- activerecord/lib/active_record/associations/alias_tracker.rb | 57
-rw-r--r-- activerecord/lib/active_record/associations/association.rb | 169
-rw-r--r-- activerecord/lib/active_record/associations/association_scope.rb | 190
-rw-r--r-- activerecord/lib/active_record/associations/belongs_to_association.rb | 99
-rw-r--r-- activerecord/lib/active_record/associations/belongs_to_polymorphic_association.rb | 20
-rw-r--r-- activerecord/lib/active_record/associations/builder/association.rb | 11
-rw-r--r-- activerecord/lib/active_record/associations/builder/belongs_to.rb | 90
-rw-r--r-- activerecord/lib/active_record/associations/builder/collection_association.rb | 11
-rw-r--r-- activerecord/lib/active_record/associations/builder/has_and_belongs_to_many.rb | 105
-rw-r--r-- activerecord/lib/active_record/associations/builder/has_many.rb | 2
-rw-r--r-- activerecord/lib/active_record/associations/builder/has_one.rb | 2
-rw-r--r-- activerecord/lib/active_record/associations/builder/singular_association.rb | 13
-rw-r--r-- activerecord/lib/active_record/associations/collection_association.rb | 414
-rw-r--r-- activerecord/lib/active_record/associations/collection_proxy.rb | 321
-rw-r--r-- activerecord/lib/active_record/associations/foreign_association.rb | 9
-rw-r--r-- activerecord/lib/active_record/associations/has_many_association.rb | 60
-rw-r--r-- activerecord/lib/active_record/associations/has_many_through_association.rb | 106
-rw-r--r-- activerecord/lib/active_record/associations/has_one_association.rb | 113
-rw-r--r-- activerecord/lib/active_record/associations/has_one_through_association.rb | 31
-rw-r--r-- activerecord/lib/active_record/associations/join_dependency.rb | 305
-rw-r--r-- activerecord/lib/active_record/associations/join_dependency/join_association.rb | 121
-rw-r--r-- activerecord/lib/active_record/associations/join_dependency/join_base.rb | 19
-rw-r--r-- activerecord/lib/active_record/associations/join_dependency/join_part.rb | 28
-rw-r--r-- activerecord/lib/active_record/associations/preloader.rb | 192
-rw-r--r-- activerecord/lib/active_record/associations/preloader/association.rb | 205
-rw-r--r-- activerecord/lib/active_record/associations/preloader/belongs_to.rb | 17
-rw-r--r-- activerecord/lib/active_record/associations/preloader/collection_association.rb | 18
-rw-r--r-- activerecord/lib/active_record/associations/preloader/has_many.rb | 17
-rw-r--r-- activerecord/lib/active_record/associations/preloader/has_many_through.rb | 19
-rw-r--r-- activerecord/lib/active_record/associations/preloader/has_one.rb | 15
-rw-r--r-- activerecord/lib/active_record/associations/preloader/has_one_through.rb | 9
-rw-r--r-- activerecord/lib/active_record/associations/preloader/singular_association.rb | 21
-rw-r--r-- activerecord/lib/active_record/associations/preloader/through_association.rb | 157
-rw-r--r-- activerecord/lib/active_record/associations/singular_association.rb | 63
-rw-r--r-- activerecord/lib/active_record/associations/through_association.rb | 42
-rw-r--r-- activerecord/lib/active_record/attribute.rb | 207
-rw-r--r-- activerecord/lib/active_record/attribute/user_provided_default.rb | 28
-rw-r--r-- activerecord/lib/active_record/attribute_assignment.rb | 114
-rw-r--r-- activerecord/lib/active_record/attribute_decorators.rb | 53
-rw-r--r-- activerecord/lib/active_record/attribute_methods.rb | 237
-rw-r--r-- activerecord/lib/active_record/attribute_methods/before_type_cast.rb | 16
-rw-r--r-- activerecord/lib/active_record/attribute_methods/dirty.rb | 271
-rw-r--r-- activerecord/lib/active_record/attribute_methods/primary_key.rb | 166
-rw-r--r-- activerecord/lib/active_record/attribute_methods/query.rb | 5
-rw-r--r-- activerecord/lib/active_record/attribute_methods/read.rb | 76
-rw-r--r-- activerecord/lib/active_record/attribute_methods/serialization.rb | 37
-rw-r--r-- activerecord/lib/active_record/attribute_methods/time_zone_conversion.rb | 103
-rw-r--r-- activerecord/lib/active_record/attribute_methods/write.rb | 72
-rw-r--r-- activerecord/lib/active_record/attribute_mutation_tracker.rb | 70
-rw-r--r-- activerecord/lib/active_record/attribute_set.rb | 108
-rw-r--r-- activerecord/lib/active_record/attribute_set/builder.rb | 108
-rw-r--r-- activerecord/lib/active_record/attributes.rb | 73
-rw-r--r-- activerecord/lib/active_record/autosave_association.rb | 93
-rw-r--r-- activerecord/lib/active_record/base.rb | 52
-rw-r--r-- activerecord/lib/active_record/callbacks.rb | 102
-rw-r--r-- activerecord/lib/active_record/coders/json.rb | 4
-rw-r--r-- activerecord/lib/active_record/coders/yaml_column.rb | 22
-rw-r--r-- activerecord/lib/active_record/collection_cache_key.rb | 37
-rw-r--r-- activerecord/lib/active_record/connection_adapters/abstract/connection_pool.rb | 852
-rw-r--r-- activerecord/lib/active_record/connection_adapters/abstract/database_limits.rb | 18
-rw-r--r-- activerecord/lib/active_record/connection_adapters/abstract/database_statements.rb | 316
-rw-r--r-- activerecord/lib/active_record/connection_adapters/abstract/query_cache.rb | 100
-rw-r--r-- activerecord/lib/active_record/connection_adapters/abstract/quoting.rb | 138
-rw-r--r-- activerecord/lib/active_record/connection_adapters/abstract/savepoints.rb | 8
-rw-r--r-- activerecord/lib/active_record/connection_adapters/abstract/schema_creation.rb | 76
-rw-r--r-- activerecord/lib/active_record/connection_adapters/abstract/schema_definitions.rb | 336
-rw-r--r-- activerecord/lib/active_record/connection_adapters/abstract/schema_dumper.rb | 146
-rw-r--r-- activerecord/lib/active_record/connection_adapters/abstract/schema_statements.rb | 574
-rw-r--r-- activerecord/lib/active_record/connection_adapters/abstract/transaction.rb | 231
-rw-r--r-- activerecord/lib/active_record/connection_adapters/abstract_adapter.rb | 558
-rw-r--r-- activerecord/lib/active_record/connection_adapters/abstract_mysql_adapter.rb | 1001
-rw-r--r-- activerecord/lib/active_record/connection_adapters/column.rb | 39
-rw-r--r-- activerecord/lib/active_record/connection_adapters/connection_specification.rb | 309
-rw-r--r-- activerecord/lib/active_record/connection_adapters/determine_if_preparable_visitor.rb | 11
-rw-r--r-- activerecord/lib/active_record/connection_adapters/mysql/column.rb | 39
-rw-r--r-- activerecord/lib/active_record/connection_adapters/mysql/database_statements.rb | 201
-rw-r--r-- activerecord/lib/active_record/connection_adapters/mysql/explain_pretty_printer.rb | 46
-rw-r--r-- activerecord/lib/active_record/connection_adapters/mysql/quoting.rb | 31
-rw-r--r-- activerecord/lib/active_record/connection_adapters/mysql/schema_creation.rb | 103
-rw-r--r-- activerecord/lib/active_record/connection_adapters/mysql/schema_definitions.rb | 114
-rw-r--r-- activerecord/lib/active_record/connection_adapters/mysql/schema_dumper.rb | 112
-rw-r--r-- activerecord/lib/active_record/connection_adapters/mysql/schema_statements.rb | 261
-rw-r--r-- activerecord/lib/active_record/connection_adapters/mysql/type_metadata.rb | 15
-rw-r--r-- activerecord/lib/active_record/connection_adapters/mysql2_adapter.rb | 125
-rw-r--r-- activerecord/lib/active_record/connection_adapters/postgresql/column.rb | 30
-rw-r--r-- activerecord/lib/active_record/connection_adapters/postgresql/database_statements.rb | 110
-rw-r--r-- activerecord/lib/active_record/connection_adapters/postgresql/explain_pretty_printer.rb | 8
-rw-r--r-- activerecord/lib/active_record/connection_adapters/postgresql/oid.rb | 45
-rw-r--r-- activerecord/lib/active_record/connection_adapters/postgresql/oid/array.rb | 46
-rw-r--r-- activerecord/lib/active_record/connection_adapters/postgresql/oid/bit.rb | 15
-rw-r--r-- activerecord/lib/active_record/connection_adapters/postgresql/oid/bit_varying.rb | 2
-rw-r--r-- activerecord/lib/active_record/connection_adapters/postgresql/oid/bytea.rb | 4
-rw-r--r-- activerecord/lib/active_record/connection_adapters/postgresql/oid/cidr.rb | 4
-rw-r--r-- activerecord/lib/active_record/connection_adapters/postgresql/oid/date.rb | 23
-rw-r--r-- activerecord/lib/active_record/connection_adapters/postgresql/oid/date_time.rb | 6
-rw-r--r-- activerecord/lib/active_record/connection_adapters/postgresql/oid/decimal.rb | 4
-rw-r--r-- activerecord/lib/active_record/connection_adapters/postgresql/oid/enum.rb | 8
-rw-r--r-- activerecord/lib/active_record/connection_adapters/postgresql/oid/hstore.rb | 46
-rw-r--r-- activerecord/lib/active_record/connection_adapters/postgresql/oid/inet.rb | 2
-rw-r--r-- activerecord/lib/active_record/connection_adapters/postgresql/oid/jsonb.rb | 14
-rw-r--r-- activerecord/lib/active_record/connection_adapters/postgresql/oid/legacy_point.rb | 45
-rw-r--r-- activerecord/lib/active_record/connection_adapters/postgresql/oid/money.rb | 8
-rw-r--r-- activerecord/lib/active_record/connection_adapters/postgresql/oid/oid.rb (renamed from activerecord/lib/active_record/connection_adapters/postgresql/oid/json.rb) | 7
-rw-r--r-- activerecord/lib/active_record/connection_adapters/postgresql/oid/point.rb | 40
-rw-r--r-- activerecord/lib/active_record/connection_adapters/postgresql/oid/rails_5_1_point.rb | 50
-rw-r--r-- activerecord/lib/active_record/connection_adapters/postgresql/oid/range.rb | 64
-rw-r--r-- activerecord/lib/active_record/connection_adapters/postgresql/oid/specialized_string.rb | 5
-rw-r--r-- activerecord/lib/active_record/connection_adapters/postgresql/oid/type_map_initializer.rb | 112
-rw-r--r-- activerecord/lib/active_record/connection_adapters/postgresql/oid/uuid.rb | 13
-rw-r--r-- activerecord/lib/active_record/connection_adapters/postgresql/oid/vector.rb | 2
-rw-r--r-- activerecord/lib/active_record/connection_adapters/postgresql/oid/xml.rb | 2
-rw-r--r-- activerecord/lib/active_record/connection_adapters/postgresql/quoting.rb | 118
-rw-r--r-- activerecord/lib/active_record/connection_adapters/postgresql/referential_integrity.rb | 44
-rw-r--r-- activerecord/lib/active_record/connection_adapters/postgresql/schema_creation.rb | 76
-rw-r--r-- activerecord/lib/active_record/connection_adapters/postgresql/schema_definitions.rb | 252
-rw-r--r-- activerecord/lib/active_record/connection_adapters/postgresql/schema_dumper.rb | 67
-rw-r--r-- activerecord/lib/active_record/connection_adapters/postgresql/schema_statements.rb | 712
-rw-r--r-- activerecord/lib/active_record/connection_adapters/postgresql/type_metadata.rb | 15
-rw-r--r-- activerecord/lib/active_record/connection_adapters/postgresql/utils.rb | 20
-rw-r--r-- activerecord/lib/active_record/connection_adapters/postgresql_adapter.rb | 701
-rw-r--r-- activerecord/lib/active_record/connection_adapters/schema_cache.rb | 51
-rw-r--r-- activerecord/lib/active_record/connection_adapters/sql_type_metadata.rb | 8
-rw-r--r-- activerecord/lib/active_record/connection_adapters/sqlite3/database_statements.rb | 38
-rw-r--r-- activerecord/lib/active_record/connection_adapters/sqlite3/explain_pretty_printer.rb | 4
-rw-r--r-- activerecord/lib/active_record/connection_adapters/sqlite3/quoting.rb | 65
-rw-r--r-- activerecord/lib/active_record/connection_adapters/sqlite3/schema_creation.rb | 4
-rw-r--r-- activerecord/lib/active_record/connection_adapters/sqlite3/schema_definitions.rb | 19
-rw-r--r-- activerecord/lib/active_record/connection_adapters/sqlite3/schema_dumper.rb | 18
-rw-r--r-- activerecord/lib/active_record/connection_adapters/sqlite3/schema_statements.rb | 137
-rw-r--r-- activerecord/lib/active_record/connection_adapters/sqlite3_adapter.rb | 530
-rw-r--r-- activerecord/lib/active_record/connection_adapters/statement_pool.rb | 26
-rw-r--r-- activerecord/lib/active_record/connection_handling.rb | 198
-rw-r--r-- activerecord/lib/active_record/core.rb | 356
-rw-r--r-- activerecord/lib/active_record/counter_cache.rb | 85
-rw-r--r-- activerecord/lib/active_record/database_configurations.rb | 204
-rw-r--r-- activerecord/lib/active_record/database_configurations/database_config.rb | 37
-rw-r--r-- activerecord/lib/active_record/database_configurations/hash_config.rb | 50
-rw-r--r-- activerecord/lib/active_record/database_configurations/url_config.rb | 79
-rw-r--r-- activerecord/lib/active_record/define_callbacks.rb | 22
-rw-r--r-- activerecord/lib/active_record/dynamic_matchers.rb | 173
-rw-r--r-- activerecord/lib/active_record/enum.rb | 83
-rw-r--r-- activerecord/lib/active_record/errors.rb | 136
-rw-r--r-- activerecord/lib/active_record/explain.rb | 34
-rw-r--r-- activerecord/lib/active_record/explain_registry.rb | 4
-rw-r--r-- activerecord/lib/active_record/explain_subscriber.rb | 13
-rw-r--r-- activerecord/lib/active_record/fixture_set/file.rb | 21
-rw-r--r-- activerecord/lib/active_record/fixture_set/model_metadata.rb | 33
-rw-r--r-- activerecord/lib/active_record/fixture_set/render_context.rb | 17
-rw-r--r-- activerecord/lib/active_record/fixture_set/table_row.rb | 153
-rw-r--r-- activerecord/lib/active_record/fixture_set/table_rows.rb | 47
-rw-r--r-- activerecord/lib/active_record/fixtures.rb | 692
-rw-r--r-- activerecord/lib/active_record/gem_version.rb | 4
-rw-r--r-- activerecord/lib/active_record/inheritance.rb | 248
-rw-r--r-- activerecord/lib/active_record/insert_all.rb | 178
-rw-r--r-- activerecord/lib/active_record/integration.rb | 136
-rw-r--r-- activerecord/lib/active_record/internal_metadata.rb | 27
-rw-r--r-- activerecord/lib/active_record/legacy_yaml_adapter.rb | 6
-rw-r--r-- activerecord/lib/active_record/locking/optimistic.rb | 170
-rw-r--r-- activerecord/lib/active_record/locking/pessimistic.rb | 24
-rw-r--r-- activerecord/lib/active_record/log_subscriber.rb | 85
-rw-r--r-- activerecord/lib/active_record/middleware/database_selector.rb | 75
-rw-r--r-- activerecord/lib/active_record/middleware/database_selector/resolver.rb | 90
-rw-r--r-- activerecord/lib/active_record/middleware/database_selector/resolver/session.rb | 45
-rw-r--r-- activerecord/lib/active_record/migration.rb | 618
-rw-r--r-- activerecord/lib/active_record/migration/command_recorder.rb | 217
-rw-r--r-- activerecord/lib/active_record/migration/compatibility.rb | 228
-rw-r--r-- activerecord/lib/active_record/migration/join_table.rb | 14
-rw-r--r-- activerecord/lib/active_record/model_schema.rb | 410
-rw-r--r-- activerecord/lib/active_record/nested_attributes.rb | 415
-rw-r--r-- activerecord/lib/active_record/no_touching.rb | 15
-rw-r--r-- activerecord/lib/active_record/null_relation.rb | 47
-rw-r--r-- activerecord/lib/active_record/persistence.rb | 538
-rw-r--r-- activerecord/lib/active_record/query_cache.rb | 40
-rw-r--r-- activerecord/lib/active_record/querying.rb | 63
-rw-r--r-- activerecord/lib/active_record/railtie.rb | 165
-rw-r--r-- activerecord/lib/active_record/railties/collection_cache_association_loading.rb | 34
-rw-r--r-- activerecord/lib/active_record/railties/console_sandbox.rb | 2
-rw-r--r-- activerecord/lib/active_record/railties/controller_runtime.rb | 67
-rw-r--r-- activerecord/lib/active_record/railties/databases.rake | 427
-rw-r--r-- activerecord/lib/active_record/railties/jdbcmysql_error.rb | 16
-rw-r--r-- activerecord/lib/active_record/readonly_attributes.rb | 9
-rw-r--r-- activerecord/lib/active_record/reflection.rb | 456
-rw-r--r-- activerecord/lib/active_record/relation.rb | 657
-rw-r--r-- activerecord/lib/active_record/relation/batches.rb | 165
-rw-r--r-- activerecord/lib/active_record/relation/batches/batch_enumerator.rb | 6
-rw-r--r-- activerecord/lib/active_record/relation/calculations.rb | 390
-rw-r--r-- activerecord/lib/active_record/relation/delegation.rb | 129
-rw-r--r-- activerecord/lib/active_record/relation/finder_methods.rb | 458
-rw-r--r-- activerecord/lib/active_record/relation/from_clause.rb | 10
-rw-r--r-- activerecord/lib/active_record/relation/merger.rb | 144
-rw-r--r-- activerecord/lib/active_record/relation/predicate_builder.rb | 191
-rw-r--r-- activerecord/lib/active_record/relation/predicate_builder/array_handler.rb | 34
-rw-r--r-- activerecord/lib/active_record/relation/predicate_builder/association_query_handler.rb | 88
-rw-r--r-- activerecord/lib/active_record/relation/predicate_builder/association_query_value.rb | 43
-rw-r--r-- activerecord/lib/active_record/relation/predicate_builder/base_handler.rb | 7
-rw-r--r-- activerecord/lib/active_record/relation/predicate_builder/basic_object_handler.rb | 10
-rw-r--r-- activerecord/lib/active_record/relation/predicate_builder/class_handler.rb | 27
-rw-r--r-- activerecord/lib/active_record/relation/predicate_builder/polymorphic_array_handler.rb | 57
-rw-r--r-- activerecord/lib/active_record/relation/predicate_builder/polymorphic_array_value.rb | 53
-rw-r--r-- activerecord/lib/active_record/relation/predicate_builder/range_handler.rb | 25
-rw-r--r-- activerecord/lib/active_record/relation/predicate_builder/relation_handler.rb | 6
-rw-r--r-- activerecord/lib/active_record/relation/query_attribute.rb | 35
-rw-r--r-- activerecord/lib/active_record/relation/query_methods.rb | 720
-rw-r--r-- activerecord/lib/active_record/relation/record_fetch_warning.rb | 8
-rw-r--r-- activerecord/lib/active_record/relation/spawn_methods.rb | 15
-rw-r--r-- activerecord/lib/active_record/relation/where_clause.rb | 197
-rw-r--r-- activerecord/lib/active_record/relation/where_clause_factory.rb | 18
-rw-r--r-- activerecord/lib/active_record/result.rb | 116
-rw-r--r-- activerecord/lib/active_record/runtime_registry.rb | 8
-rw-r--r-- activerecord/lib/active_record/sanitization.rb | 150
-rw-r--r-- activerecord/lib/active_record/schema.rb | 21
-rw-r--r-- activerecord/lib/active_record/schema_dumper.rb | 188
-rw-r--r-- activerecord/lib/active_record/schema_migration.rb | 18
-rw-r--r-- activerecord/lib/active_record/scoping.rb | 42
-rw-r--r-- activerecord/lib/active_record/scoping/default.rb | 186
-rw-r--r-- activerecord/lib/active_record/scoping/named.rb | 84
-rw-r--r-- activerecord/lib/active_record/secure_token.rb | 6
-rw-r--r-- activerecord/lib/active_record/serialization.rb | 4
-rw-r--r-- activerecord/lib/active_record/statement_cache.rb | 93
-rw-r--r-- activerecord/lib/active_record/store.rb | 162
-rw-r--r-- activerecord/lib/active_record/suppressor.rb | 5
-rw-r--r-- activerecord/lib/active_record/table_metadata.rb | 35
-rw-r--r-- activerecord/lib/active_record/tasks/database_tasks.rb | 279
-rw-r--r-- activerecord/lib/active_record/tasks/mysql_database_tasks.rb | 144
-rw-r--r-- activerecord/lib/active_record/tasks/postgresql_database_tasks.rb | 127
-rw-r--r-- activerecord/lib/active_record/tasks/sqlite_database_tasks.rb | 50
-rw-r--r-- activerecord/lib/active_record/test_databases.rb | 38
-rw-r--r-- activerecord/lib/active_record/test_fixtures.rb | 224
-rw-r--r-- activerecord/lib/active_record/timestamp.rb | 95
-rw-r--r-- activerecord/lib/active_record/touch_later.rb | 12
-rw-r--r-- activerecord/lib/active_record/transactions.rb | 250
-rw-r--r-- activerecord/lib/active_record/translation.rb | 4
-rw-r--r-- activerecord/lib/active_record/type.rb | 40
-rw-r--r-- activerecord/lib/active_record/type/adapter_specific_registry.rb | 89
-rw-r--r-- activerecord/lib/active_record/type/date.rb | 2
-rw-r--r-- activerecord/lib/active_record/type/date_time.rb | 2
-rw-r--r-- activerecord/lib/active_record/type/decimal_without_scale.rb | 15
-rw-r--r-- activerecord/lib/active_record/type/hash_lookup_type_map.rb | 8
-rw-r--r-- activerecord/lib/active_record/type/internal/abstract_json.rb | 33
-rw-r--r-- activerecord/lib/active_record/type/internal/timezone.rb | 2
-rw-r--r-- activerecord/lib/active_record/type/json.rb | 30
-rw-r--r-- activerecord/lib/active_record/type/serialized.rb | 24
-rw-r--r-- activerecord/lib/active_record/type/text.rb | 11
-rw-r--r-- activerecord/lib/active_record/type/time.rb | 15
-rw-r--r-- activerecord/lib/active_record/type/type_map.rb | 28
-rw-r--r-- activerecord/lib/active_record/type/unsigned_integer.rb | 17
-rw-r--r-- activerecord/lib/active_record/type_caster.rb | 6
-rw-r--r-- activerecord/lib/active_record/type_caster/connection.rb | 17
-rw-r--r-- activerecord/lib/active_record/type_caster/map.rb | 9
-rw-r--r-- activerecord/lib/active_record/validations.rb | 10
-rw-r--r-- activerecord/lib/active_record/validations/absence.rb | 2
-rw-r--r-- activerecord/lib/active_record/validations/associated.rb | 4
-rw-r--r-- activerecord/lib/active_record/validations/length.rb | 2
-rw-r--r-- activerecord/lib/active_record/validations/presence.rb | 6
-rw-r--r-- activerecord/lib/active_record/validations/uniqueness.rb | 81
-rw-r--r-- activerecord/lib/active_record/version.rb | 4
259 files changed, 18132 insertions, 13673 deletions
diff --git a/activerecord/lib/active_record/aggregations.rb b/activerecord/lib/active_record/aggregations.rb
index 3ff41ed81b..3250e29b82 100644
--- a/activerecord/lib/active_record/aggregations.rb
+++ b/activerecord/lib/active_record/aggregations.rb
@@ -1,8 +1,8 @@
+# frozen_string_literal: true
+
module ActiveRecord
# See ActiveRecord::Aggregations::ClassMethods for documentation
module Aggregations
- extend ActiveSupport::Concern
-
def initialize_dup(*) # :nodoc:
@aggregation_cache = {}
super
@@ -15,265 +15,271 @@ module ActiveRecord
private
- def clear_aggregation_cache # :nodoc:
+ def clear_aggregation_cache
@aggregation_cache.clear if persisted?
end
- def init_internals # :nodoc:
+ def init_internals
@aggregation_cache = {}
super
end
- # Active Record implements aggregation through a macro-like class method called #composed_of
- # for representing attributes as value objects. It expresses relationships like "Account [is]
- # composed of Money [among other things]" or "Person [is] composed of [an] address". Each call
- # to the macro adds a description of how the value objects are created from the attributes of
- # the entity object (when the entity is initialized either as a new object or from finding an
- # existing object) and how it can be turned back into attributes (when the entity is saved to
- # the database).
- #
- # class Customer < ActiveRecord::Base
- # composed_of :balance, class_name: "Money", mapping: %w(amount currency)
- # composed_of :address, mapping: [ %w(address_street street), %w(address_city city) ]
- # end
- #
- # The customer class now has the following methods to manipulate the value objects:
- # * <tt>Customer#balance, Customer#balance=(money)</tt>
- # * <tt>Customer#address, Customer#address=(address)</tt>
- #
- # These methods will operate with value objects like the ones described below:
- #
- # class Money
- # include Comparable
- # attr_reader :amount, :currency
- # EXCHANGE_RATES = { "USD_TO_DKK" => 6 }
- #
- # def initialize(amount, currency = "USD")
- # @amount, @currency = amount, currency
- # end
- #
- # def exchange_to(other_currency)
- # exchanged_amount = (amount * EXCHANGE_RATES["#{currency}_TO_#{other_currency}"]).floor
- # Money.new(exchanged_amount, other_currency)
- # end
- #
- # def ==(other_money)
- # amount == other_money.amount && currency == other_money.currency
- # end
- #
- # def <=>(other_money)
- # if currency == other_money.currency
- # amount <=> other_money.amount
- # else
- # amount <=> other_money.exchange_to(currency).amount
- # end
- # end
- # end
- #
- # class Address
- # attr_reader :street, :city
- # def initialize(street, city)
- # @street, @city = street, city
- # end
- #
- # def close_to?(other_address)
- # city == other_address.city
- # end
- #
- # def ==(other_address)
- # city == other_address.city && street == other_address.street
- # end
- # end
- #
- # Now it's possible to access attributes from the database through the value objects instead. If
- # you choose to name the composition the same as the attribute's name, it will be the only way to
- # access that attribute. That's the case with our +balance+ attribute. You interact with the value
- # objects just like you would with any other attribute:
- #
- # customer.balance = Money.new(20) # sets the Money value object and the attribute
- # customer.balance # => Money value object
- # customer.balance.exchange_to("DKK") # => Money.new(120, "DKK")
- # customer.balance > Money.new(10) # => true
- # customer.balance == Money.new(20) # => true
- # customer.balance < Money.new(5) # => false
- #
- # Value objects can also be composed of multiple attributes, such as the case of Address. The order
- # of the mappings will determine the order of the parameters.
- #
- # customer.address_street = "Hyancintvej"
- # customer.address_city = "Copenhagen"
- # customer.address # => Address.new("Hyancintvej", "Copenhagen")
- #
- # customer.address = Address.new("May Street", "Chicago")
- # customer.address_street # => "May Street"
- # customer.address_city # => "Chicago"
- #
- # == Writing value objects
- #
- # Value objects are immutable and interchangeable objects that represent a given value, such as
- # a Money object representing $5. Two Money objects both representing $5 should be equal (through
- # methods such as <tt>==</tt> and <tt><=></tt> from Comparable if ranking makes sense). This is
- # unlike entity objects where equality is determined by identity. An entity class such as Customer can
- # easily have two different objects that both have an address on Hyancintvej. Entity identity is
- # determined by object or relational unique identifiers (such as primary keys). Normal
- # ActiveRecord::Base classes are entity objects.
- #
- # It's also important to treat the value objects as immutable. Don't allow the Money object to have
- # its amount changed after creation. Create a new Money object with the new value instead. The
- # <tt>Money#exchange_to</tt> method is an example of this. It returns a new value object instead of changing
- # its own values. Active Record won't persist value objects that have been changed through means
- # other than the writer method.
- #
- # The immutable requirement is enforced by Active Record by freezing any object assigned as a value
- # object. Attempting to change it afterwards will result in a +RuntimeError+.
- #
- # Read more about value objects on http://c2.com/cgi/wiki?ValueObject and on the dangers of not
- # keeping value objects immutable on http://c2.com/cgi/wiki?ValueObjectsShouldBeImmutable
- #
- # == Custom constructors and converters
- #
- # By default value objects are initialized by calling the <tt>new</tt> constructor of the value
- # class passing each of the mapped attributes, in the order specified by the <tt>:mapping</tt>
- # option, as arguments. If the value class doesn't support this convention then #composed_of allows
- # a custom constructor to be specified.
- #
- # When a new value is assigned to the value object, the default assumption is that the new value
- # is an instance of the value class. Specifying a custom converter allows the new value to be automatically
- # converted to an instance of value class if necessary.
- #
- # For example, the +NetworkResource+ model has +network_address+ and +cidr_range+ attributes that should be
- # aggregated using the +NetAddr::CIDR+ value class (http://www.rubydoc.info/gems/netaddr/1.5.0/NetAddr/CIDR).
- # The constructor for the value class is called +create+ and it expects a CIDR address string as a parameter.
- # New values can be assigned to the value object using either another +NetAddr::CIDR+ object, a string
- # or an array. The <tt>:constructor</tt> and <tt>:converter</tt> options can be used to meet
- # these requirements:
- #
- # class NetworkResource < ActiveRecord::Base
- # composed_of :cidr,
- # class_name: 'NetAddr::CIDR',
- # mapping: [ %w(network_address network), %w(cidr_range bits) ],
- # allow_nil: true,
- # constructor: Proc.new { |network_address, cidr_range| NetAddr::CIDR.create("#{network_address}/#{cidr_range}") },
- # converter: Proc.new { |value| NetAddr::CIDR.create(value.is_a?(Array) ? value.join('/') : value) }
- # end
- #
- # # This calls the :constructor
- # network_resource = NetworkResource.new(network_address: '192.168.0.1', cidr_range: 24)
- #
- # # These assignments will both use the :converter
- # network_resource.cidr = [ '192.168.2.1', 8 ]
- # network_resource.cidr = '192.168.0.1/24'
- #
- # # This assignment won't use the :converter as the value is already an instance of the value class
- # network_resource.cidr = NetAddr::CIDR.create('192.168.2.1/8')
- #
- # # Saving and then reloading will use the :constructor on reload
- # network_resource.save
- # network_resource.reload
- #
- # == Finding records by a value object
- #
- # Once a #composed_of relationship is specified for a model, records can be loaded from the database
- # by specifying an instance of the value object in the conditions hash. The following example
- # finds all customers with +balance_amount+ equal to 20 and +balance_currency+ equal to "USD":
- #
- # Customer.where(balance: Money.new(20, "USD"))
- #
- module ClassMethods
- # Adds reader and writer methods for manipulating a value object:
- # <tt>composed_of :address</tt> adds <tt>address</tt> and <tt>address=(new_address)</tt> methods.
- #
- # Options are:
- # * <tt>:class_name</tt> - Specifies the class name of the association. Use it only if that name
- # can't be inferred from the part id. So <tt>composed_of :address</tt> will by default be linked
- # to the Address class, but if the real class name is +CompanyAddress+, you'll have to specify it
- # with this option.
- # * <tt>:mapping</tt> - Specifies the mapping of entity attributes to attributes of the value
- # object. Each mapping is represented as an array where the first item is the name of the
- # entity attribute and the second item is the name of the attribute in the value object. The
- # order in which mappings are defined determines the order in which attributes are sent to the
- # value class constructor.
- # * <tt>:allow_nil</tt> - Specifies that the value object will not be instantiated when all mapped
- # attributes are +nil+. Setting the value object to +nil+ has the effect of writing +nil+ to all
- # mapped attributes.
- # This defaults to +false+.
- # * <tt>:constructor</tt> - A symbol specifying the name of the constructor method or a Proc that
- # is called to initialize the value object. The constructor is passed all of the mapped attributes,
- # in the order that they are defined in the <tt>:mapping option</tt>, as arguments and uses them
- # to instantiate a <tt>:class_name</tt> object.
- # The default is <tt>:new</tt>.
- # * <tt>:converter</tt> - A symbol specifying the name of a class method of <tt>:class_name</tt>
- # or a Proc that is called when a new value is assigned to the value object. The converter is
- # passed the single value that is used in the assignment and is only called if the new value is
- # not an instance of <tt>:class_name</tt>. If <tt>:allow_nil</tt> is set to true, the converter
- # can return nil to skip the assignment.
- #
- # Option examples:
- # composed_of :temperature, mapping: %w(reading celsius)
- # composed_of :balance, class_name: "Money", mapping: %w(balance amount),
- # converter: Proc.new { |balance| balance.to_money }
- # composed_of :address, mapping: [ %w(address_street street), %w(address_city city) ]
- # composed_of :gps_location
- # composed_of :gps_location, allow_nil: true
- # composed_of :ip_address,
- # class_name: 'IPAddr',
- # mapping: %w(ip to_i),
- # constructor: Proc.new { |ip| IPAddr.new(ip, Socket::AF_INET) },
- # converter: Proc.new { |ip| ip.is_a?(Integer) ? IPAddr.new(ip, Socket::AF_INET) : IPAddr.new(ip.to_s) }
- #
- def composed_of(part_id, options = {})
- options.assert_valid_keys(:class_name, :mapping, :allow_nil, :constructor, :converter)
+ # Active Record implements aggregation through a macro-like class method called #composed_of
+ # for representing attributes as value objects. It expresses relationships like "Account [is]
+ # composed of Money [among other things]" or "Person [is] composed of [an] address". Each call
+ # to the macro adds a description of how the value objects are created from the attributes of
+ # the entity object (when the entity is initialized either as a new object or from finding an
+ # existing object) and how it can be turned back into attributes (when the entity is saved to
+ # the database).
+ #
+ # class Customer < ActiveRecord::Base
+ # composed_of :balance, class_name: "Money", mapping: %w(balance amount)
+ # composed_of :address, mapping: [ %w(address_street street), %w(address_city city) ]
+ # end
+ #
+ # The customer class now has the following methods to manipulate the value objects:
+ # * <tt>Customer#balance, Customer#balance=(money)</tt>
+ # * <tt>Customer#address, Customer#address=(address)</tt>
+ #
+ # These methods will operate with value objects like the ones described below:
+ #
+ # class Money
+ # include Comparable
+ # attr_reader :amount, :currency
+ # EXCHANGE_RATES = { "USD_TO_DKK" => 6 }
+ #
+ # def initialize(amount, currency = "USD")
+ # @amount, @currency = amount, currency
+ # end
+ #
+ # def exchange_to(other_currency)
+ # exchanged_amount = (amount * EXCHANGE_RATES["#{currency}_TO_#{other_currency}"]).floor
+ # Money.new(exchanged_amount, other_currency)
+ # end
+ #
+ # def ==(other_money)
+ # amount == other_money.amount && currency == other_money.currency
+ # end
+ #
+ # def <=>(other_money)
+ # if currency == other_money.currency
+ # amount <=> other_money.amount
+ # else
+ # amount <=> other_money.exchange_to(currency).amount
+ # end
+ # end
+ # end
+ #
+ # class Address
+ # attr_reader :street, :city
+ # def initialize(street, city)
+ # @street, @city = street, city
+ # end
+ #
+ # def close_to?(other_address)
+ # city == other_address.city
+ # end
+ #
+ # def ==(other_address)
+ # city == other_address.city && street == other_address.street
+ # end
+ # end
+ #
+ # Now it's possible to access attributes from the database through the value objects instead. If
+ # you choose to name the composition the same as the attribute's name, it will be the only way to
+ # access that attribute. That's the case with our +balance+ attribute. You interact with the value
+ # objects just like you would with any other attribute:
+ #
+ # customer.balance = Money.new(20) # sets the Money value object and the attribute
+ # customer.balance # => Money value object
+ # customer.balance.exchange_to("DKK") # => Money.new(120, "DKK")
+ # customer.balance > Money.new(10) # => true
+ # customer.balance == Money.new(20) # => true
+ # customer.balance < Money.new(5) # => false
+ #
+ # Value objects can also be composed of multiple attributes, such as the case of Address. The order
+ # of the mappings will determine the order of the parameters.
+ #
+ # customer.address_street = "Hyancintvej"
+ # customer.address_city = "Copenhagen"
+ # customer.address # => Address.new("Hyancintvej", "Copenhagen")
+ #
+ # customer.address = Address.new("May Street", "Chicago")
+ # customer.address_street # => "May Street"
+ # customer.address_city # => "Chicago"
+ #
+ # == Writing value objects
+ #
+ # Value objects are immutable and interchangeable objects that represent a given value, such as
+ # a Money object representing $5. Two Money objects both representing $5 should be equal (through
+ # methods such as <tt>==</tt> and <tt><=></tt> from Comparable if ranking makes sense). This is
+ # unlike entity objects where equality is determined by identity. An entity class such as Customer can
+ # easily have two different objects that both have an address on Hyancintvej. Entity identity is
+ # determined by object or relational unique identifiers (such as primary keys). Normal
+ # ActiveRecord::Base classes are entity objects.
+ #
+ # It's also important to treat the value objects as immutable. Don't allow the Money object to have
+ # its amount changed after creation. Create a new Money object with the new value instead. The
+ # <tt>Money#exchange_to</tt> method is an example of this. It returns a new value object instead of changing
+ # its own values. Active Record won't persist value objects that have been changed through means
+ # other than the writer method.
+ #
+ # The immutable requirement is enforced by Active Record by freezing any object assigned as a value
+ # object. Attempting to change it afterwards will result in a +RuntimeError+.
+ #
+ # Read more about value objects on http://c2.com/cgi/wiki?ValueObject and on the dangers of not
+ # keeping value objects immutable on http://c2.com/cgi/wiki?ValueObjectsShouldBeImmutable
+ #
+ # == Custom constructors and converters
+ #
+ # By default value objects are initialized by calling the <tt>new</tt> constructor of the value
+ # class passing each of the mapped attributes, in the order specified by the <tt>:mapping</tt>
+ # option, as arguments. If the value class doesn't support this convention then #composed_of allows
+ # a custom constructor to be specified.
+ #
+ # When a new value is assigned to the value object, the default assumption is that the new value
+ # is an instance of the value class. Specifying a custom converter allows the new value to be automatically
+ # converted to an instance of value class if necessary.
+ #
+ # For example, the +NetworkResource+ model has +network_address+ and +cidr_range+ attributes that should be
+ # aggregated using the +NetAddr::CIDR+ value class (http://www.rubydoc.info/gems/netaddr/1.5.0/NetAddr/CIDR).
+ # The constructor for the value class is called +create+ and it expects a CIDR address string as a parameter.
+ # New values can be assigned to the value object using either another +NetAddr::CIDR+ object, a string
+ # or an array. The <tt>:constructor</tt> and <tt>:converter</tt> options can be used to meet
+ # these requirements:
+ #
+ # class NetworkResource < ActiveRecord::Base
+ # composed_of :cidr,
+ # class_name: 'NetAddr::CIDR',
+ # mapping: [ %w(network_address network), %w(cidr_range bits) ],
+ # allow_nil: true,
+ # constructor: Proc.new { |network_address, cidr_range| NetAddr::CIDR.create("#{network_address}/#{cidr_range}") },
+ # converter: Proc.new { |value| NetAddr::CIDR.create(value.is_a?(Array) ? value.join('/') : value) }
+ # end
+ #
+ # # This calls the :constructor
+ # network_resource = NetworkResource.new(network_address: '192.168.0.1', cidr_range: 24)
+ #
+ # # These assignments will both use the :converter
+ # network_resource.cidr = [ '192.168.2.1', 8 ]
+ # network_resource.cidr = '192.168.0.1/24'
+ #
+ # # This assignment won't use the :converter as the value is already an instance of the value class
+ # network_resource.cidr = NetAddr::CIDR.create('192.168.2.1/8')
+ #
+ # # Saving and then reloading will use the :constructor on reload
+ # network_resource.save
+ # network_resource.reload
+ #
+ # == Finding records by a value object
+ #
+ # Once a #composed_of relationship is specified for a model, records can be loaded from the database
+ # by specifying an instance of the value object in the conditions hash. The following example
+ # finds all customers with +address_street+ equal to "May Street" and +address_city+ equal to "Chicago":
+ #
+ # Customer.where(address: Address.new("May Street", "Chicago"))
+ #
+ module ClassMethods
+ # Adds reader and writer methods for manipulating a value object:
+ # <tt>composed_of :address</tt> adds <tt>address</tt> and <tt>address=(new_address)</tt> methods.
+ #
+ # Options are:
+ # * <tt>:class_name</tt> - Specifies the class name of the association. Use it only if that name
+ # can't be inferred from the part id. So <tt>composed_of :address</tt> will by default be linked
+ # to the Address class, but if the real class name is +CompanyAddress+, you'll have to specify it
+ # with this option.
+ # * <tt>:mapping</tt> - Specifies the mapping of entity attributes to attributes of the value
+ # object. Each mapping is represented as an array where the first item is the name of the
+ # entity attribute and the second item is the name of the attribute in the value object. The
+ # order in which mappings are defined determines the order in which attributes are sent to the
+ # value class constructor.
+ # * <tt>:allow_nil</tt> - Specifies that the value object will not be instantiated when all mapped
+ # attributes are +nil+. Setting the value object to +nil+ has the effect of writing +nil+ to all
+ # mapped attributes.
+ # This defaults to +false+.
+ # * <tt>:constructor</tt> - A symbol specifying the name of the constructor method or a Proc that
+ # is called to initialize the value object. The constructor is passed all of the mapped attributes,
+ # in the order that they are defined in the <tt>:mapping option</tt>, as arguments and uses them
+ # to instantiate a <tt>:class_name</tt> object.
+ # The default is <tt>:new</tt>.
+ # * <tt>:converter</tt> - A symbol specifying the name of a class method of <tt>:class_name</tt>
+ # or a Proc that is called when a new value is assigned to the value object. The converter is
+ # passed the single value that is used in the assignment and is only called if the new value is
+ # not an instance of <tt>:class_name</tt>. If <tt>:allow_nil</tt> is set to true, the converter
+ # can return +nil+ to skip the assignment.
+ #
+ # Option examples:
+ # composed_of :temperature, mapping: %w(reading celsius)
+ # composed_of :balance, class_name: "Money", mapping: %w(balance amount)
+ # composed_of :address, mapping: [ %w(address_street street), %w(address_city city) ]
+ # composed_of :gps_location
+ # composed_of :gps_location, allow_nil: true
+ # composed_of :ip_address,
+ # class_name: 'IPAddr',
+ # mapping: %w(ip to_i),
+ # constructor: Proc.new { |ip| IPAddr.new(ip, Socket::AF_INET) },
+ # converter: Proc.new { |ip| ip.is_a?(Integer) ? IPAddr.new(ip, Socket::AF_INET) : IPAddr.new(ip.to_s) }
+ #
+ def composed_of(part_id, options = {})
+ options.assert_valid_keys(:class_name, :mapping, :allow_nil, :constructor, :converter)
- name = part_id.id2name
- class_name = options[:class_name] || name.camelize
- mapping = options[:mapping] || [ name, name ]
- mapping = [ mapping ] unless mapping.first.is_a?(Array)
- allow_nil = options[:allow_nil] || false
- constructor = options[:constructor] || :new
- converter = options[:converter]
+ unless self < Aggregations
+ include Aggregations
+ end
- reader_method(name, class_name, mapping, allow_nil, constructor)
- writer_method(name, class_name, mapping, allow_nil, converter)
+ name = part_id.id2name
+ class_name = options[:class_name] || name.camelize
+ mapping = options[:mapping] || [ name, name ]
+ mapping = [ mapping ] unless mapping.first.is_a?(Array)
+ allow_nil = options[:allow_nil] || false
+ constructor = options[:constructor] || :new
+ converter = options[:converter]
- reflection = ActiveRecord::Reflection.create(:composed_of, part_id, nil, options, self)
- Reflection.add_aggregate_reflection self, part_id, reflection
- end
+ reader_method(name, class_name, mapping, allow_nil, constructor)
+ writer_method(name, class_name, mapping, allow_nil, converter)
- private
- def reader_method(name, class_name, mapping, allow_nil, constructor)
- define_method(name) do
- if @aggregation_cache[name].nil? && (!allow_nil || mapping.any? {|key, _| !_read_attribute(key).nil? })
- attrs = mapping.collect {|key, _| _read_attribute(key)}
- object = constructor.respond_to?(:call) ?
- constructor.call(*attrs) :
- class_name.constantize.send(constructor, *attrs)
- @aggregation_cache[name] = object
- end
- @aggregation_cache[name]
- end
+ reflection = ActiveRecord::Reflection.create(:composed_of, part_id, nil, options, self)
+ Reflection.add_aggregate_reflection self, part_id, reflection
end
- def writer_method(name, class_name, mapping, allow_nil, converter)
- define_method("#{name}=") do |part|
- klass = class_name.constantize
- if part.is_a?(Hash)
- raise ArgumentError unless part.size == part.keys.max
- part = klass.new(*part.sort.map(&:last))
+ private
+ def reader_method(name, class_name, mapping, allow_nil, constructor)
+ define_method(name) do
+ if @aggregation_cache[name].nil? && (!allow_nil || mapping.any? { |key, _| !_read_attribute(key).nil? })
+ attrs = mapping.collect { |key, _| _read_attribute(key) }
+ object = constructor.respond_to?(:call) ?
+ constructor.call(*attrs) :
+ class_name.constantize.send(constructor, *attrs)
+ @aggregation_cache[name] = object
+ end
+ @aggregation_cache[name]
end
+ end
- unless part.is_a?(klass) || converter.nil? || part.nil?
- part = converter.respond_to?(:call) ? converter.call(part) : klass.send(converter, part)
- end
+ def writer_method(name, class_name, mapping, allow_nil, converter)
+ define_method("#{name}=") do |part|
+ klass = class_name.constantize
- if part.nil? && allow_nil
- mapping.each { |key, _| self[key] = nil }
- @aggregation_cache[name] = nil
- else
- mapping.each { |key, value| self[key] = part.send(value) }
- @aggregation_cache[name] = part.freeze
+ unless part.is_a?(klass) || converter.nil? || part.nil?
+ part = converter.respond_to?(:call) ? converter.call(part) : klass.send(converter, part)
+ end
+
+ hash_from_multiparameter_assignment = part.is_a?(Hash) &&
+ part.each_key.all? { |k| k.is_a?(Integer) }
+ if hash_from_multiparameter_assignment
+ raise ArgumentError unless part.size == part.each_key.max
+ part = klass.new(*part.sort.map(&:last))
+ end
+
+ if part.nil? && allow_nil
+ mapping.each { |key, _| self[key] = nil }
+ @aggregation_cache[name] = nil
+ else
+ mapping.each { |key, value| self[key] = part.send(value) }
+ @aggregation_cache[name] = part.freeze
+ end
end
end
- end
- end
+ end
end
end
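Note on the writer_method change above: the :converter now runs before the multiparameter-assignment expansion, and only a Hash whose keys are all Integers is expanded positionally into the value class constructor. A minimal sketch of what that allows, reusing the Money value class from the documentation above with a hypothetical Customer model (illustrative only, not part of this commit):

    class Customer < ActiveRecord::Base
      composed_of :balance,
        class_name: "Money",
        mapping: %w(balance amount),
        converter: Proc.new { |value|
          # The converter may now receive a plain Hash; previously any Hash was
          # treated as multiparameter assignment before the converter ran.
          value.is_a?(Hash) ? Money.new(value[:amount], value[:currency]) : Money.new(value)
        }
    end

    customer = Customer.new
    customer.balance = { amount: 20, currency: "DKK" } # handled by the :converter
    customer.balance = 20                              # also handled by the :converter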
diff --git a/activerecord/lib/active_record/association_relation.rb b/activerecord/lib/active_record/association_relation.rb
index c18e88e4cf..4c538ef2bd 100644
--- a/activerecord/lib/active_record/association_relation.rb
+++ b/activerecord/lib/active_record/association_relation.rb
@@ -1,7 +1,9 @@
+# frozen_string_literal: true
+
module ActiveRecord
class AssociationRelation < Relation
- def initialize(klass, table, predicate_builder, association)
- super(klass, table, predicate_builder)
+ def initialize(klass, association)
+ super(klass)
@association = association
end
@@ -28,8 +30,11 @@ module ActiveRecord
private
- def exec_queries
- super.each { |r| @association.set_inverse_instance r }
- end
+ def exec_queries
+ super do |record|
+ @association.set_inverse_instance_from_queries(record)
+ yield record if block_given?
+ end
+ end
end
end
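For context on the AssociationRelation change above: exec_queries now yields each record as it is materialized and links the inverse association through set_inverse_instance_from_queries. A rough sketch of the observable effect, assuming a hypothetical Post/Comment pair with inverse_of configured (illustrative only, not part of this commit):

    class Post < ActiveRecord::Base
      has_many :comments, inverse_of: :post
    end

    class Comment < ActiveRecord::Base
      belongs_to :post, inverse_of: :comments
    end

    post    = Post.first
    comment = post.comments.where("body LIKE ?", "%hello%").first
    comment.post.equal?(post) # => true; the inverse is set while the query runs, no extra SELECT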
diff --git a/activerecord/lib/active_record/associations.rb b/activerecord/lib/active_record/associations.rb
index 77d17fc975..64c20adc87 100644
--- a/activerecord/lib/active_record/associations.rb
+++ b/activerecord/lib/active_record/associations.rb
@@ -1,7 +1,9 @@
-require 'active_support/core_ext/enumerable'
-require 'active_support/core_ext/string/conversions'
-require 'active_support/core_ext/module/remove_method'
-require 'active_record/errors'
+# frozen_string_literal: true
+
+require "active_support/core_ext/enumerable"
+require "active_support/core_ext/string/conversions"
+require "active_support/core_ext/module/remove_method"
+require "active_record/errors"
module ActiveRecord
class AssociationNotFoundError < ConfigurationError #:nodoc:
@@ -90,13 +92,23 @@ module ActiveRecord
through_reflection = reflection.through_reflection
source_reflection_names = reflection.source_reflection_names
source_associations = reflection.through_reflection.klass._reflections.keys
- super("Could not find the source association(s) #{source_reflection_names.collect(&:inspect).to_sentence(:two_words_connector => ' or ', :last_word_connector => ', or ', :locale => :en)} in model #{through_reflection.klass}. Try 'has_many #{reflection.name.inspect}, :through => #{through_reflection.name.inspect}, :source => <name>'. Is it one of #{source_associations.to_sentence(:two_words_connector => ' or ', :last_word_connector => ', or ', :locale => :en)}?")
+ super("Could not find the source association(s) #{source_reflection_names.collect(&:inspect).to_sentence(two_words_connector: ' or ', last_word_connector: ', or ', locale: :en)} in model #{through_reflection.klass}. Try 'has_many #{reflection.name.inspect}, :through => #{through_reflection.name.inspect}, :source => <name>'. Is it one of #{source_associations.to_sentence(two_words_connector: ' or ', last_word_connector: ', or ', locale: :en)}?")
else
super("Could not find the source association(s).")
end
end
end
+ class HasManyThroughOrderError < ActiveRecordError #:nodoc:
+ def initialize(owner_class_name = nil, reflection = nil, through_reflection = nil)
+ if owner_class_name && reflection && through_reflection
+ super("Cannot have a has_many :through association '#{owner_class_name}##{reflection.name}' which goes through '#{owner_class_name}##{through_reflection.name}' before the through association is defined.")
+ else
+ super("Cannot have a has_many :through association before the through association is defined.")
+ end
+ end
+ end
+
class ThroughCantAssociateThroughHasOneOrManyReflection < ActiveRecordError #:nodoc:
def initialize(owner = nil, reflection = nil)
if owner && reflection
@@ -107,30 +119,25 @@ module ActiveRecord
end
end
- class HasManyThroughCantAssociateThroughHasOneOrManyReflection < ThroughCantAssociateThroughHasOneOrManyReflection #:nodoc:
- end
+ class AmbiguousSourceReflectionForThroughAssociation < ActiveRecordError # :nodoc:
+ def initialize(klass, macro, association_name, options, possible_sources)
+ example_options = options.dup
+ example_options[:source] = possible_sources.first
- class HasOneThroughCantAssociateThroughHasOneOrManyReflection < ThroughCantAssociateThroughHasOneOrManyReflection #:nodoc:
+ super("Ambiguous source reflection for through association. Please " \
+ "specify a :source directive on your declaration like:\n" \
+ "\n" \
+ " class #{klass} < ActiveRecord::Base\n" \
+ " #{macro} :#{association_name}, #{example_options}\n" \
+ " end"
+ )
+ end
end
- class HasManyThroughCantAssociateNewRecords < ActiveRecordError #:nodoc:
- def initialize(owner = nil, reflection = nil)
- if owner && reflection
- super("Cannot associate new records through '#{owner.class.name}##{reflection.name}' on '#{reflection.source_reflection.class_name rescue nil}##{reflection.source_reflection.name rescue nil}'. Both records must have an id in order to create the has_many :through record associating them.")
- else
- super("Cannot associate new records.")
- end
- end
+ class HasManyThroughCantAssociateThroughHasOneOrManyReflection < ThroughCantAssociateThroughHasOneOrManyReflection #:nodoc:
end
- class HasManyThroughCantDissociateNewRecords < ActiveRecordError #:nodoc:
- def initialize(owner = nil, reflection = nil)
- if owner && reflection
- super("Cannot dissociate new records through '#{owner.class.name}##{reflection.name}' on '#{reflection.source_reflection.class_name rescue nil}##{reflection.source_reflection.name rescue nil}'. Both records must have an id in order to delete the has_many :through record associating them.")
- else
- super("Cannot dissociate new records.")
- end
- end
+ class HasOneThroughCantAssociateThroughHasOneOrManyReflection < ThroughCantAssociateThroughHasOneOrManyReflection #:nodoc:
end
class ThroughNestedAssociationsAreReadonly < ActiveRecordError #:nodoc:
@@ -162,16 +169,6 @@ module ActiveRecord
end
end
- class ReadOnlyAssociation < ActiveRecordError #:nodoc:
- def initialize(reflection = nil)
- if reflection
- super("Cannot add to a has_many :through association. Try adding to #{reflection.through_reflection.name.inspect}.")
- else
- super("Read-only reflection error.")
- end
- end
- end
-
# This error is raised when trying to destroy a parent instance in N:1 or 1:1 associations
# (has_many, has_one) when there is at least 1 child associated instance.
# ex: if @project.tasks.size > 0, DeleteRestrictionError will be raised when trying to destroy @project
@@ -197,33 +194,38 @@ module ActiveRecord
autoload :CollectionAssociation
autoload :ForeignAssociation
autoload :CollectionProxy
-
- autoload :BelongsToAssociation
- autoload :BelongsToPolymorphicAssociation
- autoload :HasManyAssociation
- autoload :HasManyThroughAssociation
- autoload :HasOneAssociation
- autoload :HasOneThroughAssociation
autoload :ThroughAssociation
module Builder #:nodoc:
- autoload :Association, 'active_record/associations/builder/association'
- autoload :SingularAssociation, 'active_record/associations/builder/singular_association'
- autoload :CollectionAssociation, 'active_record/associations/builder/collection_association'
+ autoload :Association, "active_record/associations/builder/association"
+ autoload :SingularAssociation, "active_record/associations/builder/singular_association"
+ autoload :CollectionAssociation, "active_record/associations/builder/collection_association"
- autoload :BelongsTo, 'active_record/associations/builder/belongs_to'
- autoload :HasOne, 'active_record/associations/builder/has_one'
- autoload :HasMany, 'active_record/associations/builder/has_many'
- autoload :HasAndBelongsToMany, 'active_record/associations/builder/has_and_belongs_to_many'
+ autoload :BelongsTo, "active_record/associations/builder/belongs_to"
+ autoload :HasOne, "active_record/associations/builder/has_one"
+ autoload :HasMany, "active_record/associations/builder/has_many"
+ autoload :HasAndBelongsToMany, "active_record/associations/builder/has_and_belongs_to_many"
end
eager_autoload do
+ autoload :BelongsToAssociation
+ autoload :BelongsToPolymorphicAssociation
+ autoload :HasManyAssociation
+ autoload :HasManyThroughAssociation
+ autoload :HasOneAssociation
+ autoload :HasOneThroughAssociation
+
autoload :Preloader
autoload :JoinDependency
autoload :AssociationScope
autoload :AliasTracker
end
+ def self.eager_load!
+ super
+ Preloader.eager_load!
+ end
+
# Returns the association instance for the given name, instantiating it if it doesn't already exist
def association(name) #:nodoc:
association = association_instance_get(name)
@@ -239,7 +241,7 @@ module ActiveRecord
association
end
- def association_cached?(name) # :nodoc
+ def association_cached?(name) # :nodoc:
@association_cache.key?(name)
end
@@ -255,16 +257,16 @@ module ActiveRecord
private
# Clears out the association cache.
- def clear_association_cache # :nodoc:
+ def clear_association_cache
@association_cache.clear if persisted?
end
- def init_internals # :nodoc:
+ def init_internals
@association_cache = {}
super
end
- # Returns the specified association instance if it exists, nil otherwise.
+ # Returns the specified association instance if it exists, +nil+ otherwise.
def association_instance_get(name)
@association_cache[name]
end
@@ -274,1556 +276,1590 @@ module ActiveRecord
@association_cache[name] = association
end
- # \Associations are a set of macro-like class methods for tying objects together through
- # foreign keys. They express relationships like "Project has one Project Manager"
- # or "Project belongs to a Portfolio". Each macro adds a number of methods to the
- # class which are specialized according to the collection or association symbol and the
- # options hash. It works much the same way as Ruby's own <tt>attr*</tt>
- # methods.
- #
- # class Project < ActiveRecord::Base
- # belongs_to :portfolio
- # has_one :project_manager
- # has_many :milestones
- # has_and_belongs_to_many :categories
- # end
- #
- # The project class now has the following methods (and more) to ease the traversal and
- # manipulation of its relationships:
- # * <tt>Project#portfolio, Project#portfolio=(portfolio), Project#portfolio.nil?</tt>
- # * <tt>Project#project_manager, Project#project_manager=(project_manager), Project#project_manager.nil?,</tt>
- # * <tt>Project#milestones.empty?, Project#milestones.size, Project#milestones, Project#milestones<<(milestone),</tt>
- # <tt>Project#milestones.delete(milestone), Project#milestones.destroy(milestone), Project#milestones.find(milestone_id),</tt>
- # <tt>Project#milestones.build, Project#milestones.create</tt>
- # * <tt>Project#categories.empty?, Project#categories.size, Project#categories, Project#categories<<(category1),</tt>
- # <tt>Project#categories.delete(category1), Project#categories.destroy(category1)</tt>
- #
- # === A word of warning
- #
- # Don't create associations that have the same name as instance methods of
- # ActiveRecord::Base. Since the association adds a method with that name to
- # its model, it will override the inherited method and break things.
- # For instance, +attributes+ and +connection+ would be bad choices for association names.
- #
- # == Auto-generated methods
- # See also Instance Public methods below for more details.
- #
- # === Singular associations (one-to-one)
- # | | belongs_to |
- # generated methods | belongs_to | :polymorphic | has_one
- # ----------------------------------+------------+--------------+---------
- # other(force_reload=false) | X | X | X
- # other=(other) | X | X | X
- # build_other(attributes={}) | X | | X
- # create_other(attributes={}) | X | | X
- # create_other!(attributes={}) | X | | X
- #
- # ===Collection associations (one-to-many / many-to-many)
- # | | | has_many
- # generated methods | habtm | has_many | :through
- # ----------------------------------+-------+----------+----------
- # others(force_reload=false) | X | X | X
- # others=(other,other,...) | X | X | X
- # other_ids | X | X | X
- # other_ids=(id,id,...) | X | X | X
- # others<< | X | X | X
- # others.push | X | X | X
- # others.concat | X | X | X
- # others.build(attributes={}) | X | X | X
- # others.create(attributes={}) | X | X | X
- # others.create!(attributes={}) | X | X | X
- # others.size | X | X | X
- # others.length | X | X | X
- # others.count | X | X | X
- # others.sum(*args) | X | X | X
- # others.empty? | X | X | X
- # others.clear | X | X | X
- # others.delete(other,other,...) | X | X | X
- # others.delete_all | X | X | X
- # others.destroy(other,other,...) | X | X | X
- # others.destroy_all | X | X | X
- # others.find(*args) | X | X | X
- # others.exists? | X | X | X
- # others.distinct | X | X | X
- # others.reset | X | X | X
- #
- # === Overriding generated methods
- #
- # Association methods are generated in a module that is included into the model class,
- # which allows you to easily override with your own methods and call the original
- # generated method with +super+. For example:
- #
- # class Car < ActiveRecord::Base
- # belongs_to :owner
- # belongs_to :old_owner
- # def owner=(new_owner)
- # self.old_owner = self.owner
- # super
- # end
- # end
- #
- # If your model class is <tt>Project</tt>, the module is
- # named <tt>Project::GeneratedAssociationMethods</tt>. The +GeneratedAssociationMethods+ module is
- # included in the model class immediately after the (anonymous) generated attributes methods
- # module, meaning an association will override the methods for an attribute with the same name.
- #
- # == Cardinality and associations
- #
- # Active Record associations can be used to describe one-to-one, one-to-many and many-to-many
- # relationships between models. Each model uses an association to describe its role in
- # the relation. The #belongs_to association is always used in the model that has
- # the foreign key.
- #
- # === One-to-one
- #
- # Use #has_one in the base, and #belongs_to in the associated model.
- #
- # class Employee < ActiveRecord::Base
- # has_one :office
- # end
- # class Office < ActiveRecord::Base
- # belongs_to :employee # foreign key - employee_id
- # end
- #
- # === One-to-many
- #
- # Use #has_many in the base, and #belongs_to in the associated model.
- #
- # class Manager < ActiveRecord::Base
- # has_many :employees
- # end
- # class Employee < ActiveRecord::Base
- # belongs_to :manager # foreign key - manager_id
- # end
- #
- # === Many-to-many
- #
- # There are two ways to build a many-to-many relationship.
- #
- # The first way uses a #has_many association with the <tt>:through</tt> option and a join model, so
- # there are two stages of associations.
- #
- # class Assignment < ActiveRecord::Base
- # belongs_to :programmer # foreign key - programmer_id
- # belongs_to :project # foreign key - project_id
- # end
- # class Programmer < ActiveRecord::Base
- # has_many :assignments
- # has_many :projects, through: :assignments
- # end
- # class Project < ActiveRecord::Base
- # has_many :assignments
- # has_many :programmers, through: :assignments
- # end
- #
- # For the second way, use #has_and_belongs_to_many in both models. This requires a join table
- # that has no corresponding model or primary key.
- #
- # class Programmer < ActiveRecord::Base
- # has_and_belongs_to_many :projects # foreign keys in the join table
- # end
- # class Project < ActiveRecord::Base
- # has_and_belongs_to_many :programmers # foreign keys in the join table
- # end
- #
- # Choosing which way to build a many-to-many relationship is not always simple.
- # If you need to work with the relationship model as its own entity,
- # use #has_many <tt>:through</tt>. Use #has_and_belongs_to_many when working with legacy schemas or when
- # you never work directly with the relationship itself.
- #
- # == Is it a #belongs_to or #has_one association?
- #
- # Both express a 1-1 relationship. The difference is mostly where to place the foreign
- # key, which goes on the table for the class declaring the #belongs_to relationship.
- #
- # class User < ActiveRecord::Base
- # # I reference an account.
- # belongs_to :account
- # end
- #
- # class Account < ActiveRecord::Base
- # # One user references me.
- # has_one :user
- # end
- #
- # The tables for these classes could look something like:
- #
- # CREATE TABLE users (
- # id int NOT NULL auto_increment,
- # account_id int default NULL,
- # name varchar default NULL,
- # PRIMARY KEY (id)
- # )
- #
- # CREATE TABLE accounts (
- # id int NOT NULL auto_increment,
- # name varchar default NULL,
- # PRIMARY KEY (id)
- # )
- #
- # == Unsaved objects and associations
- #
- # You can manipulate objects and associations before they are saved to the database, but
- # there is some special behavior you should be aware of, mostly involving the saving of
- # associated objects.
- #
- # You can set the <tt>:autosave</tt> option on a #has_one, #belongs_to,
- # #has_many, or #has_and_belongs_to_many association. Setting it
- # to +true+ will _always_ save the members, whereas setting it to +false+ will
- # _never_ save the members. More details about the <tt>:autosave</tt> option are
- # available at AutosaveAssociation.
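- #
- # For instance (a minimal sketch, not part of the original reference; the +Post+
- # and +Comment+ models and the +body+ attribute are assumed), <tt>autosave: true</tt>
- # makes the parent also save its loaded, modified members:
- #
- #   class Post < ActiveRecord::Base
- #     has_many :comments, autosave: true
- #   end
- #
- #   post = Post.first
- #   post.comments.load                    # make sure the collection is loaded
- #   post.comments.first.body = "Updated"
- #   post.save                             # also saves the changed comment (autosave: true)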
- #
- # === One-to-one associations
- #
- # * Assigning an object to a #has_one association automatically saves that object and
- # the object being replaced (if there is one), in order to update their foreign
- # keys - except if the parent object is unsaved (<tt>new_record? == true</tt>).
- # * If either of these saves fail (due to one of the objects being invalid), an
- # ActiveRecord::RecordNotSaved exception is raised and the assignment is
- # cancelled.
- # * If you wish to assign an object to a #has_one association without saving it,
- # use the <tt>#build_association</tt> method (documented below). The object being
- # replaced will still be saved to update its foreign key.
- # * Assigning an object to a #belongs_to association does not save the object, since
- # the foreign key field belongs on the parent. It does not save the parent either.
- #
- # === Collections
- #
- # * Adding an object to a collection (#has_many or #has_and_belongs_to_many) automatically
- # saves that object, except if the parent object (the owner of the collection) is not yet
- # stored in the database.
- # * If saving any of the objects being added to a collection (via <tt>push</tt> or similar)
- # fails, then <tt>push</tt> returns +false+.
- # * If saving fails while replacing the collection (via <tt>association=</tt>), an
- # ActiveRecord::RecordNotSaved exception is raised and the assignment is
- # cancelled.
- #   * You can add an object to a collection without automatically saving it by using the
- #     <tt>collection.build</tt> method (documented below), as shown in the sketch after this list.
- # * All unsaved (<tt>new_record? == true</tt>) members of the collection are automatically
- # saved when the parent is saved.
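- #
- # A short sketch of the behaviors above (assuming a persisted +Firm+ with a
- # +clients+ collection; the attribute names are illustrative):
- #
- #   firm = Firm.first
- #   firm.clients.build(name: "Acme")          # not yet saved; persisted when firm.save runs
- #   firm.clients << Client.new(name: "Apex")  # saved immediately, since firm is persisted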
- #
- # == Customizing the query
- #
- # \Associations are built from <tt>Relation</tt> objects, and you can use the Relation syntax
- # to customize them. For example, to add a condition:
- #
- # class Blog < ActiveRecord::Base
- # has_many :published_posts, -> { where(published: true) }, class_name: 'Post'
- # end
- #
- # Inside the <tt>-> { ... }</tt> block you can use all of the usual Relation methods.
- #
- # === Accessing the owner object
- #
- # Sometimes it is useful to have access to the owner object when building the query. The owner
- # is passed as a parameter to the block. For example, the following association would find all
- # events that occur on the user's birthday:
- #
- # class User < ActiveRecord::Base
- # has_many :birthday_events, ->(user) { where(starts_on: user.birthday) }, class_name: 'Event'
- # end
- #
- # Note: Joining, eager loading, and preloading of these associations are not fully possible.
- # These operations happen before instance creation and the scope will be called with a +nil+ argument.
- # This can lead to unexpected behavior and is deprecated.
- #
- # == Association callbacks
- #
- # Similar to the normal callbacks that hook into the life cycle of an Active Record object,
- # you can also define callbacks that get triggered when you add an object to or remove an
- # object from an association collection.
- #
- # class Project
- # has_and_belongs_to_many :developers, after_add: :evaluate_velocity
- #
- # def evaluate_velocity(developer)
- # ...
- # end
- # end
- #
- # It's possible to stack callbacks by passing them as an array. Example:
- #
- # class Project
- # has_and_belongs_to_many :developers,
- # after_add: [:evaluate_velocity, Proc.new { |p, d| p.shipping_date = Time.now}]
- # end
- #
- # Possible callbacks are: +before_add+, +after_add+, +before_remove+ and +after_remove+.
- #
- # If any of the +before_add+ callbacks throw an exception, the object will not be
- # added to the collection.
- #
- # Similarly, if any of the +before_remove+ callbacks throw an exception, the object
- # will not be removed from the collection.
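- #
- # As a rough sketch (the callback and its check are illustrative, not from the
- # original docs), raising from a +before_add+ callback keeps the record out of
- # the collection:
- #
- #   class Project
- #     has_and_belongs_to_many :developers, before_add: :check_team_size
- #
- #     def check_team_size(developer)
- #       raise "team is full" if developers.size >= 10
- #     end
- #   end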
- #
- # == Association extensions
- #
- # The proxy objects that control the access to associations can be extended through anonymous
- # modules. This is especially beneficial for adding new finders, creators, and other
- # factory-type methods that are only used as part of this association.
- #
- # class Account < ActiveRecord::Base
- # has_many :people do
- # def find_or_create_by_name(name)
- # first_name, last_name = name.split(" ", 2)
- # find_or_create_by(first_name: first_name, last_name: last_name)
- # end
- # end
- # end
- #
- # person = Account.first.people.find_or_create_by_name("David Heinemeier Hansson")
- # person.first_name # => "David"
- # person.last_name # => "Heinemeier Hansson"
- #
- # If you need to share the same extensions between many associations, you can use a named
- # extension module.
- #
- # module FindOrCreateByNameExtension
- # def find_or_create_by_name(name)
- # first_name, last_name = name.split(" ", 2)
- # find_or_create_by(first_name: first_name, last_name: last_name)
- # end
- # end
- #
- # class Account < ActiveRecord::Base
- # has_many :people, -> { extending FindOrCreateByNameExtension }
- # end
- #
- # class Company < ActiveRecord::Base
- # has_many :people, -> { extending FindOrCreateByNameExtension }
- # end
- #
- # Some extensions can only be made to work with knowledge of the association's internals.
- # Extensions can access relevant state using the following methods (where +items+ is the
- # name of the association):
- #
- # * <tt>record.association(:items).owner</tt> - Returns the object the association is part of.
- # * <tt>record.association(:items).reflection</tt> - Returns the reflection object that describes the association.
- # * <tt>record.association(:items).target</tt> - Returns the associated object for #belongs_to and #has_one, or
- # the collection of associated objects for #has_many and #has_and_belongs_to_many.
- #
- # However, inside the actual extension code, you will not have access to the <tt>record</tt> as
- # above. In this case, you can access <tt>proxy_association</tt>. For example,
- # <tt>record.association(:items)</tt> and <tt>record.items.proxy_association</tt> will return
- # the same object, allowing you to make calls like <tt>proxy_association.owner</tt> inside
- # association extensions.
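- #
- # For instance (a hedged sketch reusing the Account extension above, and assuming
- # Account has a +name+ column), an extension method can reach the owning record
- # through +proxy_association+:
- #
- #   class Account < ActiveRecord::Base
- #     has_many :people do
- #       def label
- #         "#{proxy_association.owner.name}'s people"
- #       end
- #     end
- #   end
- #
- #   Account.first.people.label # => e.g. "Initech's people"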
- #
- # == Association Join Models
- #
- # Has Many associations can be configured with the <tt>:through</tt> option to use an
- # explicit join model to retrieve the data. This operates similarly to a
- # #has_and_belongs_to_many association. The advantage is that you're able to add validations,
- # callbacks, and extra attributes on the join model. Consider the following schema:
- #
- # class Author < ActiveRecord::Base
- # has_many :authorships
- # has_many :books, through: :authorships
- # end
- #
- # class Authorship < ActiveRecord::Base
- # belongs_to :author
- # belongs_to :book
- # end
- #
- # @author = Author.first
- # @author.authorships.collect { |a| a.book } # selects all books that the author's authorships belong to
- # @author.books # selects all books by using the Authorship join model
- #
- # You can also go through a #has_many association on the join model:
- #
- # class Firm < ActiveRecord::Base
- # has_many :clients
- # has_many :invoices, through: :clients
- # end
- #
- # class Client < ActiveRecord::Base
- # belongs_to :firm
- # has_many :invoices
- # end
- #
- # class Invoice < ActiveRecord::Base
- # belongs_to :client
- # end
- #
- # @firm = Firm.first
- # @firm.clients.flat_map { |c| c.invoices } # select all invoices for all clients of the firm
- # @firm.invoices # selects all invoices by going through the Client join model
- #
- # Similarly you can go through a #has_one association on the join model:
- #
- # class Group < ActiveRecord::Base
- # has_many :users
- # has_many :avatars, through: :users
- # end
- #
- # class User < ActiveRecord::Base
- # belongs_to :group
- # has_one :avatar
- # end
- #
- # class Avatar < ActiveRecord::Base
- # belongs_to :user
- # end
- #
- # @group = Group.first
- # @group.users.collect { |u| u.avatar }.compact # select all avatars for all users in the group
- # @group.avatars # selects all avatars by going through the User join model.
- #
- # An important caveat with going through #has_one or #has_many associations on the
- # join model is that these associations are *read-only*. For example, the following
- # would not work following the previous example:
- #
- # @group.avatars << Avatar.new # this would work if User belonged_to Avatar rather than the other way around
- # @group.avatars.delete(@group.avatars.last) # so would this
- #
- # == Setting Inverses
- #
- # If you are using a #belongs_to on the join model, it is a good idea to set the
- # <tt>:inverse_of</tt> option on the #belongs_to, which will mean that the following example
- # works correctly (where <tt>tags</tt> is a #has_many <tt>:through</tt> association):
- #
- # @post = Post.first
- # @tag = @post.tags.build name: "ruby"
- # @tag.save
- #
- # The last line ought to save the through record (a <tt>Tagging</tt>). This will only work if the
- # <tt>:inverse_of</tt> is set:
- #
- # class Tagging < ActiveRecord::Base
- # belongs_to :post
- # belongs_to :tag, inverse_of: :taggings
- # end
- #
- # If you do not set the <tt>:inverse_of</tt> option, the association will
- # do its best to match itself up with the correct inverse. Automatic
- # inverse detection only works on #has_many, #has_one, and
- # #belongs_to associations.
- #
- # Extra options on the associations, as defined in the
- # <tt>AssociationReflection::INVALID_AUTOMATIC_INVERSE_OPTIONS</tt> constant, will
- # also prevent the association's inverse from being found automatically.
- #
- # The automatic guessing of the inverse association uses a heuristic based
- # on the name of the class, so it may not work for all associations,
- # especially the ones with non-standard names.
- #
- # You can turn off the automatic detection of inverse associations by setting
- # the <tt>:inverse_of</tt> option to <tt>false</tt> like so:
- #
- # class Tagging < ActiveRecord::Base
- # belongs_to :tag, inverse_of: false
- # end
- #
- # == Nested \Associations
- #
- # You can actually specify *any* association with the <tt>:through</tt> option, including an
- # association which has a <tt>:through</tt> option itself. For example:
- #
- # class Author < ActiveRecord::Base
- # has_many :posts
- # has_many :comments, through: :posts
- # has_many :commenters, through: :comments
- # end
- #
- # class Post < ActiveRecord::Base
- # has_many :comments
- # end
- #
- # class Comment < ActiveRecord::Base
- # belongs_to :commenter
- # end
- #
- # @author = Author.first
- # @author.commenters # => People who commented on posts written by the author
- #
- # An equivalent way of setting up this association would be:
- #
- # class Author < ActiveRecord::Base
- # has_many :posts
- # has_many :commenters, through: :posts
- # end
- #
- # class Post < ActiveRecord::Base
- # has_many :comments
- # has_many :commenters, through: :comments
- # end
- #
- # class Comment < ActiveRecord::Base
- # belongs_to :commenter
- # end
- #
- # When using a nested association, you will not be able to modify the association because there
- # is not enough information to know what modification to make. For example, if you tried to
- # add a <tt>Commenter</tt> in the example above, there would be no way to tell how to set up the
- # intermediate <tt>Post</tt> and <tt>Comment</tt> objects.
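- #
- # Concretely (a brief sketch based on the Author example above), a write through
- # the nested association raises an error, because the intermediate +Post+ and
- # +Comment+ records cannot be inferred:
- #
- #   @author.commenters << Commenter.new # raises; nested :through associations are read-only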
- #
- # == Polymorphic \Associations
- #
- # Polymorphic associations on models are not restricted in what types of models they
- # can be associated with. Rather, they specify an interface that a #has_many association
- # must adhere to.
- #
- # class Asset < ActiveRecord::Base
- # belongs_to :attachable, polymorphic: true
- # end
- #
- # class Post < ActiveRecord::Base
- # has_many :assets, as: :attachable # The :as option specifies the polymorphic interface to use.
- # end
- #
- # @asset.attachable = @post
- #
- # This works by using a type column in addition to a foreign key to specify the associated
- # record. In the Asset example, you'd need an +attachable_id+ integer column and an
- # +attachable_type+ string column.
- #
- # Using polymorphic associations in combination with single table inheritance (STI) is
- # a little tricky. In order for the associations to work as expected, ensure that you
- # store the base model for the STI models in the type column of the polymorphic
- # association. To continue with the asset example above, suppose there are guest posts
- # and member posts that use the posts table for STI. In this case, there must be a +type+
- # column in the posts table.
- #
- # Note: The <tt>attachable_type=</tt> method is being called when assigning an +attachable+.
- # The +class_name+ of the +attachable+ is passed as a String.
- #
- # class Asset < ActiveRecord::Base
- # belongs_to :attachable, polymorphic: true
- #
- # def attachable_type=(class_name)
- # super(class_name.constantize.base_class.to_s)
- # end
- # end
- #
- # class Post < ActiveRecord::Base
- #     # because we store "Post" in attachable_type, dependent: :destroy will now work
- # has_many :assets, as: :attachable, dependent: :destroy
- # end
- #
- # class GuestPost < Post
- # end
- #
- # class MemberPost < Post
- # end
- #
- # == Caching
- #
- # All of the methods are built on a simple caching principle that will keep the result
- # of the last query around unless specifically instructed not to. The cache is even
- # shared across methods to make it even cheaper to use the macro-added methods without
- # worrying too much about performance at the first go.
- #
- # project.milestones # fetches milestones from the database
- # project.milestones.size # uses the milestone cache
- # project.milestones.empty? # uses the milestone cache
- # project.milestones(true).size # fetches milestones from the database
- # project.milestones # uses the milestone cache
- #
- # == Eager loading of associations
- #
- # Eager loading is a way to find objects of a certain class and a number of named associations.
- # It is one of the easiest ways to prevent the dreaded N+1 problem in which fetching 100
- # posts that each need to display their author triggers 101 database queries. Through the
- # use of eager loading, the number of queries will be reduced from 101 to 2.
- #
- # class Post < ActiveRecord::Base
- # belongs_to :author
- # has_many :comments
- # end
- #
- # Consider the following loop using the class above:
- #
- # Post.all.each do |post|
- # puts "Post: " + post.title
- # puts "Written by: " + post.author.name
- # puts "Last comment on: " + post.comments.first.created_on
- # end
- #
- # To iterate over these one hundred posts, we'll generate 201 database queries. Let's
- # first just optimize it for retrieving the author:
- #
- # Post.includes(:author).each do |post|
- #
- # This references the name of the #belongs_to association that also used the <tt>:author</tt>
- # symbol. After loading the posts, find will collect the +author_id+ from each one and load
- # all the referenced authors with one query. Doing so will cut down the number of queries
- # from 201 to 102.
- #
- # We can improve upon the situation further by referencing both associations in the finder with:
- #
- # Post.includes(:author, :comments).each do |post|
- #
- # This will load all comments with a single query. This reduces the total number of queries
- # to 3. In general, the number of queries will be 1 plus the number of associations
- # named (except if some of the associations are polymorphic #belongs_to - see below).
- #
- # To include a deep hierarchy of associations, use a hash:
- #
- # Post.includes(:author, { comments: { author: :gravatar } }).each do |post|
- #
- # The above code will load all the comments and all of their associated
- # authors and gravatars. You can mix and match any combination of symbols,
- # arrays, and hashes to retrieve the associations you want to load.
- #
- # All of this power shouldn't fool you into thinking that you can pull out huge amounts
- # of data with no performance penalty just because you've reduced the number of queries.
- # The database still needs to send all the data to Active Record and it still needs to
- # be processed. So it's no catch-all for performance problems, but it's a great way to
- # cut down on the number of queries in a situation as the one described above.
- #
- # Since only one table is loaded at a time, conditions or orders cannot reference tables
- # other than the main one. If this is the case, Active Record falls back to the previously
- # used LEFT OUTER JOIN based strategy. For example:
- #
- # Post.includes([:author, :comments]).where(['comments.approved = ?', true])
- #
- # This will result in a single SQL query with joins along the lines of:
- # <tt>LEFT OUTER JOIN comments ON comments.post_id = posts.id</tt> and
- # <tt>LEFT OUTER JOIN authors ON authors.id = posts.author_id</tt>. Note that using conditions
- # like this can have unintended consequences.
- # In the above example posts with no approved comments are not returned at all, because
- # the conditions apply to the SQL statement as a whole and not just to the association.
- #
- # You must disambiguate column references for this fallback to happen, for example
- # <tt>order: "author.name DESC"</tt> will work but <tt>order: "name DESC"</tt> will not.
- #
- # If you want to load all posts (including posts with no approved comments) then write
- # your own LEFT OUTER JOIN query using ON
- #
- # Post.joins("LEFT OUTER JOIN comments ON comments.post_id = posts.id AND comments.approved = '1'")
- #
- # In this case it is usually more natural to include an association which has conditions defined on it:
- #
- # class Post < ActiveRecord::Base
- # has_many :approved_comments, -> { where(approved: true) }, class_name: 'Comment'
- # end
- #
- # Post.includes(:approved_comments)
- #
- # This will load posts and eager load the +approved_comments+ association, which contains
- # only those comments that have been approved.
- #
- # If you eager load an association with a specified <tt>:limit</tt> option, it will be ignored,
- # returning all the associated objects:
- #
- # class Picture < ActiveRecord::Base
- # has_many :most_recent_comments, -> { order('id DESC').limit(10) }, class_name: 'Comment'
- # end
- #
- # Picture.includes(:most_recent_comments).first.most_recent_comments # => returns all associated comments.
- #
- # Eager loading is supported with polymorphic associations.
- #
- # class Address < ActiveRecord::Base
- # belongs_to :addressable, polymorphic: true
- # end
- #
- # A call that tries to eager load the addressable model
- #
- # Address.includes(:addressable)
- #
- # This will execute one query to load the addresses and load the addressables with one
- # query per addressable type.
- # For example if all the addressables are either of class Person or Company then a total
- #   of 3 queries will be executed. The list of addressable types to load is determined from
- #   the addresses already loaded. This is not supported if Active Record has to fall back
- #   to the previous implementation of eager loading, and will raise ActiveRecord::EagerLoadPolymorphicError.
- # The reason is that the parent model's type is a column value so its corresponding table
- # name cannot be put in the +FROM+/+JOIN+ clauses of that query.
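- #
- # As a rough illustration of that limitation (a sketch, not from the original
- # docs), forcing a single JOIN-based query over the polymorphic association raises:
- #
- #   Address.eager_load(:addressable) # => raises ActiveRecord::EagerLoadPolymorphicError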
- #
- # == Table Aliasing
- #
- # Active Record uses table aliasing in the case that a table is referenced multiple times
- # in a join. If a table is referenced only once, the standard table name is used. The
- # second time, the table is aliased as <tt>#{reflection_name}_#{parent_table_name}</tt>.
- # Indexes are appended for any more successive uses of the table name.
- #
- # Post.joins(:comments)
- # # => SELECT ... FROM posts INNER JOIN comments ON ...
- # Post.joins(:special_comments) # STI
- # # => SELECT ... FROM posts INNER JOIN comments ON ... AND comments.type = 'SpecialComment'
- # Post.joins(:comments, :special_comments) # special_comments is the reflection name, posts is the parent table name
- # # => SELECT ... FROM posts INNER JOIN comments ON ... INNER JOIN comments special_comments_posts
- #
- # Acts as tree example:
- #
- # TreeMixin.joins(:children)
- # # => SELECT ... FROM mixins INNER JOIN mixins childrens_mixins ...
- # TreeMixin.joins(children: :parent)
- # # => SELECT ... FROM mixins INNER JOIN mixins childrens_mixins ...
- # INNER JOIN parents_mixins ...
- # TreeMixin.joins(children: {parent: :children})
- # # => SELECT ... FROM mixins INNER JOIN mixins childrens_mixins ...
- # INNER JOIN parents_mixins ...
- # INNER JOIN mixins childrens_mixins_2
- #
- # Has and Belongs to Many join tables use the same idea, but add a <tt>_join</tt> suffix:
- #
- # Post.joins(:categories)
- # # => SELECT ... FROM posts INNER JOIN categories_posts ... INNER JOIN categories ...
- # Post.joins(categories: :posts)
- # # => SELECT ... FROM posts INNER JOIN categories_posts ... INNER JOIN categories ...
- # INNER JOIN categories_posts posts_categories_join INNER JOIN posts posts_categories
- # Post.joins(categories: {posts: :categories})
- # # => SELECT ... FROM posts INNER JOIN categories_posts ... INNER JOIN categories ...
- # INNER JOIN categories_posts posts_categories_join INNER JOIN posts posts_categories
- # INNER JOIN categories_posts categories_posts_join INNER JOIN categories categories_posts_2
- #
- # If you wish to specify your own custom joins using ActiveRecord::QueryMethods#joins method, those table
- # names will take precedence over the eager associations:
- #
- # Post.joins(:comments).joins("inner join comments ...")
- # # => SELECT ... FROM posts INNER JOIN comments_posts ON ... INNER JOIN comments ...
- # Post.joins(:comments, :special_comments).joins("inner join comments ...")
- # # => SELECT ... FROM posts INNER JOIN comments comments_posts ON ...
- # INNER JOIN comments special_comments_posts ...
- # INNER JOIN comments ...
- #
- # Table aliases are automatically truncated according to the maximum length of table identifiers
- # allowed by the specific database.
- #
- # == Modules
- #
- # By default, associations will look for objects within the current module scope. Consider:
- #
- # module MyApplication
- # module Business
- # class Firm < ActiveRecord::Base
- # has_many :clients
- # end
- #
- # class Client < ActiveRecord::Base; end
- # end
- # end
- #
- # When <tt>Firm#clients</tt> is called, it will in turn call
- # <tt>MyApplication::Business::Client.find_all_by_firm_id(firm.id)</tt>.
- # If you want to associate with a class in another module scope, this can be done by
- # specifying the complete class name.
- #
- # module MyApplication
- # module Business
- # class Firm < ActiveRecord::Base; end
- # end
- #
- # module Billing
- # class Account < ActiveRecord::Base
- # belongs_to :firm, class_name: "MyApplication::Business::Firm"
- # end
- # end
- # end
- #
- # == Bi-directional associations
- #
- # When you specify an association there is usually an association on the associated model
- # that specifies the same relationship in reverse. For example, with the following models:
- #
- # class Dungeon < ActiveRecord::Base
- # has_many :traps
- # has_one :evil_wizard
- # end
- #
- # class Trap < ActiveRecord::Base
- # belongs_to :dungeon
- # end
- #
- # class EvilWizard < ActiveRecord::Base
- # belongs_to :dungeon
- # end
- #
- # The +traps+ association on +Dungeon+ and the +dungeon+ association on +Trap+ are
- # the inverse of each other and the inverse of the +dungeon+ association on +EvilWizard+
- # is the +evil_wizard+ association on +Dungeon+ (and vice-versa). By default,
- # Active Record can guess the inverse of the association based on the name
- # of the class. The result is the following:
- #
- # d = Dungeon.first
- # t = d.traps.first
- # d.object_id == t.dungeon.object_id # => true
- #
- # The +Dungeon+ instances +d+ and <tt>t.dungeon</tt> in the above example refer to
- # the same in-memory instance since the association matches the name of the class.
- # The result would be the same if we added +:inverse_of+ to our model definitions:
- #
- # class Dungeon < ActiveRecord::Base
- # has_many :traps, inverse_of: :dungeon
- # has_one :evil_wizard, inverse_of: :dungeon
- # end
- #
- # class Trap < ActiveRecord::Base
- # belongs_to :dungeon, inverse_of: :traps
- # end
- #
- # class EvilWizard < ActiveRecord::Base
- # belongs_to :dungeon, inverse_of: :evil_wizard
- # end
- #
- # There are limitations to <tt>:inverse_of</tt> support:
- #
- # * does not work with <tt>:through</tt> associations.
- # * does not work with <tt>:polymorphic</tt> associations.
- # * for #belongs_to associations, #has_many inverse associations are ignored.
- #
- # For more information, see the documentation for the +:inverse_of+ option.
- #
- # == Deleting from associations
- #
- # === Dependent associations
- #
- # #has_many, #has_one and #belongs_to associations support the <tt>:dependent</tt> option.
- # This allows you to specify that associated records should be deleted when the owner is
- # deleted.
- #
- # For example:
- #
- # class Author
- # has_many :posts, dependent: :destroy
- # end
- # Author.find(1).destroy # => Will destroy all of the author's posts, too
- #
- # The <tt>:dependent</tt> option can have different values which specify how the deletion
- # is done. For more information, see the documentation for this option on the different
- # specific association types. When no option is given, the behavior is to do nothing
- # with the associated records when destroying a record.
- #
- # Note that <tt>:dependent</tt> is implemented using Rails' callback
- # system, which works by processing callbacks in order. Therefore, other
- # callbacks declared either before or after the <tt>:dependent</tt> option
- # can affect what it does.
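- #
- # A hedged sketch of that ordering (the model and callbacks are illustrative,
- # not from the original docs):
- #
- #   class Author < ActiveRecord::Base
- #     before_destroy { puts posts.count } # runs first: the posts still exist
- #     has_many :posts, dependent: :destroy
- #     before_destroy { puts posts.count } # runs after the posts have been destroyed
- #   end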
- #
- # Note that <tt>:dependent</tt> option is ignored for #has_one <tt>:through</tt> associations.
- #
- # === Delete or destroy?
- #
- # #has_many and #has_and_belongs_to_many associations have the methods <tt>destroy</tt>,
- # <tt>delete</tt>, <tt>destroy_all</tt> and <tt>delete_all</tt>.
- #
- # For #has_and_belongs_to_many, <tt>delete</tt> and <tt>destroy</tt> are the same: they
- # cause the records in the join table to be removed.
- #
- # For #has_many, <tt>destroy</tt> and <tt>destroy_all</tt> will always call the <tt>destroy</tt> method of the
- #   record(s) being removed so that callbacks are run. However, <tt>delete</tt> and <tt>delete_all</tt> will
- #   either do the deletion according to the strategy specified by the <tt>:dependent</tt> option, or,
- #   if no <tt>:dependent</tt> option is given, follow the default strategy.
- # The default strategy is to do nothing (leave the foreign keys with the parent ids set), except for
- # #has_many <tt>:through</tt>, where the default strategy is <tt>delete_all</tt> (delete
- # the join records, without running their callbacks).
- #
- # There is also a <tt>clear</tt> method which is the same as <tt>delete_all</tt>, except that
- # it returns the association rather than the records which have been deleted.
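- #
- # A brief sketch of the difference (assuming an +Author+ with a plain #has_many
- # <tt>:posts</tt>):
- #
- #   author.posts.destroy_all # loads each post and calls destroy on it, so callbacks run
- #   author.posts.delete_all  # removes the posts per the :dependent (or default) strategy,
- #                            # without running their callbacks
- #   author.posts.clear       # like delete_all, but returns the association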
- #
- # === What gets deleted?
- #
- # There is a potential pitfall here: #has_and_belongs_to_many and #has_many <tt>:through</tt>
- # associations have records in join tables, as well as the associated records. So when we
- # call one of these deletion methods, what exactly should be deleted?
- #
- # The answer is that it is assumed that deletion on an association is about removing the
- # <i>link</i> between the owner and the associated object(s), rather than necessarily the
- # associated objects themselves. So with #has_and_belongs_to_many and #has_many
- # <tt>:through</tt>, the join records will be deleted, but the associated records won't.
- #
- # This makes sense if you think about it: if you were to call <tt>post.tags.delete(Tag.find_by(name: 'food'))</tt>
- # you would want the 'food' tag to be unlinked from the post, rather than for the tag itself
- # to be removed from the database.
- #
- # However, there are examples where this strategy doesn't make sense. For example, suppose
- # a person has many projects, and each project has many tasks. If we deleted one of a person's
- # tasks, we would probably not want the project to be deleted. In this scenario, the delete method
- # won't actually work: it can only be used if the association on the join model is a
- # #belongs_to. In other situations you are expected to perform operations directly on
- # either the associated records or the <tt>:through</tt> association.
- #
- # With a regular #has_many there is no distinction between the "associated records"
- # and the "link", so there is only one choice for what gets deleted.
- #
- # With #has_and_belongs_to_many and #has_many <tt>:through</tt>, if you want to delete the
- # associated records themselves, you can always do something along the lines of
- # <tt>person.tasks.each(&:destroy)</tt>.
- #
- # == Type safety with ActiveRecord::AssociationTypeMismatch
- #
- # If you attempt to assign an object to an association that doesn't match the inferred
- # or specified <tt>:class_name</tt>, you'll get an ActiveRecord::AssociationTypeMismatch.
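- #
- # For example (a small sketch reusing the Project and Milestone models from the
- # introduction):
- #
- #   project.milestones << Project.new # => raises ActiveRecord::AssociationTypeMismatch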
- #
- # == Options
- #
- #   All of the association macros can be specialized through options. This makes it possible
- #   to handle cases more complex than the simple and guessable ones.
- module ClassMethods
- # Specifies a one-to-many association. The following methods for retrieval and query of
- # collections of associated objects will be added:
- #
- # +collection+ is a placeholder for the symbol passed as the +name+ argument, so
- # <tt>has_many :clients</tt> would add among others <tt>clients.empty?</tt>.
- #
- # [collection(force_reload = false)]
- # Returns an array of all the associated objects.
- # An empty array is returned if none are found.
- # [collection<<(object, ...)]
- # Adds one or more objects to the collection by setting their foreign keys to the collection's primary key.
- # Note that this operation instantly fires update SQL without waiting for the save or update call on the
- # parent object, unless the parent object is a new record.
- # This will also run validations and callbacks of associated object(s).
- # [collection.delete(object, ...)]
- # Removes one or more objects from the collection by setting their foreign keys to +NULL+.
- # Objects will be in addition destroyed if they're associated with <tt>dependent: :destroy</tt>,
- # and deleted if they're associated with <tt>dependent: :delete_all</tt>.
- #
- # If the <tt>:through</tt> option is used, then the join records are deleted (rather than
- # nullified) by default, but you can specify <tt>dependent: :destroy</tt> or
- # <tt>dependent: :nullify</tt> to override this.
- # [collection.destroy(object, ...)]
- # Removes one or more objects from the collection by running <tt>destroy</tt> on
- # each record, regardless of any dependent option, ensuring callbacks are run.
- #
- # If the <tt>:through</tt> option is used, then the join records are destroyed
- # instead, not the objects themselves.
- # [collection=objects]
- #     Replaces the collection's content by deleting and adding objects as appropriate. If the <tt>:through</tt>
- #     option is used, callbacks in the join models are triggered except destroy callbacks, since deletion is
- # direct by default. You can specify <tt>dependent: :destroy</tt> or
- # <tt>dependent: :nullify</tt> to override this.
- # [collection_singular_ids]
- # Returns an array of the associated objects' ids
- # [collection_singular_ids=ids]
- # Replace the collection with the objects identified by the primary keys in +ids+. This
- # method loads the models and calls <tt>collection=</tt>. See above.
- # [collection.clear]
- # Removes every object from the collection. This destroys the associated objects if they
- # are associated with <tt>dependent: :destroy</tt>, deletes them directly from the
- # database if <tt>dependent: :delete_all</tt>, otherwise sets their foreign keys to +NULL+.
- #     If the <tt>:through</tt> option is used, no destroy callbacks are invoked on the join models.
- # Join models are directly deleted.
- # [collection.empty?]
- # Returns +true+ if there are no associated objects.
- # [collection.size]
- # Returns the number of associated objects.
- # [collection.find(...)]
- # Finds an associated object according to the same rules as ActiveRecord::FinderMethods#find.
- # [collection.exists?(...)]
- # Checks whether an associated object with the given conditions exists.
- # Uses the same rules as ActiveRecord::FinderMethods#exists?.
- # [collection.build(attributes = {}, ...)]
- # Returns one or more new objects of the collection type that have been instantiated
- # with +attributes+ and linked to this object through a foreign key, but have not yet
- # been saved.
- # [collection.create(attributes = {})]
- # Returns a new object of the collection type that has been instantiated
- # with +attributes+, linked to this object through a foreign key, and that has already
- # been saved (if it passed the validation). *Note*: This only works if the base model
- # already exists in the DB, not if it is a new (unsaved) record!
- # [collection.create!(attributes = {})]
- # Does the same as <tt>collection.create</tt>, but raises ActiveRecord::RecordInvalid
- # if the record is invalid.
- #
- # === Example
- #
- # A <tt>Firm</tt> class declares <tt>has_many :clients</tt>, which will add:
- # * <tt>Firm#clients</tt> (similar to <tt>Client.where(firm_id: id)</tt>)
- # * <tt>Firm#clients<<</tt>
- # * <tt>Firm#clients.delete</tt>
- # * <tt>Firm#clients.destroy</tt>
- # * <tt>Firm#clients=</tt>
- # * <tt>Firm#client_ids</tt>
- # * <tt>Firm#client_ids=</tt>
- # * <tt>Firm#clients.clear</tt>
- # * <tt>Firm#clients.empty?</tt> (similar to <tt>firm.clients.size == 0</tt>)
- # * <tt>Firm#clients.size</tt> (similar to <tt>Client.count "firm_id = #{id}"</tt>)
- # * <tt>Firm#clients.find</tt> (similar to <tt>Client.where(firm_id: id).find(id)</tt>)
- # * <tt>Firm#clients.exists?(name: 'ACME')</tt> (similar to <tt>Client.exists?(name: 'ACME', firm_id: firm.id)</tt>)
- # * <tt>Firm#clients.build</tt> (similar to <tt>Client.new("firm_id" => id)</tt>)
- # * <tt>Firm#clients.create</tt> (similar to <tt>c = Client.new("firm_id" => id); c.save; c</tt>)
- # * <tt>Firm#clients.create!</tt> (similar to <tt>c = Client.new("firm_id" => id); c.save!</tt>)
- # The declaration can also include an +options+ hash to specialize the behavior of the association.
- #
- # === Scopes
- #
- # You can pass a second argument +scope+ as a callable (i.e. proc or
- # lambda) to retrieve a specific set of records or customize the generated
- # query when you access the associated collection.
- #
- # Scope examples:
- # has_many :comments, -> { where(author_id: 1) }
- # has_many :employees, -> { joins(:address) }
- # has_many :posts, ->(post) { where("max_post_length > ?", post.length) }
- #
- # === Extensions
- #
- # The +extension+ argument allows you to pass a block into a has_many
- # association. This is useful for adding new finders, creators and other
- # factory-type methods to be used as part of the association.
- #
- # Extension examples:
- # has_many :employees do
- # def find_or_create_by_name(name)
- # first_name, last_name = name.split(" ", 2)
- # find_or_create_by(first_name: first_name, last_name: last_name)
+ # \Associations are a set of macro-like class methods for tying objects together through
+ # foreign keys. They express relationships like "Project has one Project Manager"
+ # or "Project belongs to a Portfolio". Each macro adds a number of methods to the
+ # class which are specialized according to the collection or association symbol and the
+ # options hash. It works much the same way as Ruby's own <tt>attr*</tt>
+ # methods.
+ #
+ # class Project < ActiveRecord::Base
+ # belongs_to :portfolio
+ # has_one :project_manager
+ # has_many :milestones
+ # has_and_belongs_to_many :categories
+ # end
+ #
+ # The project class now has the following methods (and more) to ease the traversal and
+ # manipulation of its relationships:
+ # * <tt>Project#portfolio</tt>, <tt>Project#portfolio=(portfolio)</tt>, <tt>Project#reload_portfolio</tt>
+ # * <tt>Project#project_manager</tt>, <tt>Project#project_manager=(project_manager)</tt>, <tt>Project#reload_project_manager</tt>
+ # * <tt>Project#milestones.empty?</tt>, <tt>Project#milestones.size</tt>, <tt>Project#milestones</tt>, <tt>Project#milestones<<(milestone)</tt>,
+ # <tt>Project#milestones.delete(milestone)</tt>, <tt>Project#milestones.destroy(milestone)</tt>, <tt>Project#milestones.find(milestone_id)</tt>,
+ # <tt>Project#milestones.build</tt>, <tt>Project#milestones.create</tt>
+ # * <tt>Project#categories.empty?</tt>, <tt>Project#categories.size</tt>, <tt>Project#categories</tt>, <tt>Project#categories<<(category1)</tt>,
+ # <tt>Project#categories.delete(category1)</tt>, <tt>Project#categories.destroy(category1)</tt>
+ #
+ # === A word of warning
+ #
+ # Don't create associations that have the same name as {instance methods}[rdoc-ref:ActiveRecord::Core] of
+ # <tt>ActiveRecord::Base</tt>. Since the association adds a method with that name to
+ # its model, reusing the name of a method provided by <tt>ActiveRecord::Base</tt> will
+ # override the inherited method and break things. For instance, +attributes+ and
+ # +connection+ would be bad choices for association names, because those names already
+ # exist in the list of <tt>ActiveRecord::Base</tt> instance methods.
+ #
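+ # For instance (a hypothetical sketch, not from the original docs), the following
+ # would shadow the built-in <tt>ActiveRecord::Base#attributes</tt> reader and break the model:
+ #
+ #   class Project < ActiveRecord::Base
+ #     has_many :attributes # BAD: collides with ActiveRecord::Base#attributes
+ #   end
+ #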
+ # == Auto-generated methods
+ # See also Instance Public methods below for more details.
+ #
+ # === Singular associations (one-to-one)
+ # | | belongs_to |
+ # generated methods | belongs_to | :polymorphic | has_one
+ # ----------------------------------+------------+--------------+---------
+ # other | X | X | X
+ # other=(other) | X | X | X
+ # build_other(attributes={}) | X | | X
+ # create_other(attributes={}) | X | | X
+ # create_other!(attributes={}) | X | | X
+ # reload_other | X | X | X
+ #
+ # === Collection associations (one-to-many / many-to-many)
+ # | | | has_many
+ # generated methods | habtm | has_many | :through
+ # ----------------------------------+-------+----------+----------
+ # others | X | X | X
+ # others=(other,other,...) | X | X | X
+ # other_ids | X | X | X
+ # other_ids=(id,id,...) | X | X | X
+ # others<< | X | X | X
+ # others.push | X | X | X
+ # others.concat | X | X | X
+ # others.build(attributes={}) | X | X | X
+ # others.create(attributes={}) | X | X | X
+ # others.create!(attributes={}) | X | X | X
+ # others.size | X | X | X
+ # others.length | X | X | X
+ # others.count | X | X | X
+ # others.sum(*args) | X | X | X
+ # others.empty? | X | X | X
+ # others.clear | X | X | X
+ # others.delete(other,other,...) | X | X | X
+ # others.delete_all | X | X | X
+ # others.destroy(other,other,...) | X | X | X
+ # others.destroy_all | X | X | X
+ # others.find(*args) | X | X | X
+ # others.exists? | X | X | X
+ # others.distinct | X | X | X
+ # others.reset | X | X | X
+ # others.reload | X | X | X
+ #
+ # === Overriding generated methods
+ #
+ # Association methods are generated in a module included into the model
+ # class, making overrides easy. The original generated method can thus be
+ # called with +super+:
+ #
+ # class Car < ActiveRecord::Base
+ # belongs_to :owner
+ # belongs_to :old_owner
+ #
+ # def owner=(new_owner)
+ # self.old_owner = self.owner
+ # super
# end
# end
#
- # === Options
- # [:class_name]
- # Specify the class name of the association. Use it only if that name can't be inferred
- # from the association name. So <tt>has_many :products</tt> will by default be linked
- # to the +Product+ class, but if the real class name is +SpecialProduct+, you'll have to
- # specify it with this option.
- # [:foreign_key]
- # Specify the foreign key used for the association. By default this is guessed to be the name
- # of this class in lower-case and "_id" suffixed. So a Person class that makes a #has_many
- # association will use "person_id" as the default <tt>:foreign_key</tt>.
- # [:foreign_type]
- # Specify the column used to store the associated object's type, if this is a polymorphic
- # association. By default this is guessed to be the name of the polymorphic association
- # specified on "as" option with a "_type" suffix. So a class that defines a
- # <tt>has_many :tags, as: :taggable</tt> association will use "taggable_type" as the
- # default <tt>:foreign_type</tt>.
- # [:primary_key]
- # Specify the name of the column to use as the primary key for the association. By default this is +id+.
- # [:dependent]
- # Controls what happens to the associated objects when
- # their owner is destroyed. Note that these are implemented as
- # callbacks, and Rails executes callbacks in order. Therefore, other
- # similar callbacks may affect the <tt>:dependent</tt> behavior, and the
- # <tt>:dependent</tt> behavior may affect other callbacks.
- #
- # * <tt>:destroy</tt> causes all the associated objects to also be destroyed.
- # * <tt>:delete_all</tt> causes all the associated objects to be deleted directly from the database (so callbacks will not be executed).
- # * <tt>:nullify</tt> causes the foreign keys to be set to +NULL+. Callbacks are not executed.
- # * <tt>:restrict_with_exception</tt> causes an exception to be raised if there are any associated records.
- # * <tt>:restrict_with_error</tt> causes an error to be added to the owner if there are any associated objects.
- #
- # If using with the <tt>:through</tt> option, the association on the join model must be
- # a #belongs_to, and the records which get deleted are the join records, rather than
- # the associated records.
- # [:counter_cache]
- #     This option can be used to configure a custom named <tt>:counter_cache</tt>. You only need this option
- #     when you customized the name of your <tt>:counter_cache</tt> on the #belongs_to association.
- # [:as]
- # Specifies a polymorphic interface (See #belongs_to).
- # [:through]
- # Specifies an association through which to perform the query. This can be any other type
- # of association, including other <tt>:through</tt> associations. Options for <tt>:class_name</tt>,
- # <tt>:primary_key</tt> and <tt>:foreign_key</tt> are ignored, as the association uses the
- # source reflection.
- #
- # If the association on the join model is a #belongs_to, the collection can be modified
- # and the records on the <tt>:through</tt> model will be automatically created and removed
- # as appropriate. Otherwise, the collection is read-only, so you should manipulate the
- # <tt>:through</tt> association directly.
- #
- # If you are going to modify the association (rather than just read from it), then it is
- # a good idea to set the <tt>:inverse_of</tt> option on the source association on the
- # join model. This allows associated records to be built which will automatically create
- # the appropriate join model records when they are saved. (See the 'Association Join Models'
- # section above.)
- # [:source]
- # Specifies the source association name used by #has_many <tt>:through</tt> queries.
- # Only use it if the name cannot be inferred from the association.
- # <tt>has_many :subscribers, through: :subscriptions</tt> will look for either <tt>:subscribers</tt> or
- # <tt>:subscriber</tt> on Subscription, unless a <tt>:source</tt> is given.
- # [:source_type]
- # Specifies type of the source association used by #has_many <tt>:through</tt> queries where the source
- # association is a polymorphic #belongs_to.
- # [:validate]
- # If +false+, don't validate the associated objects when saving the parent object. true by default.
- # [:autosave]
- # If true, always save the associated objects or destroy them if marked for destruction,
- # when saving the parent object. If false, never save or destroy the associated objects.
- # By default, only save associated objects that are new records. This option is implemented as a
- # +before_save+ callback. Because callbacks are run in the order they are defined, associated objects
- # may need to be explicitly saved in any user-defined +before_save+ callbacks.
- #
- # Note that NestedAttributes::ClassMethods#accepts_nested_attributes_for sets
- # <tt>:autosave</tt> to <tt>true</tt>.
- # [:inverse_of]
- # Specifies the name of the #belongs_to association on the associated object
- # that is the inverse of this #has_many association. Does not work in combination
- # with <tt>:through</tt> or <tt>:as</tt> options.
- # See ActiveRecord::Associations::ClassMethods's overview on Bi-directional associations for more detail.
- # [:extend]
- # Specifies a module or array of modules that will be extended into the association object returned.
- # Useful for defining methods on associations, especially when they should be shared between multiple
- # association objects.
- #
- # Option examples:
- # has_many :comments, -> { order("posted_on") }
- # has_many :comments, -> { includes(:author) }
- # has_many :people, -> { where(deleted: false).order("name") }, class_name: "Person"
- # has_many :tracks, -> { order("position") }, dependent: :destroy
- # has_many :comments, dependent: :nullify
- # has_many :tags, as: :taggable
- # has_many :reports, -> { readonly }
- # has_many :subscribers, through: :subscriptions, source: :user
- def has_many(name, scope = nil, options = {}, &extension)
- reflection = Builder::HasMany.build(self, name, scope, options, &extension)
- Reflection.add_reflection self, name, reflection
- end
-
- # Specifies a one-to-one association with another class. This method should only be used
- # if the other class contains the foreign key. If the current class contains the foreign key,
- # then you should use #belongs_to instead. See also ActiveRecord::Associations::ClassMethods's overview
- # on when to use #has_one and when to use #belongs_to.
- #
- # The following methods for retrieval and query of a single associated object will be added:
- #
- # +association+ is a placeholder for the symbol passed as the +name+ argument, so
- # <tt>has_one :manager</tt> would add among others <tt>manager.nil?</tt>.
- #
- # [association(force_reload = false)]
- # Returns the associated object. +nil+ is returned if none is found.
- # [association=(associate)]
- # Assigns the associate object, extracts the primary key, sets it as the foreign key,
- # and saves the associate object. To avoid database inconsistencies, permanently deletes an existing
- #     associated object when assigning a new one, even if the new one isn't saved to the database.
- # [build_association(attributes = {})]
- # Returns a new object of the associated type that has been instantiated
- # with +attributes+ and linked to this object through a foreign key, but has not
- # yet been saved.
- # [create_association(attributes = {})]
- # Returns a new object of the associated type that has been instantiated
- # with +attributes+, linked to this object through a foreign key, and that
- # has already been saved (if it passed the validation).
- # [create_association!(attributes = {})]
- # Does the same as <tt>create_association</tt>, but raises ActiveRecord::RecordInvalid
- # if the record is invalid.
- #
- # === Example
- #
- # An Account class declares <tt>has_one :beneficiary</tt>, which will add:
- # * <tt>Account#beneficiary</tt> (similar to <tt>Beneficiary.where(account_id: id).first</tt>)
- # * <tt>Account#beneficiary=(beneficiary)</tt> (similar to <tt>beneficiary.account_id = account.id; beneficiary.save</tt>)
- # * <tt>Account#build_beneficiary</tt> (similar to <tt>Beneficiary.new("account_id" => id)</tt>)
- # * <tt>Account#create_beneficiary</tt> (similar to <tt>b = Beneficiary.new("account_id" => id); b.save; b</tt>)
- # * <tt>Account#create_beneficiary!</tt> (similar to <tt>b = Beneficiary.new("account_id" => id); b.save!; b</tt>)
- #
- # === Scopes
- #
- # You can pass a second argument +scope+ as a callable (i.e. proc or
- # lambda) to retrieve a specific record or customize the generated query
- # when you access the associated object.
- #
- # Scope examples:
- # has_one :author, -> { where(comment_id: 1) }
- # has_one :employer, -> { joins(:company) }
- # has_one :dob, ->(dob) { where("Date.new(2000, 01, 01) > ?", dob) }
- #
- # === Options
- #
- # The declaration can also include an +options+ hash to specialize the behavior of the association.
- #
- # Options are:
- # [:class_name]
- # Specify the class name of the association. Use it only if that name can't be inferred
- # from the association name. So <tt>has_one :manager</tt> will by default be linked to the Manager class, but
- # if the real class name is Person, you'll have to specify it with this option.
- # [:dependent]
- # Controls what happens to the associated object when
- # its owner is destroyed:
- #
- # * <tt>:destroy</tt> causes the associated object to also be destroyed
- # * <tt>:delete</tt> causes the associated object to be deleted directly from the database (so callbacks will not execute)
- # * <tt>:nullify</tt> causes the foreign key to be set to +NULL+. Callbacks are not executed.
- # * <tt>:restrict_with_exception</tt> causes an exception to be raised if there is an associated record
- # * <tt>:restrict_with_error</tt> causes an error to be added to the owner if there is an associated object
- #
- # Note that <tt>:dependent</tt> option is ignored when using <tt>:through</tt> option.
- # [:foreign_key]
- # Specify the foreign key used for the association. By default this is guessed to be the name
- # of this class in lower-case and "_id" suffixed. So a Person class that makes a #has_one association
- # will use "person_id" as the default <tt>:foreign_key</tt>.
- # [:foreign_type]
- # Specify the column used to store the associated object's type, if this is a polymorphic
- # association. By default this is guessed to be the name of the polymorphic association
- # specified on "as" option with a "_type" suffix. So a class that defines a
- # <tt>has_one :tag, as: :taggable</tt> association will use "taggable_type" as the
- # default <tt>:foreign_type</tt>.
- # [:primary_key]
- # Specify the method that returns the primary key used for the association. By default this is +id+.
- # [:as]
- # Specifies a polymorphic interface (See #belongs_to).
- # [:through]
- # Specifies a Join Model through which to perform the query. Options for <tt>:class_name</tt>,
- # <tt>:primary_key</tt>, and <tt>:foreign_key</tt> are ignored, as the association uses the
- # source reflection. You can only use a <tt>:through</tt> query through a #has_one
- # or #belongs_to association on the join model.
- # [:source]
- # Specifies the source association name used by #has_one <tt>:through</tt> queries.
- # Only use it if the name cannot be inferred from the association.
- # <tt>has_one :favorite, through: :favorites</tt> will look for a
- # <tt>:favorite</tt> on Favorite, unless a <tt>:source</tt> is given.
- # [:source_type]
- # Specifies type of the source association used by #has_one <tt>:through</tt> queries where the source
- # association is a polymorphic #belongs_to.
- # [:validate]
- # If +false+, don't validate the associated object when saving the parent object. +false+ by default.
- # [:autosave]
- # If true, always save the associated object or destroy it if marked for destruction,
- # when saving the parent object. If false, never save or destroy the associated object.
- # By default, only save the associated object if it's a new record.
- #
- # Note that NestedAttributes::ClassMethods#accepts_nested_attributes_for sets
- # <tt>:autosave</tt> to <tt>true</tt>.
- # [:inverse_of]
- # Specifies the name of the #belongs_to association on the associated object
- # that is the inverse of this #has_one association. Does not work in combination
- # with <tt>:through</tt> or <tt>:as</tt> options.
- # See ActiveRecord::Associations::ClassMethods's overview on Bi-directional associations for more detail.
- # [:required]
- # When set to +true+, the association will also have its presence validated.
- # This will validate the association itself, not the id. You can use
- # +:inverse_of+ to avoid an extra query during validation.
- #
- # Option examples:
- # has_one :credit_card, dependent: :destroy # destroys the associated credit card
- #   has_one :credit_card, dependent: :nullify  # updates the associated record's foreign
- # # key value to NULL rather than destroying it
- # has_one :last_comment, -> { order('posted_on') }, class_name: "Comment"
- # has_one :project_manager, -> { where(role: 'project_manager') }, class_name: "Person"
- # has_one :attachment, as: :attachable
- # has_one :boss, -> { readonly }
- # has_one :club, through: :membership
- # has_one :primary_address, -> { where(primary: true) }, through: :addressables, source: :addressable
- # has_one :credit_card, required: true
- def has_one(name, scope = nil, options = {})
- reflection = Builder::HasOne.build(self, name, scope, options)
- Reflection.add_reflection self, name, reflection
- end
-
- # Specifies a one-to-one association with another class. This method should only be used
- # if this class contains the foreign key. If the other class contains the foreign key,
- # then you should use #has_one instead. See also ActiveRecord::Associations::ClassMethods's overview
- # on when to use #has_one and when to use #belongs_to.
- #
- # Methods will be added for retrieval and query for a single associated object, for which
- # this object holds an id:
- #
- # +association+ is a placeholder for the symbol passed as the +name+ argument, so
- # <tt>belongs_to :author</tt> would add among others <tt>author.nil?</tt>.
- #
- # [association(force_reload = false)]
- # Returns the associated object. +nil+ is returned if none is found.
- # [association=(associate)]
- # Assigns the associate object, extracts the primary key, and sets it as the foreign key.
- # [build_association(attributes = {})]
- # Returns a new object of the associated type that has been instantiated
- # with +attributes+ and linked to this object through a foreign key, but has not yet been saved.
- # [create_association(attributes = {})]
- # Returns a new object of the associated type that has been instantiated
- # with +attributes+, linked to this object through a foreign key, and that
- # has already been saved (if it passed the validation).
- # [create_association!(attributes = {})]
- # Does the same as <tt>create_association</tt>, but raises ActiveRecord::RecordInvalid
- # if the record is invalid.
- #
- # === Example
- #
- # A Post class declares <tt>belongs_to :author</tt>, which will add:
- # * <tt>Post#author</tt> (similar to <tt>Author.find(author_id)</tt>)
- # * <tt>Post#author=(author)</tt> (similar to <tt>post.author_id = author.id</tt>)
- # * <tt>Post#build_author</tt> (similar to <tt>post.author = Author.new</tt>)
- # * <tt>Post#create_author</tt> (similar to <tt>post.author = Author.new; post.author.save; post.author</tt>)
- # * <tt>Post#create_author!</tt> (similar to <tt>post.author = Author.new; post.author.save!; post.author</tt>)
- # The declaration can also include an +options+ hash to specialize the behavior of the association.
- #
- # === Scopes
- #
- # You can pass a second argument +scope+ as a callable (i.e. proc or
- # lambda) to retrieve a specific record or customize the generated query
- # when you access the associated object.
- #
- # Scope examples:
- # belongs_to :firm, -> { where(id: 2) }
- # belongs_to :user, -> { joins(:friends) }
- # belongs_to :level, ->(level) { where("game_level > ?", level.current) }
- #
- # === Options
- #
- # [:class_name]
- # Specify the class name of the association. Use it only if that name can't be inferred
- # from the association name. So <tt>belongs_to :author</tt> will by default be linked to the Author class, but
- # if the real class name is Person, you'll have to specify it with this option.
- # [:foreign_key]
- # Specify the foreign key used for the association. By default this is guessed to be the name
- # of the association with an "_id" suffix. So a class that defines a <tt>belongs_to :person</tt>
- # association will use "person_id" as the default <tt>:foreign_key</tt>. Similarly,
- # <tt>belongs_to :favorite_person, class_name: "Person"</tt> will use a foreign key
- # of "favorite_person_id".
- # [:foreign_type]
- # Specify the column used to store the associated object's type, if this is a polymorphic
- # association. By default this is guessed to be the name of the association with a "_type"
- # suffix. So a class that defines a <tt>belongs_to :taggable, polymorphic: true</tt>
- # association will use "taggable_type" as the default <tt>:foreign_type</tt>.
- # [:primary_key]
- # Specify the method that returns the primary key of associated object used for the association.
- # By default this is id.
- # [:dependent]
- # If set to <tt>:destroy</tt>, the associated object is destroyed when this object is. If set to
- # <tt>:delete</tt>, the associated object is deleted *without* calling its destroy method.
- # This option should not be specified when #belongs_to is used in conjunction with
- # a #has_many relationship on another class because of the potential to leave
- # orphaned records behind.
- # [:counter_cache]
- # Caches the number of belonging objects on the associate class through the use of CounterCache::ClassMethods#increment_counter
- # and CounterCache::ClassMethods#decrement_counter. The counter cache is incremented when an object of this
- # class is created and decremented when it's destroyed. This requires that a column
- # named <tt>#{table_name}_count</tt> (such as +comments_count+ for a belonging Comment class)
- # is used on the associate class (such as a Post class) - that is, the migration for
- # <tt>#{table_name}_count</tt> is created on the associate class (such that <tt>Post.comments_count</tt> will
- # return the cached count; see the note below). You can also specify a custom counter
- # cache column by providing a column name instead of a +true+/+false+ value to this
- # option (e.g., <tt>counter_cache: :my_custom_counter</tt>).
- # Note: Specifying a counter cache will add it to that model's list of readonly attributes
- # using +attr_readonly+.
- # [:polymorphic]
- # Specify this association is a polymorphic association by passing +true+.
- # Note: If you've enabled the counter cache, then you may want to add the counter cache attribute
- # to the +attr_readonly+ list in the associated classes (e.g. <tt>class Post; attr_readonly :comments_count; end</tt>).
- # [:validate]
- # If +false+, don't validate the associated objects when saving the parent object. +false+ by default.
- # [:autosave]
- # If true, always save the associated object or destroy it if marked for destruction, when
- # saving the parent object.
- # If false, never save or destroy the associated object.
- # By default, only save the associated object if it's a new record.
- #
- # Note that NestedAttributes::ClassMethods#accepts_nested_attributes_for
- # sets <tt>:autosave</tt> to <tt>true</tt>.
- # [:touch]
- # If true, the associated object will be touched (the updated_at/on attributes set to current time)
- # when this record is either saved or destroyed. If you specify a symbol, that attribute
- # will be updated with the current time in addition to the updated_at/on attribute.
- # Please note that with touching no validation is performed and only the +after_touch+,
- # +after_commit+ and +after_rollback+ callbacks are executed.
- # [:inverse_of]
- # Specifies the name of the #has_one or #has_many association on the associated
- # object that is the inverse of this #belongs_to association. Does not work in
- # combination with the <tt>:polymorphic</tt> options.
- # See ActiveRecord::Associations::ClassMethods's overview on Bi-directional associations for more detail.
- # [:optional]
- # When set to +true+, the association will not have its presence validated.
- # [:required]
- # When set to +true+, the association will also have its presence validated.
- # This will validate the association itself, not the id. You can use
- # +:inverse_of+ to avoid an extra query during validation.
- # NOTE: <tt>required</tt> is set to <tt>true</tt> by default and is deprecated. If
- # you don't want to have association presence validated, use <tt>optional: true</tt>.
- #
- # Option examples:
- # belongs_to :firm, foreign_key: "client_of"
- # belongs_to :person, primary_key: "name", foreign_key: "person_name"
- # belongs_to :author, class_name: "Person", foreign_key: "author_id"
- # belongs_to :valid_coupon, ->(o) { where "discounts > ?", o.payments_count },
- # class_name: "Coupon", foreign_key: "coupon_id"
- # belongs_to :attachable, polymorphic: true
- # belongs_to :project, -> { readonly }
- # belongs_to :post, counter_cache: true
- # belongs_to :comment, touch: true
- # belongs_to :company, touch: :employees_last_updated_at
- # belongs_to :user, optional: true
- def belongs_to(name, scope = nil, options = {})
- reflection = Builder::BelongsTo.build(self, name, scope, options)
- Reflection.add_reflection self, name, reflection
- end
-
- # Specifies a many-to-many relationship with another class. This associates two classes via an
- # intermediate join table. Unless the join table is explicitly specified as an option, it is
- # guessed using the lexical order of the class names. So a join between Developer and Project
- # will give the default join table name of "developers_projects" because "D" precedes "P" alphabetically.
- # Note that this precedence is calculated using the <tt><</tt> operator for String. This
- # means that if the strings are of different lengths, and the strings are equal when compared
- # up to the shortest length, then the longer string is considered of higher
- # lexical precedence than the shorter one. For example, one would expect the tables "paper_boxes" and "papers"
- # to generate a join table name of "papers_paper_boxes" because of the length of the name "paper_boxes",
- # but it in fact generates a join table name of "paper_boxes_papers". Be aware of this caveat, and use the
- # custom <tt>:join_table</tt> option if you need to.
- # If your tables share a common prefix, it will only appear once at the beginning. For example,
- # the tables "catalog_categories" and "catalog_products" generate a join table name of "catalog_categories_products".
- #
- # The join table should not have a primary key or a model associated with it. You must manually generate the
- # join table with a migration such as this:
- #
- # class CreateDevelopersProjectsJoinTable < ActiveRecord::Migration[5.0]
- # def change
- # create_join_table :developers, :projects
+ # The association methods module is included immediately after the
+ # generated attributes methods module, meaning an association will
+ # override the methods for an attribute with the same name.
+ #
+ # == Cardinality and associations
+ #
+ # Active Record associations can be used to describe one-to-one, one-to-many and many-to-many
+ # relationships between models. Each model uses an association to describe its role in
+ # the relation. The #belongs_to association is always used in the model that has
+ # the foreign key.
+ #
+ # === One-to-one
+ #
+ # Use #has_one in the base, and #belongs_to in the associated model.
+ #
+ # class Employee < ActiveRecord::Base
+ # has_one :office
+ # end
+ # class Office < ActiveRecord::Base
+ # belongs_to :employee # foreign key - employee_id
+ # end
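+ #
+ # As a minimal sketch (the migration class name below is only illustrative), the
+ # +employee_id+ foreign key mentioned above could be created like this:
+ #
+ #   class CreateOffices < ActiveRecord::Migration[5.0]
+ #     def change
+ #       create_table :offices do |t|
+ #         # adds an employee_id column backing the belongs_to :employee association
+ #         t.references :employee, foreign_key: true
+ #       end
+ #     end
+ #   end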
+ #
+ # === One-to-many
+ #
+ # Use #has_many in the base, and #belongs_to in the associated model.
+ #
+ # class Manager < ActiveRecord::Base
+ # has_many :employees
+ # end
+ # class Employee < ActiveRecord::Base
+ # belongs_to :manager # foreign key - manager_id
+ # end
+ #
+ # === Many-to-many
+ #
+ # There are two ways to build a many-to-many relationship.
+ #
+ # The first way uses a #has_many association with the <tt>:through</tt> option and a join model, so
+ # there are two stages of associations.
+ #
+ # class Assignment < ActiveRecord::Base
+ # belongs_to :programmer # foreign key - programmer_id
+ # belongs_to :project # foreign key - project_id
+ # end
+ # class Programmer < ActiveRecord::Base
+ # has_many :assignments
+ # has_many :projects, through: :assignments
+ # end
+ # class Project < ActiveRecord::Base
+ # has_many :assignments
+ # has_many :programmers, through: :assignments
+ # end
+ #
+ # For the second way, use #has_and_belongs_to_many in both models. This requires a join table
+ # that has no corresponding model or primary key.
+ #
+ # class Programmer < ActiveRecord::Base
+ # has_and_belongs_to_many :projects # foreign keys in the join table
+ # end
+ # class Project < ActiveRecord::Base
+ # has_and_belongs_to_many :programmers # foreign keys in the join table
+ # end
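+ #
+ # The join table for this second approach has no model and no primary key of its own.
+ # A rough sketch of a migration that creates it (names simply follow the example above):
+ #
+ #   class CreateProgrammersProjectsJoinTable < ActiveRecord::Migration[5.0]
+ #     def change
+ #       # creates a programmers_projects table containing only
+ #       # programmer_id and project_id columns, with no primary key
+ #       create_join_table :programmers, :projects
+ #     end
+ #   end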
+ #
+ # Choosing which way to build a many-to-many relationship is not always simple.
+ # If you need to work with the relationship model as its own entity,
+ # use #has_many <tt>:through</tt>. Use #has_and_belongs_to_many when working with legacy schemas or when
+ # you never work directly with the relationship itself.
+ #
+ # == Is it a #belongs_to or #has_one association?
+ #
+ # Both express a 1-1 relationship. The difference is mostly where to place the foreign
+ # key, which goes on the table for the class declaring the #belongs_to relationship.
+ #
+ # class User < ActiveRecord::Base
+ # # I reference an account.
+ # belongs_to :account
+ # end
+ #
+ # class Account < ActiveRecord::Base
+ # # One user references me.
+ # has_one :user
+ # end
+ #
+ # The tables for these classes could look something like:
+ #
+ # CREATE TABLE users (
+ # id bigint NOT NULL auto_increment,
+ # account_id bigint default NULL,
+ # name varchar default NULL,
+ # PRIMARY KEY (id)
+ # )
+ #
+ # CREATE TABLE accounts (
+ # id bigint NOT NULL auto_increment,
+ # name varchar default NULL,
+ # PRIMARY KEY (id)
+ # )
+ #
+ # == Unsaved objects and associations
+ #
+ # You can manipulate objects and associations before they are saved to the database, but
+ # there is some special behavior you should be aware of, mostly involving the saving of
+ # associated objects.
+ #
+ # You can set the <tt>:autosave</tt> option on a #has_one, #belongs_to,
+ # #has_many, or #has_and_belongs_to_many association. Setting it
+ # to +true+ will _always_ save the members, whereas setting it to +false+ will
+ # _never_ save the members. More details about the <tt>:autosave</tt> option are available at
+ # AutosaveAssociation.
+ #
+ # === One-to-one associations
+ #
+ # * Assigning an object to a #has_one association automatically saves that object and
+ # the object being replaced (if there is one), in order to update their foreign
+ # keys - except if the parent object is unsaved (<tt>new_record? == true</tt>).
+ # * If either of these saves fails (due to one of the objects being invalid), an
+ # ActiveRecord::RecordNotSaved exception is raised and the assignment is
+ # cancelled.
+ # * If you wish to assign an object to a #has_one association without saving it,
+ # use the <tt>#build_association</tt> method (documented below). The object being
+ # replaced will still be saved to update its foreign key.
+ # * Assigning an object to a #belongs_to association does not save the object, since
+ # the foreign key field belongs on the parent. It does not save the parent either.
+ #
+ # === Collections
+ #
+ # * Adding an object to a collection (#has_many or #has_and_belongs_to_many) automatically
+ # saves that object, except if the parent object (the owner of the collection) is not yet
+ # stored in the database.
+ # * If saving any of the objects being added to a collection (via <tt>push</tt> or similar)
+ # fails, then <tt>push</tt> returns +false+.
+ # * If saving fails while replacing the collection (via <tt>association=</tt>), an
+ # ActiveRecord::RecordNotSaved exception is raised and the assignment is
+ # cancelled.
+ # * You can add an object to a collection without automatically saving it by using the
+ # <tt>collection.build</tt> method (documented below); see the sketch after this list.
+ # * All unsaved (<tt>new_record? == true</tt>) members of the collection are automatically
+ # saved when the parent is saved.
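+ #
+ # A short sketch of the build versus push behavior described above, using the Firm and
+ # Client models from the join-model examples later in this overview (the +name+
+ # attribute is assumed for illustration):
+ #
+ #   firm = Firm.first
+ #   client = firm.clients.build(name: "New client")  # not yet saved
+ #   client.new_record?                               # => true
+ #   firm.clients << Client.new(name: "Other client") # saved immediately, because
+ #                                                     # the firm is already persisted
+ #   firm.save                                         # saves the built client too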
+ #
+ # == Customizing the query
+ #
+ # \Associations are built from <tt>Relation</tt> objects, and you can use the Relation syntax
+ # to customize them. For example, to add a condition:
+ #
+ # class Blog < ActiveRecord::Base
+ # has_many :published_posts, -> { where(published: true) }, class_name: 'Post'
+ # end
+ #
+ # Inside the <tt>-> { ... }</tt> block you can use all of the usual Relation methods.
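+ #
+ # For instance, several Relation methods can be chained in one scope; the model and
+ # column names below are just for illustration:
+ #
+ #   class Blog < ActiveRecord::Base
+ #     # the ten most recent published posts, newest first
+ #     has_many :recent_posts,
+ #              -> { where(published: true).order(created_at: :desc).limit(10) },
+ #              class_name: 'Post'
+ #   end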
+ #
+ # === Accessing the owner object
+ #
+ # Sometimes it is useful to have access to the owner object when building the query. The owner
+ # is passed as a parameter to the block. For example, the following association would find all
+ # events that occur on the user's birthday:
+ #
+ # class User < ActiveRecord::Base
+ # has_many :birthday_events, ->(user) { where(starts_on: user.birthday) }, class_name: 'Event'
+ # end
+ #
+ # Note: Joining, eager loading and preloading of these associations are not possible.
+ # These operations happen before instance creation and the scope will be called with a +nil+ argument.
+ #
+ # == Association callbacks
+ #
+ # Similar to the normal callbacks that hook into the life cycle of an Active Record object,
+ # you can also define callbacks that get triggered when you add an object to or remove an
+ # object from an association collection.
+ #
+ # class Project
+ # has_and_belongs_to_many :developers, after_add: :evaluate_velocity
+ #
+ # def evaluate_velocity(developer)
+ # ...
+ # end
+ # end
+ #
+ # It's possible to stack callbacks by passing them as an array. Example:
+ #
+ # class Project
+ # has_and_belongs_to_many :developers,
+ # after_add: [:evaluate_velocity, Proc.new { |p, d| p.shipping_date = Time.now}]
+ # end
+ #
+ # Possible callbacks are: +before_add+, +after_add+, +before_remove+ and +after_remove+.
+ #
+ # If any of the +before_add+ callbacks throw an exception, the object will not be
+ # added to the collection.
+ #
+ # Similarly, if any of the +before_remove+ callbacks throw an exception, the object
+ # will not be removed from the collection.
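+ #
+ # A small sketch of that behavior (the team-size check below is made up for illustration):
+ #
+ #   class Project < ActiveRecord::Base
+ #     has_and_belongs_to_many :developers, before_add: :check_team_size
+ #
+ #     private
+ #       def check_team_size(developer)
+ #         # raising here prevents the developer from being added to the collection
+ #         raise "Team is full" if developers.size >= 10
+ #       end
+ #   end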
+ #
+ # == Association extensions
+ #
+ # The proxy objects that control the access to associations can be extended through anonymous
+ # modules. This is especially beneficial for adding new finders, creators, and other
+ # factory-type methods that are only used as part of this association.
+ #
+ # class Account < ActiveRecord::Base
+ # has_many :people do
+ # def find_or_create_by_name(name)
+ # first_name, last_name = name.split(" ", 2)
+ # find_or_create_by(first_name: first_name, last_name: last_name)
+ # end
# end
# end
#
- # It's also a good idea to add indexes to each of those columns to speed up the joins process.
- # However, in MySQL it is advised to add a compound index for both of the columns as MySQL only
- # uses one index per table during the lookup.
- #
- # Adds the following methods for retrieval and query:
- #
- # +collection+ is a placeholder for the symbol passed as the +name+ argument, so
- # <tt>has_and_belongs_to_many :categories</tt> would add among others <tt>categories.empty?</tt>.
- #
- # [collection(force_reload = false)]
- # Returns an array of all the associated objects.
- # An empty array is returned if none are found.
- # [collection<<(object, ...)]
- # Adds one or more objects to the collection by creating associations in the join table
- # (<tt>collection.push</tt> and <tt>collection.concat</tt> are aliases to this method).
- # Note that this operation instantly fires update SQL without waiting for the save or update call on the
- # parent object, unless the parent object is a new record.
- # [collection.delete(object, ...)]
- # Removes one or more objects from the collection by removing their associations from the join table.
- # This does not destroy the objects.
- # [collection.destroy(object, ...)]
- # Removes one or more objects from the collection by running destroy on each association in the join table, overriding any dependent option.
- # This does not destroy the objects.
- # [collection=objects]
- # Replaces the collection's content by deleting and adding objects as appropriate.
- # [collection_singular_ids]
- # Returns an array of the associated objects' ids.
- # [collection_singular_ids=ids]
- # Replace the collection by the objects identified by the primary keys in +ids+.
- # [collection.clear]
- # Removes every object from the collection. This does not destroy the objects.
- # [collection.empty?]
- # Returns +true+ if there are no associated objects.
- # [collection.size]
- # Returns the number of associated objects.
- # [collection.find(id)]
- # Finds an associated object responding to the +id+ and that
- # meets the condition that it has to be associated with this object.
- # Uses the same rules as ActiveRecord::FinderMethods#find.
- # [collection.exists?(...)]
- # Checks whether an associated object with the given conditions exists.
- # Uses the same rules as ActiveRecord::FinderMethods#exists?.
- # [collection.build(attributes = {})]
- # Returns a new object of the collection type that has been instantiated
- # with +attributes+ and linked to this object through the join table, but has not yet been saved.
- # [collection.create(attributes = {})]
- # Returns a new object of the collection type that has been instantiated
- # with +attributes+, linked to this object through the join table, and that has already been
- # saved (if it passed the validation).
- #
- # === Example
- #
- # A Developer class declares <tt>has_and_belongs_to_many :projects</tt>, which will add:
- # * <tt>Developer#projects</tt>
- # * <tt>Developer#projects<<</tt>
- # * <tt>Developer#projects.delete</tt>
- # * <tt>Developer#projects.destroy</tt>
- # * <tt>Developer#projects=</tt>
- # * <tt>Developer#project_ids</tt>
- # * <tt>Developer#project_ids=</tt>
- # * <tt>Developer#projects.clear</tt>
- # * <tt>Developer#projects.empty?</tt>
- # * <tt>Developer#projects.size</tt>
- # * <tt>Developer#projects.find(id)</tt>
- # * <tt>Developer#projects.exists?(...)</tt>
- # * <tt>Developer#projects.build</tt> (similar to <tt>Project.new("developer_id" => id)</tt>)
- # * <tt>Developer#projects.create</tt> (similar to <tt>c = Project.new("developer_id" => id); c.save; c</tt>)
- # The declaration may include an +options+ hash to specialize the behavior of the association.
- #
- # === Scopes
- #
- # You can pass a second argument +scope+ as a callable (i.e. proc or
- # lambda) to retrieve a specific set of records or customize the generated
- # query when you access the associated collection.
- #
- # Scope examples:
- # has_and_belongs_to_many :projects, -> { includes(:milestones, :manager) }
- # has_and_belongs_to_many :categories, ->(category) {
- # where("default_category = ?", category.name)
- # }
- #
- # === Extensions
- #
- # The +extension+ argument allows you to pass a block into a
- # has_and_belongs_to_many association. This is useful for adding new
- # finders, creators and other factory-type methods to be used as part of
- # the association.
- #
- # Extension examples:
- # has_and_belongs_to_many :contractors do
+ # person = Account.first.people.find_or_create_by_name("David Heinemeier Hansson")
+ # person.first_name # => "David"
+ # person.last_name # => "Heinemeier Hansson"
+ #
+ # If you need to share the same extensions between many associations, you can use a named
+ # extension module.
+ #
+ # module FindOrCreateByNameExtension
# def find_or_create_by_name(name)
# first_name, last_name = name.split(" ", 2)
# find_or_create_by(first_name: first_name, last_name: last_name)
# end
# end
#
- # === Options
- #
- # [:class_name]
- # Specify the class name of the association. Use it only if that name can't be inferred
- # from the association name. So <tt>has_and_belongs_to_many :projects</tt> will by default be linked to the
- # Project class, but if the real class name is SuperProject, you'll have to specify it with this option.
- # [:join_table]
- # Specify the name of the join table if the default based on lexical order isn't what you want.
- # <b>WARNING:</b> If you're overwriting the table name of either class, the +table_name+ method
- # MUST be declared underneath any #has_and_belongs_to_many declaration in order to work.
- # [:foreign_key]
- # Specify the foreign key used for the association. By default this is guessed to be the name
- # of this class in lower-case and "_id" suffixed. So a Person class that makes
- # a #has_and_belongs_to_many association to Project will use "person_id" as the
- # default <tt>:foreign_key</tt>.
- # [:association_foreign_key]
- # Specify the foreign key used for the association on the receiving side of the association.
- # By default this is guessed to be the name of the associated class in lower-case and "_id" suffixed.
- # So if a Person class makes a #has_and_belongs_to_many association to Project,
- # the association will use "project_id" as the default <tt>:association_foreign_key</tt>.
- # [:validate]
- # If +false+, don't validate the associated objects when saving the parent object. +true+ by default.
- # [:autosave]
- # If true, always save the associated objects or destroy them if marked for destruction, when
- # saving the parent object.
- # If false, never save or destroy the associated objects.
- # By default, only save associated objects that are new records.
- #
- # Note that NestedAttributes::ClassMethods#accepts_nested_attributes_for sets
- # <tt>:autosave</tt> to <tt>true</tt>.
- #
- # Option examples:
- # has_and_belongs_to_many :projects
- # has_and_belongs_to_many :projects, -> { includes(:milestones, :manager) }
- # has_and_belongs_to_many :nations, class_name: "Country"
- # has_and_belongs_to_many :categories, join_table: "prods_cats"
- # has_and_belongs_to_many :categories, -> { readonly }
- def has_and_belongs_to_many(name, scope = nil, options = {}, &extension)
- if scope.is_a?(Hash)
- options = scope
- scope = nil
+ # class Account < ActiveRecord::Base
+ # has_many :people, -> { extending FindOrCreateByNameExtension }
+ # end
+ #
+ # class Company < ActiveRecord::Base
+ # has_many :people, -> { extending FindOrCreateByNameExtension }
+ # end
+ #
+ # Some extensions can only be made to work with knowledge of the association's internals.
+ # Extensions can access relevant state using the following methods (where +items+ is the
+ # name of the association):
+ #
+ # * <tt>record.association(:items).owner</tt> - Returns the object the association is part of.
+ # * <tt>record.association(:items).reflection</tt> - Returns the reflection object that describes the association.
+ # * <tt>record.association(:items).target</tt> - Returns the associated object for #belongs_to and #has_one, or
+ # the collection of associated objects for #has_many and #has_and_belongs_to_many.
+ #
+ # However, inside the actual extension code, you will not have access to the <tt>record</tt> as
+ # above. In this case, you can access <tt>proxy_association</tt>. For example,
+ # <tt>record.association(:items)</tt> and <tt>record.items.proxy_association</tt> will return
+ # the same object, allowing you to make calls like <tt>proxy_association.owner</tt> inside
+ # association extensions.
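+ #
+ # As a sketch (the extension method below is hypothetical), +proxy_association+ lets an
+ # extension reach back to its owner:
+ #
+ #   class Account < ActiveRecord::Base
+ #     has_many :people do
+ #       def description
+ #         # proxy_association.owner is the Account that owns this collection
+ #         "People for account ##{proxy_association.owner.id}"
+ #       end
+ #     end
+ #   end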
+ #
+ # == Association Join Models
+ #
+ # Has Many associations can be configured with the <tt>:through</tt> option to use an
+ # explicit join model to retrieve the data. This operates similarly to a
+ # #has_and_belongs_to_many association. The advantage is that you're able to add validations,
+ # callbacks, and extra attributes on the join model. Consider the following schema:
+ #
+ # class Author < ActiveRecord::Base
+ # has_many :authorships
+ # has_many :books, through: :authorships
+ # end
+ #
+ # class Authorship < ActiveRecord::Base
+ # belongs_to :author
+ # belongs_to :book
+ # end
+ #
+ # @author = Author.first
+ # @author.authorships.collect { |a| a.book } # selects all books that the author's authorships belong to
+ # @author.books # selects all books by using the Authorship join model
+ #
+ # You can also go through a #has_many association on the join model:
+ #
+ # class Firm < ActiveRecord::Base
+ # has_many :clients
+ # has_many :invoices, through: :clients
+ # end
+ #
+ # class Client < ActiveRecord::Base
+ # belongs_to :firm
+ # has_many :invoices
+ # end
+ #
+ # class Invoice < ActiveRecord::Base
+ # belongs_to :client
+ # end
+ #
+ # @firm = Firm.first
+ # @firm.clients.flat_map { |c| c.invoices } # select all invoices for all clients of the firm
+ # @firm.invoices # selects all invoices by going through the Client join model
+ #
+ # Similarly you can go through a #has_one association on the join model:
+ #
+ # class Group < ActiveRecord::Base
+ # has_many :users
+ # has_many :avatars, through: :users
+ # end
+ #
+ # class User < ActiveRecord::Base
+ # belongs_to :group
+ # has_one :avatar
+ # end
+ #
+ # class Avatar < ActiveRecord::Base
+ # belongs_to :user
+ # end
+ #
+ # @group = Group.first
+ # @group.users.collect { |u| u.avatar }.compact # select all avatars for all users in the group
+ # @group.avatars # selects all avatars by going through the User join model.
+ #
+ # An important caveat with going through #has_one or #has_many associations on the
+ # join model is that these associations are *read-only*. For example, continuing the
+ # previous example, the following would not work:
+ #
+ # @group.avatars << Avatar.new # this would work if User belonged_to Avatar rather than the other way around
+ # @group.avatars.delete(@group.avatars.last) # so would this
+ #
+ # == Setting Inverses
+ #
+ # If you are using a #belongs_to on the join model, it is a good idea to set the
+ # <tt>:inverse_of</tt> option on the #belongs_to, which will mean that the following example
+ # works correctly (where <tt>tags</tt> is a #has_many <tt>:through</tt> association):
+ #
+ # @post = Post.first
+ # @tag = @post.tags.build name: "ruby"
+ # @tag.save
+ #
+ # The last line ought to save the through record (a <tt>Tagging</tt>). This will only work if the
+ # <tt>:inverse_of</tt> is set:
+ #
+ # class Tagging < ActiveRecord::Base
+ # belongs_to :post
+ # belongs_to :tag, inverse_of: :taggings
+ # end
+ #
+ # If you do not set the <tt>:inverse_of</tt> option, the association will
+ # do its best to match itself up with the correct inverse. Automatic
+ # inverse detection only works on #has_many, #has_one, and
+ # #belongs_to associations.
+ #
+ # Extra options on the associations, as defined in the
+ # <tt>AssociationReflection::INVALID_AUTOMATIC_INVERSE_OPTIONS</tt>
+ # constant, or a custom scope, will also prevent the association's inverse
+ # from being found automatically.
+ #
+ # The automatic guessing of the inverse association uses a heuristic based
+ # on the name of the class, so it may not work for all associations,
+ # especially the ones with non-standard names.
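+ #
+ # For example (hypothetical names), an association whose name does not match the class
+ # name will not be detected automatically, so declare the inverse explicitly on both sides:
+ #
+ #   class Post < ActiveRecord::Base
+ #     has_many :taggings, inverse_of: :tagged_post
+ #   end
+ #
+ #   class Tagging < ActiveRecord::Base
+ #     # the association name does not match the Post class name, so the inverse
+ #     # cannot be guessed and is declared explicitly on both sides
+ #     belongs_to :tagged_post, class_name: 'Post', inverse_of: :taggings
+ #   end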
+ #
+ # You can turn off the automatic detection of inverse associations by setting
+ # the <tt>:inverse_of</tt> option to <tt>false</tt> like so:
+ #
+ # class Tagging < ActiveRecord::Base
+ # belongs_to :tag, inverse_of: false
+ # end
+ #
+ # == Nested \Associations
+ #
+ # You can actually specify *any* association with the <tt>:through</tt> option, including an
+ # association which has a <tt>:through</tt> option itself. For example:
+ #
+ # class Author < ActiveRecord::Base
+ # has_many :posts
+ # has_many :comments, through: :posts
+ # has_many :commenters, through: :comments
+ # end
+ #
+ # class Post < ActiveRecord::Base
+ # has_many :comments
+ # end
+ #
+ # class Comment < ActiveRecord::Base
+ # belongs_to :commenter
+ # end
+ #
+ # @author = Author.first
+ # @author.commenters # => People who commented on posts written by the author
+ #
+ # An equivalent way of setting up this association would be:
+ #
+ # class Author < ActiveRecord::Base
+ # has_many :posts
+ # has_many :commenters, through: :posts
+ # end
+ #
+ # class Post < ActiveRecord::Base
+ # has_many :comments
+ # has_many :commenters, through: :comments
+ # end
+ #
+ # class Comment < ActiveRecord::Base
+ # belongs_to :commenter
+ # end
+ #
+ # When using a nested association, you will not be able to modify the association because there
+ # is not enough information to know what modification to make. For example, if you tried to
+ # add a <tt>Commenter</tt> in the example above, there would be no way to tell how to set up the
+ # intermediate <tt>Post</tt> and <tt>Comment</tt> objects.
+ #
+ # == Polymorphic \Associations
+ #
+ # Polymorphic associations on models are not restricted in what types of models they
+ # can be associated with. Rather, they specify an interface that a #has_many association
+ # must adhere to.
+ #
+ # class Asset < ActiveRecord::Base
+ # belongs_to :attachable, polymorphic: true
+ # end
+ #
+ # class Post < ActiveRecord::Base
+ # has_many :assets, as: :attachable # The :as option specifies the polymorphic interface to use.
+ # end
+ #
+ # @asset.attachable = @post
+ #
+ # This works by using a type column in addition to a foreign key to specify the associated
+ # record. In the Asset example, you'd need an +attachable_id+ integer column and an
+ # +attachable_type+ string column.
+ #
+ # Using polymorphic associations in combination with single table inheritance (STI) is
+ # a little tricky. In order for the associations to work as expected, ensure that you
+ # store the base model for the STI models in the type column of the polymorphic
+ # association. To continue with the asset example above, suppose there are guest posts
+ # and member posts that use the posts table for STI. In this case, there must be a +type+
+ # column in the posts table.
+ #
+ # Note: The <tt>attachable_type=</tt> method is called when assigning an +attachable+.
+ # The +class_name+ of the +attachable+ is passed as a String.
+ #
+ # class Asset < ActiveRecord::Base
+ # belongs_to :attachable, polymorphic: true
+ #
+ # def attachable_type=(class_name)
+ # super(class_name.constantize.base_class.to_s)
+ # end
+ # end
+ #
+ # class Post < ActiveRecord::Base
+ # # because we store "Post" in attachable_type now dependent: :destroy will work
+ # has_many :assets, as: :attachable, dependent: :destroy
+ # end
+ #
+ # class GuestPost < Post
+ # end
+ #
+ # class MemberPost < Post
+ # end
+ #
+ # == Caching
+ #
+ # All of the methods are built on a simple caching principle that will keep the result
+ # of the last query around unless specifically instructed not to. The cache is even
+ # shared across methods to make it even cheaper to use the macro-added methods without
+ # worrying too much about performance at the first go.
+ #
+ # project.milestones # fetches milestones from the database
+ # project.milestones.size # uses the milestone cache
+ # project.milestones.empty? # uses the milestone cache
+ # project.milestones.reload.size # fetches milestones from the database
+ # project.milestones # uses the milestone cache
+ #
+ # == Eager loading of associations
+ #
+ # Eager loading is a way to find objects of a certain class and a number of named associations.
+ # It is one of the easiest ways to prevent the dreaded N+1 problem in which fetching 100
+ # posts that each need to display their author triggers 101 database queries. Through the
+ # use of eager loading, the number of queries will be reduced from 101 to 2.
+ #
+ # class Post < ActiveRecord::Base
+ # belongs_to :author
+ # has_many :comments
+ # end
+ #
+ # Consider the following loop using the class above:
+ #
+ # Post.all.each do |post|
+ # puts "Post: " + post.title
+ # puts "Written by: " + post.author.name
+ # puts "Last comment on: " + post.comments.first.created_on
+ # end
+ #
+ # To iterate over these one hundred posts, we'll generate 201 database queries. Let's
+ # first just optimize it for retrieving the author:
+ #
+ # Post.includes(:author).each do |post|
+ #
+ # This references the name of the #belongs_to association that also used the <tt>:author</tt>
+ # symbol. After loading the posts, +find+ will collect the +author_id+ from each one and load
+ # all of the referenced authors with one query. Doing so will cut down the number of queries
+ # from 201 to 102.
+ #
+ # We can improve upon the situation further by referencing both associations in the finder with:
+ #
+ # Post.includes(:author, :comments).each do |post|
+ #
+ # This will load all comments with a single query. This reduces the total number of queries
+ # to 3. In general, the number of queries will be 1 plus the number of associations
+ # named (except if some of the associations are polymorphic #belongs_to - see below).
+ #
+ # To include a deep hierarchy of associations, use a hash:
+ #
+ # Post.includes(:author, { comments: { author: :gravatar } }).each do |post|
+ #
+ # The above code will load all the comments and all of their associated
+ # authors and gravatars. You can mix and match any combination of symbols,
+ # arrays, and hashes to retrieve the associations you want to load.
+ #
+ # All of this power shouldn't fool you into thinking that you can pull out huge amounts
+ # of data with no performance penalty just because you've reduced the number of queries.
+ # The database still needs to send all the data to Active Record and it still needs to
+ # be processed. So it's no catch-all for performance problems, but it's a great way to
+ # cut down on the number of queries in a situation like the one described above.
+ #
+ # Since only one table is loaded at a time, conditions or orders cannot reference tables
+ # other than the main one. If this is the case, Active Record falls back to the previously
+ # used <tt>LEFT OUTER JOIN</tt> based strategy. For example:
+ #
+ # Post.includes([:author, :comments]).where(['comments.approved = ?', true])
+ #
+ # This will result in a single SQL query with joins along the lines of:
+ # <tt>LEFT OUTER JOIN comments ON comments.post_id = posts.id</tt> and
+ # <tt>LEFT OUTER JOIN authors ON authors.id = posts.author_id</tt>. Note that using conditions
+ # like this can have unintended consequences.
+ # In the above example, posts with no approved comments are not returned at all because
+ # the conditions apply to the SQL statement as a whole and not just to the association.
+ #
+ # You must disambiguate column references for this fallback to happen, for example
+ # <tt>order: "author.name DESC"</tt> will work but <tt>order: "name DESC"</tt> will not.
+ #
+ # If you want to load all posts (including posts with no approved comments), then write
+ # your own <tt>LEFT OUTER JOIN</tt> query using <tt>ON</tt>:
+ #
+ # Post.joins("LEFT OUTER JOIN comments ON comments.post_id = posts.id AND comments.approved = '1'")
+ #
+ # In this case, it is usually more natural to include an association which has conditions defined on it:
+ #
+ # class Post < ActiveRecord::Base
+ # has_many :approved_comments, -> { where(approved: true) }, class_name: 'Comment'
+ # end
+ #
+ # Post.includes(:approved_comments)
+ #
+ # This will load posts and eager load the +approved_comments+ association, which contains
+ # only those comments that have been approved.
+ #
+ # If you eager load an association with a specified <tt>:limit</tt> option, it will be ignored,
+ # returning all the associated objects:
+ #
+ # class Picture < ActiveRecord::Base
+ # has_many :most_recent_comments, -> { order('id DESC').limit(10) }, class_name: 'Comment'
+ # end
+ #
+ # Picture.includes(:most_recent_comments).first.most_recent_comments # => returns all associated comments.
+ #
+ # Eager loading is supported with polymorphic associations.
+ #
+ # class Address < ActiveRecord::Base
+ # belongs_to :addressable, polymorphic: true
+ # end
+ #
+ # A call that tries to eager load the addressable model:
+ #
+ # Address.includes(:addressable)
+ #
+ # This will execute one query to load the addresses and load the addressables with one
+ # query per addressable type.
+ # For example, if all the addressables are either of class Person or Company, then a total
+ # of 3 queries will be executed. The list of addressable types to load is determined on
+ # the back of the addresses loaded. This is not supported if Active Record has to fall back
+ # to the previous implementation of eager loading and will raise ActiveRecord::EagerLoadPolymorphicError.
+ # The reason is that the parent model's type is a column value so its corresponding table
+ # name cannot be put in the +FROM+/+JOIN+ clauses of that query.
+ #
+ # == Table Aliasing
+ #
+ # Active Record uses table aliasing in the case that a table is referenced multiple times
+ # in a join. If a table is referenced only once, the standard table name is used. The
+ # second time, the table is aliased as <tt>#{reflection_name}_#{parent_table_name}</tt>.
+ # Indexes are appended for any more successive uses of the table name.
+ #
+ # Post.joins(:comments)
+ # # => SELECT ... FROM posts INNER JOIN comments ON ...
+ # Post.joins(:special_comments) # STI
+ # # => SELECT ... FROM posts INNER JOIN comments ON ... AND comments.type = 'SpecialComment'
+ # Post.joins(:comments, :special_comments) # special_comments is the reflection name, posts is the parent table name
+ # # => SELECT ... FROM posts INNER JOIN comments ON ... INNER JOIN comments special_comments_posts
+ #
+ # Acts as tree example:
+ #
+ # TreeMixin.joins(:children)
+ # # => SELECT ... FROM mixins INNER JOIN mixins childrens_mixins ...
+ # TreeMixin.joins(children: :parent)
+ # # => SELECT ... FROM mixins INNER JOIN mixins childrens_mixins ...
+ # INNER JOIN parents_mixins ...
+ # TreeMixin.joins(children: {parent: :children})
+ # # => SELECT ... FROM mixins INNER JOIN mixins childrens_mixins ...
+ # INNER JOIN parents_mixins ...
+ # INNER JOIN mixins childrens_mixins_2
+ #
+ # Has and Belongs to Many join tables use the same idea, but add a <tt>_join</tt> suffix:
+ #
+ # Post.joins(:categories)
+ # # => SELECT ... FROM posts INNER JOIN categories_posts ... INNER JOIN categories ...
+ # Post.joins(categories: :posts)
+ # # => SELECT ... FROM posts INNER JOIN categories_posts ... INNER JOIN categories ...
+ # INNER JOIN categories_posts posts_categories_join INNER JOIN posts posts_categories
+ # Post.joins(categories: {posts: :categories})
+ # # => SELECT ... FROM posts INNER JOIN categories_posts ... INNER JOIN categories ...
+ # INNER JOIN categories_posts posts_categories_join INNER JOIN posts posts_categories
+ # INNER JOIN categories_posts categories_posts_join INNER JOIN categories categories_posts_2
+ #
+ # If you wish to specify your own custom joins using ActiveRecord::QueryMethods#joins method, those table
+ # names will take precedence over the eager associations:
+ #
+ # Post.joins(:comments).joins("inner join comments ...")
+ # # => SELECT ... FROM posts INNER JOIN comments_posts ON ... INNER JOIN comments ...
+ # Post.joins(:comments, :special_comments).joins("inner join comments ...")
+ # # => SELECT ... FROM posts INNER JOIN comments comments_posts ON ...
+ # INNER JOIN comments special_comments_posts ...
+ # INNER JOIN comments ...
+ #
+ # Table aliases are automatically truncated to the maximum length of table identifiers
+ # allowed by the specific database.
+ #
+ # == Modules
+ #
+ # By default, associations will look for objects within the current module scope. Consider:
+ #
+ # module MyApplication
+ # module Business
+ # class Firm < ActiveRecord::Base
+ # has_many :clients
+ # end
+ #
+ # class Client < ActiveRecord::Base; end
+ # end
+ # end
+ #
+ # When <tt>Firm#clients</tt> is called, it will in turn call
+ # <tt>MyApplication::Business::Client.find_all_by_firm_id(firm.id)</tt>.
+ # If you want to associate with a class in another module scope, this can be done by
+ # specifying the complete class name.
+ #
+ # module MyApplication
+ # module Business
+ # class Firm < ActiveRecord::Base; end
+ # end
+ #
+ # module Billing
+ # class Account < ActiveRecord::Base
+ # belongs_to :firm, class_name: "MyApplication::Business::Firm"
+ # end
+ # end
+ # end
+ #
+ # == Bi-directional associations
+ #
+ # When you specify an association, there is usually an association on the associated model
+ # that specifies the same relationship in reverse. For example, with the following models:
+ #
+ # class Dungeon < ActiveRecord::Base
+ # has_many :traps
+ # has_one :evil_wizard
+ # end
+ #
+ # class Trap < ActiveRecord::Base
+ # belongs_to :dungeon
+ # end
+ #
+ # class EvilWizard < ActiveRecord::Base
+ # belongs_to :dungeon
+ # end
+ #
+ # The +traps+ association on +Dungeon+ and the +dungeon+ association on +Trap+ are
+ # the inverse of each other, and the inverse of the +dungeon+ association on +EvilWizard+
+ # is the +evil_wizard+ association on +Dungeon+ (and vice-versa). By default,
+ # Active Record can guess the inverse of the association based on the name
+ # of the class. The result is the following:
+ #
+ # d = Dungeon.first
+ # t = d.traps.first
+ # d.object_id == t.dungeon.object_id # => true
+ #
+ # The +Dungeon+ instances +d+ and <tt>t.dungeon</tt> in the above example refer to
+ # the same in-memory instance since the association matches the name of the class.
+ # The result would be the same if we added +:inverse_of+ to our model definitions:
+ #
+ # class Dungeon < ActiveRecord::Base
+ # has_many :traps, inverse_of: :dungeon
+ # has_one :evil_wizard, inverse_of: :dungeon
+ # end
+ #
+ # class Trap < ActiveRecord::Base
+ # belongs_to :dungeon, inverse_of: :traps
+ # end
+ #
+ # class EvilWizard < ActiveRecord::Base
+ # belongs_to :dungeon, inverse_of: :evil_wizard
+ # end
+ #
+ # For more information, see the documentation for the +:inverse_of+ option.
+ #
+ # == Deleting from associations
+ #
+ # === Dependent associations
+ #
+ # #has_many, #has_one, and #belongs_to associations support the <tt>:dependent</tt> option.
+ # This allows you to specify that associated records should be deleted when the owner is
+ # deleted.
+ #
+ # For example:
+ #
+ # class Author
+ # has_many :posts, dependent: :destroy
+ # end
+ # Author.find(1).destroy # => Will destroy all of the author's posts, too
+ #
+ # The <tt>:dependent</tt> option can have different values which specify how the deletion
+ # is done. For more information, see the documentation for this option on the different
+ # specific association types. When no option is given, the behavior is to do nothing
+ # with the associated records when destroying a record.
+ #
+ # Note that <tt>:dependent</tt> is implemented using Rails' callback
+ # system, which works by processing callbacks in order. Therefore, other
+ # callbacks declared either before or after the <tt>:dependent</tt> option
+ # can affect what it does.
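+ #
+ # As a rough sketch of that ordering, reusing the Author model above (the logging
+ # callbacks themselves are made up for illustration):
+ #
+ #   class Author < ActiveRecord::Base
+ #     before_destroy :log_posts            # runs while the posts still exist
+ #     has_many :posts, dependent: :destroy # registers its own destroy callback here
+ #     before_destroy :check_posts          # runs after the posts have been destroyed
+ #
+ #     private
+ #       def log_posts
+ #         puts "Destroying author with #{posts.count} posts"
+ #       end
+ #
+ #       def check_posts
+ #         puts "Posts left: #{posts.count}" # 0 by this point
+ #       end
+ #   end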
+ #
+ # Note that the <tt>:dependent</tt> option is ignored for #has_one <tt>:through</tt> associations.
+ #
+ # === Delete or destroy?
+ #
+ # #has_many and #has_and_belongs_to_many associations have the methods <tt>destroy</tt>,
+ # <tt>delete</tt>, <tt>destroy_all</tt> and <tt>delete_all</tt>.
+ #
+ # For #has_and_belongs_to_many, <tt>delete</tt> and <tt>destroy</tt> are the same: they
+ # cause the records in the join table to be removed.
+ #
+ # For #has_many, <tt>destroy</tt> and <tt>destroy_all</tt> will always call the <tt>destroy</tt> method of the
+ # record(s) being removed so that callbacks are run. However <tt>delete</tt> and <tt>delete_all</tt> will either
+ # do the deletion according to the strategy specified by the <tt>:dependent</tt> option, or
+ # if no <tt>:dependent</tt> option is given, then it will follow the default strategy.
+ # The default strategy is to do nothing (leave the foreign keys with the parent ids set), except for
+ # #has_many <tt>:through</tt>, where the default strategy is <tt>delete_all</tt> (delete
+ # the join records, without running their callbacks).
+ #
+ # There is also a <tt>clear</tt> method which is the same as <tt>delete_all</tt>, except that
+ # it returns the association rather than the records which have been deleted.
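+ #
+ # A quick sketch of the difference, using the Firm and Client models from the
+ # join-model examples above:
+ #
+ #   firm = Firm.first
+ #   firm.clients.destroy_all # calls destroy on each client, so callbacks run
+ #   firm.clients.delete_all  # follows the :dependent strategy (or the default
+ #                            # strategy if no :dependent option is given)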
+ #
+ # === What gets deleted?
+ #
+ # There is a potential pitfall here: #has_and_belongs_to_many and #has_many <tt>:through</tt>
+ # associations have records in join tables, as well as the associated records. So when we
+ # call one of these deletion methods, what exactly should be deleted?
+ #
+ # The answer is that it is assumed that deletion on an association is about removing the
+ # <i>link</i> between the owner and the associated object(s), rather than necessarily the
+ # associated objects themselves. So with #has_and_belongs_to_many and #has_many
+ # <tt>:through</tt>, the join records will be deleted, but the associated records won't.
+ #
+ # This makes sense if you think about it: if you were to call <tt>post.tags.delete(Tag.find_by(name: 'food'))</tt>
+ # you would want the 'food' tag to be unlinked from the post, rather than for the tag itself
+ # to be removed from the database.
+ #
+ # However, there are examples where this strategy doesn't make sense. For example, suppose
+ # a person has many projects, and each project has many tasks. If we deleted one of a person's
+ # tasks, we would probably not want the project to be deleted. In this scenario, the delete method
+ # won't actually work: it can only be used if the association on the join model is a
+ # #belongs_to. In other situations you are expected to perform operations directly on
+ # either the associated records or the <tt>:through</tt> association.
+ #
+ # With a regular #has_many there is no distinction between the "associated records"
+ # and the "link", so there is only one choice for what gets deleted.
+ #
+ # With #has_and_belongs_to_many and #has_many <tt>:through</tt>, if you want to delete the
+ # associated records themselves, you can always do something along the lines of
+ # <tt>person.tasks.each(&:destroy)</tt>.
+ #
+ # == Type safety with ActiveRecord::AssociationTypeMismatch
+ #
+ # If you attempt to assign an object to an association that doesn't match the inferred
+ # or specified <tt>:class_name</tt>, you'll get an ActiveRecord::AssociationTypeMismatch.
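+ #
+ # For instance, with the Firm, Client, and Invoice models defined earlier:
+ #
+ #   firm = Firm.first
+ #   firm.clients << Invoice.new
+ #   # => raises ActiveRecord::AssociationTypeMismatch, because the +clients+
+ #   #    association expects Client records, not an Invoice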
+ #
+ # == Options
+ #
+ # All of the association macros can be specialized through options. This makes it
+ # possible to handle cases more complex than the simple and guessable ones.
+ module ClassMethods
+ # Specifies a one-to-many association. The following methods for retrieval and query of
+ # collections of associated objects will be added:
+ #
+ # +collection+ is a placeholder for the symbol passed as the +name+ argument, so
+ # <tt>has_many :clients</tt> would add among others <tt>clients.empty?</tt>.
+ #
+ # [collection]
+ # Returns a Relation of all the associated objects.
+ # An empty Relation is returned if none are found.
+ # [collection<<(object, ...)]
+ # Adds one or more objects to the collection by setting their foreign keys to the collection's primary key.
+ # Note that this operation instantly fires update SQL without waiting for the save or update call on the
+ # parent object, unless the parent object is a new record.
+ # This will also run validations and callbacks of associated object(s).
+ # [collection.delete(object, ...)]
+ # Removes one or more objects from the collection by setting their foreign keys to +NULL+.
+ # Objects will be in addition destroyed if they're associated with <tt>dependent: :destroy</tt>,
+ # and deleted if they're associated with <tt>dependent: :delete_all</tt>.
+ #
+ # If the <tt>:through</tt> option is used, then the join records are deleted (rather than
+ # nullified) by default, but you can specify <tt>dependent: :destroy</tt> or
+ # <tt>dependent: :nullify</tt> to override this.
+ # [collection.destroy(object, ...)]
+ # Removes one or more objects from the collection by running <tt>destroy</tt> on
+ # each record, regardless of any dependent option, ensuring callbacks are run.
+ #
+ # If the <tt>:through</tt> option is used, then the join records are destroyed
+ # instead, not the objects themselves.
+ # [collection=objects]
+ # Replaces the collection's content by deleting and adding objects as appropriate. If the <tt>:through</tt>
+ # option is used, callbacks in the join models are triggered except destroy callbacks, since deletion is
+ # direct by default. You can specify <tt>dependent: :destroy</tt> or
+ # <tt>dependent: :nullify</tt> to override this.
+ # [collection_singular_ids]
+ # Returns an array of the associated objects' ids.
+ # [collection_singular_ids=ids]
+ # Replace the collection with the objects identified by the primary keys in +ids+. This
+ # method loads the models and calls <tt>collection=</tt>. See above.
+ # [collection.clear]
+ # Removes every object from the collection. This destroys the associated objects if they
+ # are associated with <tt>dependent: :destroy</tt>, deletes them directly from the
+ # database if <tt>dependent: :delete_all</tt>, otherwise sets their foreign keys to +NULL+.
+ # If the <tt>:through</tt> option is used, no destroy callbacks are invoked on the join models.
+ # Join models are directly deleted.
+ # [collection.empty?]
+ # Returns +true+ if there are no associated objects.
+ # [collection.size]
+ # Returns the number of associated objects.
+ # [collection.find(...)]
+ # Finds an associated object according to the same rules as ActiveRecord::FinderMethods#find.
+ # [collection.exists?(...)]
+ # Checks whether an associated object with the given conditions exists.
+ # Uses the same rules as ActiveRecord::FinderMethods#exists?.
+ # [collection.build(attributes = {}, ...)]
+ # Returns one or more new objects of the collection type that have been instantiated
+ # with +attributes+ and linked to this object through a foreign key, but have not yet
+ # been saved.
+ # [collection.create(attributes = {})]
+ # Returns a new object of the collection type that has been instantiated
+ # with +attributes+, linked to this object through a foreign key, and that has already
+ # been saved (if it passed the validation). *Note*: This only works if the base model
+ # already exists in the DB, not if it is a new (unsaved) record!
+ # [collection.create!(attributes = {})]
+ # Does the same as <tt>collection.create</tt>, but raises ActiveRecord::RecordInvalid
+ # if the record is invalid.
+ # [collection.reload]
+ # Returns a Relation of all of the associated objects, forcing a database read.
+ # An empty Relation is returned if none are found.
+ #
+ # === Example
+ #
+ # A <tt>Firm</tt> class declares <tt>has_many :clients</tt>, which will add:
+ # * <tt>Firm#clients</tt> (similar to <tt>Client.where(firm_id: id)</tt>)
+ # * <tt>Firm#clients<<</tt>
+ # * <tt>Firm#clients.delete</tt>
+ # * <tt>Firm#clients.destroy</tt>
+ # * <tt>Firm#clients=</tt>
+ # * <tt>Firm#client_ids</tt>
+ # * <tt>Firm#client_ids=</tt>
+ # * <tt>Firm#clients.clear</tt>
+ # * <tt>Firm#clients.empty?</tt> (similar to <tt>firm.clients.size == 0</tt>)
+ # * <tt>Firm#clients.size</tt> (similar to <tt>Client.count "firm_id = #{id}"</tt>)
+ # * <tt>Firm#clients.find</tt> (similar to <tt>Client.where(firm_id: id).find(id)</tt>)
+ # * <tt>Firm#clients.exists?(name: 'ACME')</tt> (similar to <tt>Client.exists?(name: 'ACME', firm_id: firm.id)</tt>)
+ # * <tt>Firm#clients.build</tt> (similar to <tt>Client.new(firm_id: id)</tt>)
+ # * <tt>Firm#clients.create</tt> (similar to <tt>c = Client.new(firm_id: id); c.save; c</tt>)
+ # * <tt>Firm#clients.create!</tt> (similar to <tt>c = Client.new(firm_id: id); c.save!</tt>)
+ # * <tt>Firm#clients.reload</tt>
+ # The declaration can also include an +options+ hash to specialize the behavior of the association.
+ #
+ # === Scopes
+ #
+ # You can pass a second argument +scope+ as a callable (i.e. proc or
+ # lambda) to retrieve a specific set of records or customize the generated
+ # query when you access the associated collection.
+ #
+ # Scope examples:
+ # has_many :comments, -> { where(author_id: 1) }
+ # has_many :employees, -> { joins(:address) }
+ # has_many :posts, ->(blog) { where("max_post_length > ?", blog.max_post_length) }
+ #
+ # === Extensions
+ #
+ # The +extension+ argument allows you to pass a block into a has_many
+ # association. This is useful for adding new finders, creators and other
+ # factory-type methods to be used as part of the association.
+ #
+ # Extension examples:
+ # has_many :employees do
+ # def find_or_create_by_name(name)
+ # first_name, last_name = name.split(" ", 2)
+ # find_or_create_by(first_name: first_name, last_name: last_name)
+ # end
+ # end
+ #
+ # === Options
+ # [:class_name]
+ # Specify the class name of the association. Use it only if that name can't be inferred
+ # from the association name. So <tt>has_many :products</tt> will by default be linked
+ # to the +Product+ class, but if the real class name is +SpecialProduct+, you'll have to
+ # specify it with this option.
+ # [:foreign_key]
+ # Specify the foreign key used for the association. By default this is guessed to be the name
+ # of this class in lower-case and "_id" suffixed. So a Person class that makes a #has_many
+ # association will use "person_id" as the default <tt>:foreign_key</tt>.
+ #
+ # If you are going to modify the association (rather than just read from it), then it is
+ # a good idea to set the <tt>:inverse_of</tt> option.
+ # [:foreign_type]
+ # Specify the column used to store the associated object's type, if this is a polymorphic
+ # association. By default this is guessed to be the name of the polymorphic association
+ # specified on "as" option with a "_type" suffix. So a class that defines a
+ # <tt>has_many :tags, as: :taggable</tt> association will use "taggable_type" as the
+ # default <tt>:foreign_type</tt>.
+ # [:primary_key]
+ # Specify the name of the column to use as the primary key for the association. By default this is +id+.
+ # [:dependent]
+ # Controls what happens to the associated objects when
+ # their owner is destroyed. Note that these are implemented as
+ # callbacks, and Rails executes callbacks in order. Therefore, other
+ # similar callbacks may affect the <tt>:dependent</tt> behavior, and the
+ # <tt>:dependent</tt> behavior may affect other callbacks.
+ #
+ # * <tt>:destroy</tt> causes all the associated objects to also be destroyed.
+ # * <tt>:delete_all</tt> causes all the associated objects to be deleted directly from the database (so callbacks will not be executed).
+ # * <tt>:nullify</tt> causes the foreign keys to be set to +NULL+. Polymorphic type will also be nullified
+ # on polymorphic associations. Callbacks are not executed.
+ # * <tt>:restrict_with_exception</tt> causes an <tt>ActiveRecord::DeleteRestrictionError</tt> exception to be raised if there are any associated records.
+ # * <tt>:restrict_with_error</tt> causes an error to be added to the owner if there are any associated objects.
+ #
+ # If using with the <tt>:through</tt> option, the association on the join model must be
+ # a #belongs_to, and the records which get deleted are the join records, rather than
+ # the associated records.
+ #
+ # If using <tt>dependent: :destroy</tt> on a scoped association, only the scoped objects are destroyed.
+ # For example, if a Post model defines
+ # <tt>has_many :comments, -> { where published: true }, dependent: :destroy</tt> and <tt>destroy</tt> is
+ # called on a post, only published comments are destroyed. This means that any unpublished comments in the
+ # database would still contain a foreign key pointing to the now deleted post.
+ # [:counter_cache]
+ # This option can be used to configure a custom-named <tt>:counter_cache</tt>. You only need this option
+ # when you have customized the name of your <tt>:counter_cache</tt> on the #belongs_to association.
+ # [:as]
+ # Specifies a polymorphic interface (See #belongs_to).
+ # [:through]
+ # Specifies an association through which to perform the query. This can be any other type
+ # of association, including other <tt>:through</tt> associations. Options for <tt>:class_name</tt>,
+ # <tt>:primary_key</tt> and <tt>:foreign_key</tt> are ignored, as the association uses the
+ # source reflection.
+ #
+ # If the association on the join model is a #belongs_to, the collection can be modified
+ # and the records on the <tt>:through</tt> model will be automatically created and removed
+ # as appropriate. Otherwise, the collection is read-only, so you should manipulate the
+ # <tt>:through</tt> association directly.
+ #
+ # If you are going to modify the association (rather than just read from it), then it is
+ # a good idea to set the <tt>:inverse_of</tt> option on the source association on the
+ # join model. This allows associated records to be built which will automatically create
+ # the appropriate join model records when they are saved. (See the 'Association Join Models'
+ # section above.)
+ # [:source]
+ # Specifies the source association name used by #has_many <tt>:through</tt> queries.
+ # Only use it if the name cannot be inferred from the association.
+ # <tt>has_many :subscribers, through: :subscriptions</tt> will look for either <tt>:subscribers</tt> or
+ # <tt>:subscriber</tt> on Subscription, unless a <tt>:source</tt> is given.
+ # [:source_type]
+ # Specifies the type of the source association used by #has_many <tt>:through</tt> queries where the source
+ # association is a polymorphic #belongs_to.
+ # [:validate]
+ # When set to +true+, validates new objects added to the association when saving the parent object. +true+ by default.
+ # If you want to ensure associated objects are revalidated on every update, use +validates_associated+.
+ # [:autosave]
+ # If true, always save the associated objects or destroy them if marked for destruction,
+ # when saving the parent object. If false, never save or destroy the associated objects.
+ # By default, only save associated objects that are new records. This option is implemented as a
+ # +before_save+ callback. Because callbacks are run in the order they are defined, associated objects
+ # may need to be explicitly saved in any user-defined +before_save+ callbacks.
+ #
+ # Note that NestedAttributes::ClassMethods#accepts_nested_attributes_for sets
+ # <tt>:autosave</tt> to <tt>true</tt>.
+ # [:inverse_of]
+ # Specifies the name of the #belongs_to association on the associated object
+ # that is the inverse of this #has_many association.
+ # See ActiveRecord::Associations::ClassMethods's overview on Bi-directional associations for more detail.
+ # [:extend]
+ # Specifies a module or array of modules that will be extended into the association object returned.
+ # Useful for defining methods on associations, especially when they should be shared between multiple
+ # association objects.
+ #
+ # Option examples:
+ # has_many :comments, -> { order("posted_on") }
+ # has_many :comments, -> { includes(:author) }
+ # has_many :people, -> { where(deleted: false).order("name") }, class_name: "Person"
+ # has_many :tracks, -> { order("position") }, dependent: :destroy
+ # has_many :comments, dependent: :nullify
+ # has_many :tags, as: :taggable
+ # has_many :reports, -> { readonly }
+ # has_many :subscribers, through: :subscriptions, source: :user
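+ #
+ # A fuller <tt>:through</tt> sketch (hypothetical Physician/Appointment/Patient models, shown
+ # here only to illustrate the join-model behavior described above):
+ #
+ #   class Physician < ActiveRecord::Base
+ #     has_many :appointments
+ #     has_many :patients, through: :appointments
+ #   end
+ #
+ #   class Appointment < ActiveRecord::Base
+ #     belongs_to :physician
+ #     belongs_to :patient
+ #   end
+ #
+ #   # Because the source association on Appointment is a belongs_to, the collection
+ #   # is writable and the join record is created automatically:
+ #   physician.patients << patient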
+ def has_many(name, scope = nil, **options, &extension)
+ reflection = Builder::HasMany.build(self, name, scope, options, &extension)
+ Reflection.add_reflection self, name, reflection
+ end
+
+ # Specifies a one-to-one association with another class. This method should only be used
+ # if the other class contains the foreign key. If the current class contains the foreign key,
+ # then you should use #belongs_to instead. See also ActiveRecord::Associations::ClassMethods's overview
+ # on when to use #has_one and when to use #belongs_to.
+ #
+ # The following methods for retrieval and query of a single associated object will be added:
+ #
+ # +association+ is a placeholder for the symbol passed as the +name+ argument, so
+ # <tt>has_one :manager</tt> would add among others <tt>manager.nil?</tt>.
+ #
+ # [association]
+ # Returns the associated object. +nil+ is returned if none is found.
+ # [association=(associate)]
+ # Assigns the associate object, extracts the primary key, sets it as the foreign key,
+ # and saves the associate object. To avoid database inconsistencies, permanently deletes an existing
+ # associated object when assigning a new one, even if the new one isn't saved to the database.
+ # [build_association(attributes = {})]
+ # Returns a new object of the associated type that has been instantiated
+ # with +attributes+ and linked to this object through a foreign key, but has not
+ # yet been saved.
+ # [create_association(attributes = {})]
+ # Returns a new object of the associated type that has been instantiated
+ # with +attributes+, linked to this object through a foreign key, and that
+ # has already been saved (if it passed the validation).
+ # [create_association!(attributes = {})]
+ # Does the same as <tt>create_association</tt>, but raises ActiveRecord::RecordInvalid
+ # if the record is invalid.
+ # [reload_association]
+ # Returns the associated object, forcing a database read.
+ #
+ # === Example
+ #
+ # An Account class declares <tt>has_one :beneficiary</tt>, which will add:
+ # * <tt>Account#beneficiary</tt> (similar to <tt>Beneficiary.where(account_id: id).first</tt>)
+ # * <tt>Account#beneficiary=(beneficiary)</tt> (similar to <tt>beneficiary.account_id = account.id; beneficiary.save</tt>)
+ # * <tt>Account#build_beneficiary</tt> (similar to <tt>Beneficiary.new(account_id: id)</tt>)
+ # * <tt>Account#create_beneficiary</tt> (similar to <tt>b = Beneficiary.new(account_id: id); b.save; b</tt>)
+ # * <tt>Account#create_beneficiary!</tt> (similar to <tt>b = Beneficiary.new(account_id: id); b.save!; b</tt>)
+ # * <tt>Account#reload_beneficiary</tt>
+ #
+ # === Scopes
+ #
+ # You can pass a second argument +scope+ as a callable (i.e. proc or
+ # lambda) to retrieve a specific record or customize the generated query
+ # when you access the associated object.
+ #
+ # Scope examples:
+ # has_one :author, -> { where(comment_id: 1) }
+ # has_one :employer, -> { joins(:company) }
+ # has_one :latest_post, ->(blog) { where("created_at > ?", blog.enabled_at) }
+ #
+ # === Options
+ #
+ # The declaration can also include an +options+ hash to specialize the behavior of the association.
+ #
+ # Options are:
+ # [:class_name]
+ # Specify the class name of the association. Use it only if that name can't be inferred
+ # from the association name. So <tt>has_one :manager</tt> will by default be linked to the Manager class, but
+ # if the real class name is Person, you'll have to specify it with this option.
+ # [:dependent]
+ # Controls what happens to the associated object when
+ # its owner is destroyed:
+ #
+ # * <tt>:destroy</tt> causes the associated object to also be destroyed
+ # * <tt>:delete</tt> causes the associated object to be deleted directly from the database (so callbacks will not execute)
+ # * <tt>:nullify</tt> causes the foreign key to be set to +NULL+. Polymorphic type column is also nullified
+ # on polymorphic associations. Callbacks are not executed.
+ # * <tt>:restrict_with_exception</tt> causes an <tt>ActiveRecord::DeleteRestrictionError</tt> exception to be raised if there is an associated record
+ # * <tt>:restrict_with_error</tt> causes an error to be added to the owner if there is an associated object
+ #
+ # Note that the <tt>:dependent</tt> option is ignored when using the <tt>:through</tt> option.
+ # [:foreign_key]
+ # Specify the foreign key used for the association. By default this is guessed to be the name
+ # of this class in lower-case and "_id" suffixed. So a Person class that makes a #has_one association
+ # will use "person_id" as the default <tt>:foreign_key</tt>.
+ #
+ # If you are going to modify the association (rather than just read from it), then it is
+ # a good idea to set the <tt>:inverse_of</tt> option.
+ # [:foreign_type]
+ # Specify the column used to store the associated object's type, if this is a polymorphic
+ # association. By default this is guessed to be the name of the polymorphic association
+ # specified on "as" option with a "_type" suffix. So a class that defines a
+ # <tt>has_one :tag, as: :taggable</tt> association will use "taggable_type" as the
+ # default <tt>:foreign_type</tt>.
+ # [:primary_key]
+ # Specify the method that returns the primary key used for the association. By default this is +id+.
+ # [:as]
+ # Specifies a polymorphic interface (See #belongs_to).
+ # [:through]
+ # Specifies a Join Model through which to perform the query. Options for <tt>:class_name</tt>,
+ # <tt>:primary_key</tt>, and <tt>:foreign_key</tt> are ignored, as the association uses the
+ # source reflection. You can only use a <tt>:through</tt> query through a #has_one
+ # or #belongs_to association on the join model.
+ #
+ # If you are going to modify the association (rather than just read from it), then it is
+ # a good idea to set the <tt>:inverse_of</tt> option.
+ # [:source]
+ # Specifies the source association name used by #has_one <tt>:through</tt> queries.
+ # Only use it if the name cannot be inferred from the association.
+ # <tt>has_one :favorite, through: :favorites</tt> will look for a
+ # <tt>:favorite</tt> on Favorite, unless a <tt>:source</tt> is given.
+ # [:source_type]
+ # Specifies the type of the source association used by #has_one <tt>:through</tt> queries where the source
+ # association is a polymorphic #belongs_to.
+ # [:validate]
+ # When set to +true+, validates new objects added to the association when saving the parent object. +false+ by default.
+ # If you want to ensure associated objects are revalidated on every update, use +validates_associated+.
+ # [:autosave]
+ # If true, always save the associated object or destroy it if marked for destruction,
+ # when saving the parent object. If false, never save or destroy the associated object.
+ # By default, only save the associated object if it's a new record.
+ #
+ # Note that NestedAttributes::ClassMethods#accepts_nested_attributes_for sets
+ # <tt>:autosave</tt> to <tt>true</tt>.
+ # [:inverse_of]
+ # Specifies the name of the #belongs_to association on the associated object
+ # that is the inverse of this #has_one association.
+ # See ActiveRecord::Associations::ClassMethods's overview on Bi-directional associations for more detail.
+ # [:required]
+ # When set to +true+, the association will also have its presence validated.
+ # This will validate the association itself, not the id. You can use
+ # +:inverse_of+ to avoid an extra query during validation.
+ #
+ # Option examples:
+ # has_one :credit_card, dependent: :destroy # destroys the associated credit card
+ # has_one :credit_card, dependent: :nullify # updates the associated record's foreign
+ # # key value to NULL rather than destroying it
+ # has_one :last_comment, -> { order('posted_on') }, class_name: "Comment"
+ # has_one :project_manager, -> { where(role: 'project_manager') }, class_name: "Person"
+ # has_one :attachment, as: :attachable
+ # has_one :boss, -> { readonly }
+ # has_one :club, through: :membership
+ # has_one :primary_address, -> { where(primary: true) }, through: :addressables, source: :addressable
+ # has_one :credit_card, required: true
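+ #
+ # A <tt>:through</tt> sketch (hypothetical Supplier/Account/AccountHistory models), illustrating
+ # that the join model must carry a #has_one or #belongs_to to the source:
+ #
+ #   class Supplier < ActiveRecord::Base
+ #     has_one :account
+ #     has_one :account_history, through: :account
+ #   end
+ #
+ #   class Account < ActiveRecord::Base
+ #     belongs_to :supplier
+ #     has_one :account_history
+ #   end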
+ def has_one(name, scope = nil, **options)
+ reflection = Builder::HasOne.build(self, name, scope, options)
+ Reflection.add_reflection self, name, reflection
end
- habtm_reflection = ActiveRecord::Reflection::HasAndBelongsToManyReflection.new(name, scope, options, self)
+ # Specifies a one-to-one association with another class. This method should only be used
+ # if this class contains the foreign key. If the other class contains the foreign key,
+ # then you should use #has_one instead. See also ActiveRecord::Associations::ClassMethods's overview
+ # on when to use #has_one and when to use #belongs_to.
+ #
+ # Methods will be added for retrieval and query for a single associated object, for which
+ # this object holds an id:
+ #
+ # +association+ is a placeholder for the symbol passed as the +name+ argument, so
+ # <tt>belongs_to :author</tt> would add among others <tt>author.nil?</tt>.
+ #
+ # [association]
+ # Returns the associated object. +nil+ is returned if none is found.
+ # [association=(associate)]
+ # Assigns the associate object, extracts the primary key, and sets it as the foreign key.
+ # No modification or deletion of existing records takes place.
+ # [build_association(attributes = {})]
+ # Returns a new object of the associated type that has been instantiated
+ # with +attributes+ and linked to this object through a foreign key, but has not yet been saved.
+ # [create_association(attributes = {})]
+ # Returns a new object of the associated type that has been instantiated
+ # with +attributes+, linked to this object through a foreign key, and that
+ # has already been saved (if it passed the validation).
+ # [create_association!(attributes = {})]
+ # Does the same as <tt>create_association</tt>, but raises ActiveRecord::RecordInvalid
+ # if the record is invalid.
+ # [reload_association]
+ # Returns the associated object, forcing a database read.
+ #
+ # === Example
+ #
+ # A Post class declares <tt>belongs_to :author</tt>, which will add:
+ # * <tt>Post#author</tt> (similar to <tt>Author.find(author_id)</tt>)
+ # * <tt>Post#author=(author)</tt> (similar to <tt>post.author_id = author.id</tt>)
+ # * <tt>Post#build_author</tt> (similar to <tt>post.author = Author.new</tt>)
+ # * <tt>Post#create_author</tt> (similar to <tt>post.author = Author.new; post.author.save; post.author</tt>)
+ # * <tt>Post#create_author!</tt> (similar to <tt>post.author = Author.new; post.author.save!; post.author</tt>)
+ # * <tt>Post#reload_author</tt>
+ # The declaration can also include an +options+ hash to specialize the behavior of the association.
+ #
+ # === Scopes
+ #
+ # You can pass a second argument +scope+ as a callable (i.e. proc or
+ # lambda) to retrieve a specific record or customize the generated query
+ # when you access the associated object.
+ #
+ # Scope examples:
+ # belongs_to :firm, -> { where(id: 2) }
+ # belongs_to :user, -> { joins(:friends) }
+ # belongs_to :level, ->(game) { where("game_level > ?", game.current_level) }
+ #
+ # === Options
+ #
+ # [:class_name]
+ # Specify the class name of the association. Use it only if that name can't be inferred
+ # from the association name. So <tt>belongs_to :author</tt> will by default be linked to the Author class, but
+ # if the real class name is Person, you'll have to specify it with this option.
+ # [:foreign_key]
+ # Specify the foreign key used for the association. By default this is guessed to be the name
+ # of the association with an "_id" suffix. So a class that defines a <tt>belongs_to :person</tt>
+ # association will use "person_id" as the default <tt>:foreign_key</tt>. Similarly,
+ # <tt>belongs_to :favorite_person, class_name: "Person"</tt> will use a foreign key
+ # of "favorite_person_id".
+ #
+ # If you are going to modify the association (rather than just read from it), then it is
+ # a good idea to set the <tt>:inverse_of</tt> option.
+ # [:foreign_type]
+ # Specify the column used to store the associated object's type, if this is a polymorphic
+ # association. By default this is guessed to be the name of the association with a "_type"
+ # suffix. So a class that defines a <tt>belongs_to :taggable, polymorphic: true</tt>
+ # association will use "taggable_type" as the default <tt>:foreign_type</tt>.
+ # [:primary_key]
+ # Specify the method that returns the primary key of the associated object used for the association.
+ # By default this is +id+.
+ # [:dependent]
+ # If set to <tt>:destroy</tt>, the associated object is destroyed when this object is. If set to
+ # <tt>:delete</tt>, the associated object is deleted *without* calling its destroy method.
+ # This option should not be specified when #belongs_to is used in conjunction with
+ # a #has_many relationship on another class because of the potential to leave
+ # orphaned records behind.
+ # [:counter_cache]
+ # Caches the number of belonging objects on the associate class through the use of CounterCache::ClassMethods#increment_counter
+ # and CounterCache::ClassMethods#decrement_counter. The counter cache is incremented when an object of this
+ # class is created and decremented when it's destroyed. This requires that a column
+ # named <tt>#{table_name}_count</tt> (such as +comments_count+ for a belonging Comment class)
+ # exists on the associate class (such as a Post class); that is, a migration adding
+ # <tt>#{table_name}_count</tt> to the associate class is required, so that the cached count is
+ # returned for each of its records (see note below). You can also specify a custom counter
+ # cache column by providing a column name instead of a +true+/+false+ value to this
+ # option (e.g., <tt>counter_cache: :my_custom_counter</tt>).
+ # Note: Specifying a counter cache will add it to that model's list of readonly attributes
+ # using +attr_readonly+.
+ # [:polymorphic]
+ # Specify that this association is a polymorphic association by passing +true+.
+ # Note: If you've enabled the counter cache, then you may want to add the counter cache attribute
+ # to the +attr_readonly+ list in the associated classes (e.g. <tt>class Post; attr_readonly :comments_count; end</tt>).
+ # [:validate]
+ # When set to +true+, validates new objects added to the association when saving the parent object. +false+ by default.
+ # If you want to ensure associated objects are revalidated on every update, use +validates_associated+.
+ # [:autosave]
+ # If true, always save the associated object or destroy it if marked for destruction, when
+ # saving the parent object.
+ # If false, never save or destroy the associated object.
+ # By default, only save the associated object if it's a new record.
+ #
+ # Note that NestedAttributes::ClassMethods#accepts_nested_attributes_for
+ # sets <tt>:autosave</tt> to <tt>true</tt>.
+ # [:touch]
+ # If true, the associated object will be touched (the updated_at/on attributes set to the current time)
+ # when this record is either saved or destroyed. If you specify a symbol, that attribute
+ # will be updated with the current time in addition to the updated_at/on attribute.
+ # Please note that no validation is performed when touching, and only the +after_touch+,
+ # +after_commit+ and +after_rollback+ callbacks are executed.
+ # [:inverse_of]
+ # Specifies the name of the #has_one or #has_many association on the associated
+ # object that is the inverse of this #belongs_to association.
+ # See ActiveRecord::Associations::ClassMethods's overview on Bi-directional associations for more detail.
+ # [:optional]
+ # When set to +true+, the association will not have its presence validated.
+ # [:required]
+ # When set to +true+, the association will also have its presence validated.
+ # This will validate the association itself, not the id. You can use
+ # +:inverse_of+ to avoid an extra query during validation.
+ # NOTE: <tt>required</tt> is set to <tt>true</tt> by default and is deprecated. If
+ # you don't want to have association presence validated, use <tt>optional: true</tt>.
+ # [:default]
+ # Provide a callable (i.e. proc or lambda) to specify that the association should
+ # be initialized with a particular record before validation.
+ #
+ # Option examples:
+ # belongs_to :firm, foreign_key: "client_of"
+ # belongs_to :person, primary_key: "name", foreign_key: "person_name"
+ # belongs_to :author, class_name: "Person", foreign_key: "author_id"
+ # belongs_to :valid_coupon, ->(o) { where "discounts > ?", o.payments_count },
+ # class_name: "Coupon", foreign_key: "coupon_id"
+ # belongs_to :attachable, polymorphic: true
+ # belongs_to :project, -> { readonly }
+ # belongs_to :post, counter_cache: true
+ # belongs_to :comment, touch: true
+ # belongs_to :company, touch: :employees_last_updated_at
+ # belongs_to :user, optional: true
+ # belongs_to :account, default: -> { company.account }
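+ #
+ # As a sketch of the counter cache and touch behavior described above (hypothetical
+ # Comment/Post models; assumes the posts table has a +comments_count+ column):
+ #
+ #   class Comment < ActiveRecord::Base
+ #     belongs_to :post, counter_cache: true, touch: true
+ #   end
+ #
+ #   post.comments.create!(body: "Nice")
+ #   # increments post.comments_count and updates post.updated_at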
+ def belongs_to(name, scope = nil, **options)
+ reflection = Builder::BelongsTo.build(self, name, scope, options)
+ Reflection.add_reflection self, name, reflection
+ end
- builder = Builder::HasAndBelongsToMany.new name, self, options
+ # Specifies a many-to-many relationship with another class. This associates two classes via an
+ # intermediate join table. Unless the join table is explicitly specified as an option, it is
+ # guessed using the lexical order of the class names. So a join between Developer and Project
+ # will give the default join table name of "developers_projects" because "D" precedes "P" alphabetically.
+ # Note that this precedence is calculated using the <tt><</tt> operator for String. This
+ # means that if the strings are of different lengths, and the strings are equal when compared
+ # up to the shortest length, then the longer string is considered of higher
+ # lexical precedence than the shorter one. For example, one would expect the tables "paper_boxes" and "papers"
+ # to generate a join table name of "papers_paper_boxes" because of the length of the name "paper_boxes",
+ # but it in fact generates a join table name of "paper_boxes_papers". Be aware of this caveat, and use the
+ # custom <tt>:join_table</tt> option if you need to.
+ # If your tables share a common prefix, it will only appear once at the beginning. For example,
+ # the tables "catalog_categories" and "catalog_products" generate a join table name of "catalog_categories_products".
+ #
+ # The join table should not have a primary key or a model associated with it. You must manually generate the
+ # join table with a migration such as this:
+ #
+ # class CreateDevelopersProjectsJoinTable < ActiveRecord::Migration[5.0]
+ # def change
+ # create_join_table :developers, :projects
+ # end
+ # end
+ #
+ # It's also a good idea to add indexes to each of those columns to speed up the joins process.
+ # However, in MySQL it is advised to add a compound index for both of the columns as MySQL only
+ # uses one index per table during the lookup.
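+ #
+ # For example, a compound index can be added from the same migration (a sketch; adjust the
+ # column order to match your most common lookups):
+ #
+ #   create_join_table :developers, :projects do |t|
+ #     t.index [:developer_id, :project_id]
+ #   end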
+ #
+ # Adds the following methods for retrieval and query:
+ #
+ # +collection+ is a placeholder for the symbol passed as the +name+ argument, so
+ # <tt>has_and_belongs_to_many :categories</tt> would add among others <tt>categories.empty?</tt>.
+ #
+ # [collection]
+ # Returns a Relation of all the associated objects.
+ # An empty Relation is returned if none are found.
+ # [collection<<(object, ...)]
+ # Adds one or more objects to the collection by creating associations in the join table
+ # (<tt>collection.push</tt> and <tt>collection.concat</tt> are aliases to this method).
+ # Note that this operation instantly fires update SQL without waiting for the save or update call on the
+ # parent object, unless the parent object is a new record.
+ # [collection.delete(object, ...)]
+ # Removes one or more objects from the collection by removing their associations from the join table.
+ # This does not destroy the objects.
+ # [collection.destroy(object, ...)]
+ # Removes one or more objects from the collection by running destroy on each association in the join table, overriding any dependent option.
+ # This does not destroy the objects.
+ # [collection=objects]
+ # Replaces the collection's content by deleting and adding objects as appropriate.
+ # [collection_singular_ids]
+ # Returns an array of the associated objects' ids.
+ # [collection_singular_ids=ids]
+ # Replace the collection by the objects identified by the primary keys in +ids+.
+ # [collection.clear]
+ # Removes every object from the collection. This does not destroy the objects.
+ # [collection.empty?]
+ # Returns +true+ if there are no associated objects.
+ # [collection.size]
+ # Returns the number of associated objects.
+ # [collection.find(id)]
+ # Finds an associated object responding to the +id+ and that
+ # meets the condition that it has to be associated with this object.
+ # Uses the same rules as ActiveRecord::FinderMethods#find.
+ # [collection.exists?(...)]
+ # Checks whether an associated object with the given conditions exists.
+ # Uses the same rules as ActiveRecord::FinderMethods#exists?.
+ # [collection.build(attributes = {})]
+ # Returns a new object of the collection type that has been instantiated
+ # with +attributes+ and linked to this object through the join table, but has not yet been saved.
+ # [collection.create(attributes = {})]
+ # Returns a new object of the collection type that has been instantiated
+ # with +attributes+, linked to this object through the join table, and that has already been
+ # saved (if it passed the validation).
+ # [collection.reload]
+ # Returns a Relation of all of the associated objects, forcing a database read.
+ # An empty Relation is returned if none are found.
+ #
+ # === Example
+ #
+ # A Developer class declares <tt>has_and_belongs_to_many :projects</tt>, which will add:
+ # * <tt>Developer#projects</tt>
+ # * <tt>Developer#projects<<</tt>
+ # * <tt>Developer#projects.delete</tt>
+ # * <tt>Developer#projects.destroy</tt>
+ # * <tt>Developer#projects=</tt>
+ # * <tt>Developer#project_ids</tt>
+ # * <tt>Developer#project_ids=</tt>
+ # * <tt>Developer#projects.clear</tt>
+ # * <tt>Developer#projects.empty?</tt>
+ # * <tt>Developer#projects.size</tt>
+ # * <tt>Developer#projects.find(id)</tt>
+ # * <tt>Developer#projects.exists?(...)</tt>
+ # * <tt>Developer#projects.build</tt> (similar to <tt>Project.new(developer_id: id)</tt>)
+ # * <tt>Developer#projects.create</tt> (similar to <tt>c = Project.new(developer_id: id); c.save; c</tt>)
+ # * <tt>Developer#projects.reload</tt>
+ # The declaration may include an +options+ hash to specialize the behavior of the association.
+ #
+ # === Scopes
+ #
+ # You can pass a second argument +scope+ as a callable (i.e. proc or
+ # lambda) to retrieve a specific set of records or customize the generated
+ # query when you access the associated collection.
+ #
+ # Scope examples:
+ # has_and_belongs_to_many :projects, -> { includes(:milestones, :manager) }
+ # has_and_belongs_to_many :categories, ->(post) {
+ # where("default_category = ?", post.default_category)
+ # }
+ #
+ # === Extensions
+ #
+ # The +extension+ argument allows you to pass a block into a
+ # has_and_belongs_to_many association. This is useful for adding new
+ # finders, creators and other factory-type methods to be used as part of
+ # the association.
+ #
+ # Extension examples:
+ # has_and_belongs_to_many :contractors do
+ # def find_or_create_by_name(name)
+ # first_name, last_name = name.split(" ", 2)
+ # find_or_create_by(first_name: first_name, last_name: last_name)
+ # end
+ # end
+ #
+ # === Options
+ #
+ # [:class_name]
+ # Specify the class name of the association. Use it only if that name can't be inferred
+ # from the association name. So <tt>has_and_belongs_to_many :projects</tt> will by default be linked to the
+ # Project class, but if the real class name is SuperProject, you'll have to specify it with this option.
+ # [:join_table]
+ # Specify the name of the join table if the default based on lexical order isn't what you want.
+ # <b>WARNING:</b> If you're overwriting the table name of either class, the +table_name+ method
+ # MUST be declared underneath any #has_and_belongs_to_many declaration in order to work.
+ # [:foreign_key]
+ # Specify the foreign key used for the association. By default this is guessed to be the name
+ # of this class in lower-case and "_id" suffixed. So a Person class that makes
+ # a #has_and_belongs_to_many association to Project will use "person_id" as the
+ # default <tt>:foreign_key</tt>.
+ #
+ # If you are going to modify the association (rather than just read from it), then it is
+ # a good idea to set the <tt>:inverse_of</tt> option.
+ # [:association_foreign_key]
+ # Specify the foreign key used for the association on the receiving side of the association.
+ # By default this is guessed to be the name of the associated class in lower-case and "_id" suffixed.
+ # So if a Person class makes a #has_and_belongs_to_many association to Project,
+ # the association will use "project_id" as the default <tt>:association_foreign_key</tt>.
+ # [:validate]
+ # When set to +true+, validates new objects added to the association when saving the parent object. +true+ by default.
+ # If you want to ensure associated objects are revalidated on every update, use +validates_associated+.
+ # [:autosave]
+ # If true, always save the associated objects or destroy them if marked for destruction, when
+ # saving the parent object.
+ # If false, never save or destroy the associated objects.
+ # By default, only save associated objects that are new records.
+ #
+ # Note that NestedAttributes::ClassMethods#accepts_nested_attributes_for sets
+ # <tt>:autosave</tt> to <tt>true</tt>.
+ #
+ # Option examples:
+ # has_and_belongs_to_many :projects
+ # has_and_belongs_to_many :projects, -> { includes(:milestones, :manager) }
+ # has_and_belongs_to_many :nations, class_name: "Country"
+ # has_and_belongs_to_many :categories, join_table: "prods_cats"
+ # has_and_belongs_to_many :categories, -> { readonly }
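+ #
+ # An illustrative usage sketch (hypothetical +developer+ and +project+ records):
+ #
+ #   developer.projects << project          # inserts a row into the join table
+ #   developer.projects.delete(project)     # removes the join row but keeps the project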
+ def has_and_belongs_to_many(name, scope = nil, **options, &extension)
+ habtm_reflection = ActiveRecord::Reflection::HasAndBelongsToManyReflection.new(name, scope, options, self)
- join_model = builder.through_model
+ builder = Builder::HasAndBelongsToMany.new name, self, options
- const_set join_model.name, join_model
- private_constant join_model.name
+ join_model = builder.through_model
- middle_reflection = builder.middle_reflection join_model
+ const_set join_model.name, join_model
+ private_constant join_model.name
- Builder::HasMany.define_callbacks self, middle_reflection
- Reflection.add_reflection self, middle_reflection.name, middle_reflection
- middle_reflection.parent_reflection = habtm_reflection
+ middle_reflection = builder.middle_reflection join_model
- include Module.new {
- class_eval <<-RUBY, __FILE__, __LINE__ + 1
- def destroy_associations
- association(:#{middle_reflection.name}).delete_all(:delete_all)
- association(:#{name}).reset
- super
- end
- RUBY
- }
+ Builder::HasMany.define_callbacks self, middle_reflection
+ Reflection.add_reflection self, middle_reflection.name, middle_reflection
+ middle_reflection.parent_reflection = habtm_reflection
- hm_options = {}
- hm_options[:through] = middle_reflection.name
- hm_options[:source] = join_model.right_reflection.name
+ include Module.new {
+ class_eval <<-RUBY, __FILE__, __LINE__ + 1
+ def destroy_associations
+ association(:#{middle_reflection.name}).delete_all(:delete_all)
+ association(:#{name}).reset
+ super
+ end
+ RUBY
+ }
- [:before_add, :after_add, :before_remove, :after_remove, :autosave, :validate, :join_table, :class_name, :extend].each do |k|
- hm_options[k] = options[k] if options.key? k
- end
+ hm_options = {}
+ hm_options[:through] = middle_reflection.name
+ hm_options[:source] = join_model.right_reflection.name
- has_many name, scope, hm_options, &extension
- self._reflections[name.to_s].parent_reflection = habtm_reflection
+ [:before_add, :after_add, :before_remove, :after_remove, :autosave, :validate, :join_table, :class_name, :extend].each do |k|
+ hm_options[k] = options[k] if options.key? k
+ end
+
+ has_many name, scope, hm_options, &extension
+ _reflections[name.to_s].parent_reflection = habtm_reflection
+ end
end
- end
end
end
diff --git a/activerecord/lib/active_record/associations/alias_tracker.rb b/activerecord/lib/active_record/associations/alias_tracker.rb
index 021bc32237..272eede824 100644
--- a/activerecord/lib/active_record/associations/alias_tracker.rb
+++ b/activerecord/lib/active_record/associations/alias_tracker.rb
@@ -1,51 +1,41 @@
-require 'active_support/core_ext/string/conversions'
+# frozen_string_literal: true
+
+require "active_support/core_ext/string/conversions"
module ActiveRecord
module Associations
# Keeps track of table aliases for ActiveRecord::Associations::JoinDependency
class AliasTracker # :nodoc:
- attr_reader :aliases
-
- def self.create(connection, initial_table, type_caster)
- aliases = Hash.new(0)
- aliases[initial_table] = 1
- new connection, aliases, type_caster
- end
-
- def self.create_with_joins(connection, initial_table, joins, type_caster)
+ def self.create(connection, initial_table, joins)
if joins.empty?
- create(connection, initial_table, type_caster)
+ aliases = Hash.new(0)
else
aliases = Hash.new { |h, k|
h[k] = initial_count_for(connection, k, joins)
}
- aliases[initial_table] = 1
- new connection, aliases, type_caster
end
+ aliases[initial_table] = 1
+ new(connection, aliases)
end
def self.initial_count_for(connection, name, table_joins)
- # quoted_name should be downcased as some database adapters (Oracle) return quoted name in uppercase
- quoted_name = connection.quote_table_name(name).downcase
+ quoted_name = nil
counts = table_joins.map do |join|
if join.is_a?(Arel::Nodes::StringJoin)
+ # quoted_name should be case ignored as some database adapters (Oracle) return quoted name in uppercase
+ quoted_name ||= connection.quote_table_name(name)
+
# Table names + table aliases
- join.left.downcase.scan(
- /join(?:\s+\w+)?\s+(\S+\s+)?#{quoted_name}\son/
+ join.left.scan(
+ /JOIN(?:\s+\w+)?\s+(?:\S+\s+)?(?:#{quoted_name}|#{name})\sON/i
).size
- elsif join.respond_to? :left
- join.left.table_name == name ? 1 : 0
+ elsif join.is_a?(Arel::Nodes::Join)
+ join.left.name == name ? 1 : 0
+ elsif join.is_a?(Hash)
+ join[name]
else
- # this branch is reached by two tests:
- #
- # activerecord/test/cases/associations/cascaded_eager_loading_test.rb:37
- # with :posts
- #
- # activerecord/test/cases/associations/eager_test.rb:1133
- # with :comments
- #
- 0
+ raise ArgumentError, "joins list should be initialized by list of Arel::Nodes::Join"
end
end
@@ -53,17 +43,16 @@ module ActiveRecord
end
# table_joins is an array of arel joins which might conflict with the aliases we assign here
- def initialize(connection, aliases, type_caster)
+ def initialize(connection, aliases)
@aliases = aliases
@connection = connection
- @type_caster = type_caster
end
- def aliased_table_for(table_name, aliased_name)
+ def aliased_table_for(table_name, aliased_name, type_caster)
if aliases[table_name].zero?
# If it's zero, we can have our table_name
aliases[table_name] = 1
- Arel::Table.new(table_name, type_caster: @type_caster)
+ Arel::Table.new(table_name, type_caster: type_caster)
else
# Otherwise, we need to use an alias
aliased_name = @connection.table_alias_for(aliased_name)
@@ -76,10 +65,12 @@ module ActiveRecord
else
aliased_name
end
- Arel::Table.new(table_name, type_caster: @type_caster).alias(table_alias)
+ Arel::Table.new(table_name, type_caster: type_caster).alias(table_alias)
end
end
+ attr_reader :aliases
+
private
def truncate(name)
diff --git a/activerecord/lib/active_record/associations/association.rb b/activerecord/lib/active_record/associations/association.rb
index f7edfbfb5f..0bb63b97ae 100644
--- a/activerecord/lib/active_record/associations/association.rb
+++ b/activerecord/lib/active_record/associations/association.rb
@@ -1,4 +1,6 @@
-require 'active_support/core_ext/array/wrap'
+# frozen_string_literal: true
+
+require "active_support/core_ext/array/wrap"
module ActiveRecord
module Associations
@@ -15,11 +17,27 @@ module ActiveRecord
# CollectionAssociation
# HasManyAssociation + ForeignAssociation
# HasManyThroughAssociation + ThroughAssociation
+ #
+ # Associations in Active Record are middlemen between the object that
+ # holds the association, known as the <tt>owner</tt>, and the associated
+ # result set, known as the <tt>target</tt>. Association metadata is available in
+ # <tt>reflection</tt>, which is an instance of <tt>ActiveRecord::Reflection::AssociationReflection</tt>.
+ #
+ # For example, given
+ #
+ # class Blog < ActiveRecord::Base
+ # has_many :posts
+ # end
+ #
+ # blog = Blog.first
+ #
+ # The association of <tt>blog.posts</tt> has the object +blog+ as its
+ # <tt>owner</tt>, the collection of its posts as <tt>target</tt>, and
+ # the <tt>reflection</tt> object represents a <tt>:has_many</tt> macro.
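+ #
+ # These parts can be inspected through the association object itself, e.g. (a sketch
+ # building on the +Blog+ example above):
+ #
+ #   assoc = blog.association(:posts)
+ #   assoc.owner             # => blog
+ #   assoc.reflection.macro  # => :has_many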
class Association #:nodoc:
attr_reader :owner, :target, :reflection
- attr_accessor :inversed
- delegate :options, :to => :reflection
+ delegate :options, to: :reflection
def initialize(owner, reflection)
reflection.check_validity!
@@ -30,14 +48,6 @@ module ActiveRecord
reset_scope
end
- # Returns the name of the table of the associated class:
- #
- # post.comments.aliased_table_name # => "comments"
- #
- def aliased_table_name
- klass.table_name
- end
-
# Resets the \loaded flag to +false+ and sets the \target to +nil+.
def reset
@loaded = false
@@ -47,7 +57,9 @@ module ActiveRecord
end
# Reloads the \target and returns +self+ on success.
- def reload
+ # The QueryCache is cleared if +force+ is true.
+ def reload(force = false)
+ klass.connection.clear_query_cache if force && klass
reset
reset_scope
load_target
@@ -73,7 +85,7 @@ module ActiveRecord
#
# Note that if the target has not been loaded, it is not considered stale.
def stale_target?
- !inversed && loaded? && @stale_state != stale_state
+ !@inversed && loaded? && @stale_state != stale_state
end
# Sets the target of this association to <tt>\target</tt>, and the \loaded flag to +true+.
@@ -83,19 +95,7 @@ module ActiveRecord
end
def scope
- target_scope.merge(association_scope)
- end
-
- # The scope for this association.
- #
- # Note that the association_scope is merged into the target_scope only when the
- # scope method is called. This is because at that point the call may be surrounded
- # by scope.scoping { ... } or with_scope { ... } etc, which affects the scope which
- # actually gets built.
- def association_scope
- if klass
- @association_scope ||= AssociationScope.scope(self, klass.connection)
- end
+ target_scope.merge!(association_scope)
end
def reset_scope
@@ -104,24 +104,46 @@ module ActiveRecord
# Set the inverse association, if possible
def set_inverse_instance(record)
- if invertible_for?(record)
- inverse = record.association(inverse_reflection_for(record).name)
- inverse.target = owner
- inverse.inversed = true
+ if inverse = inverse_association_for(record)
+ inverse.inversed_from(owner)
+ end
+ record
+ end
+
+ def set_inverse_instance_from_queries(record)
+ if inverse = inverse_association_for(record)
+ inverse.inversed_from_queries(owner)
end
record
end
+ # Remove the inverse association, if possible
+ def remove_inverse_instance(record)
+ if inverse = inverse_association_for(record)
+ inverse.inversed_from(nil)
+ end
+ end
+
+ def inversed_from(record)
+ self.target = record
+ @inversed = !!record
+ end
+ alias :inversed_from_queries :inversed_from
+
# Returns the class of the target. belongs_to polymorphic overrides this to look at the
# polymorphic_type field on the owner.
def klass
reflection.klass
end
- # Can be overridden (i.e. in ThroughAssociation) to merge in other scopes (i.e. the
- # through association's scope)
- def target_scope
- AssociationRelation.create(klass, klass.arel_table, klass.predicate_builder, self).merge!(klass.all)
+ def extensions
+ extensions = klass.default_extensions | reflection.extensions
+
+ if reflection.scope
+ extensions |= reflection.scope_for(klass.unscoped, owner).extensions
+ end
+
+ extensions
end
# Loads the \target if needed and returns it.
@@ -143,17 +165,9 @@ module ActiveRecord
reset
end
- def interpolate(sql, record = nil)
- if sql.respond_to?(:to_proc)
- owner.instance_exec(record, &sql)
- else
- sql
- end
- end
-
- # We can't dump @reflection since it contains the scope proc
+ # We can't dump @reflection and @through_reflection since it contains the scope proc
def marshal_dump
- ivars = (instance_variables - [:@reflection]).map { |name| [name, instance_variable_get(name)] }
+ ivars = (instance_variables - [:@reflection, :@through_reflection]).map { |name| [name, instance_variable_get(name)] }
[@reflection.name, ivars]
end
@@ -166,14 +180,57 @@ module ActiveRecord
def initialize_attributes(record, except_from_scope_attributes = nil) #:nodoc:
except_from_scope_attributes ||= {}
skip_assign = [reflection.foreign_key, reflection.type].compact
- assigned_keys = record.changed
+ assigned_keys = record.changed_attribute_names_to_save
assigned_keys += except_from_scope_attributes.keys.map(&:to_s)
- attributes = create_scope.except(*(assigned_keys - skip_assign))
- record.assign_attributes(attributes)
+ attributes = scope_for_create.except!(*(assigned_keys - skip_assign))
+ record.send(:_assign_attributes, attributes) if attributes.any?
set_inverse_instance(record)
end
+ def create(attributes = {}, &block)
+ _create_record(attributes, &block)
+ end
+
+ def create!(attributes = {}, &block)
+ _create_record(attributes, true, &block)
+ end
+
private
+ def find_target
+ scope = self.scope
+ return scope.to_a if skip_statement_cache?(scope)
+
+ conn = klass.connection
+ sc = reflection.association_scope_cache(conn, owner) do |params|
+ as = AssociationScope.create { params.bind }
+ target_scope.merge!(as.scope(self))
+ end
+
+ binds = AssociationScope.get_bind_values(owner, reflection.chain)
+ sc.execute(binds, conn) { |record| set_inverse_instance(record) } || []
+ end
+
+ # The scope for this association.
+ #
+ # Note that the association_scope is merged into the target_scope only when the
+ # scope method is called. This is because at that point the call may be surrounded
+ # by scope.scoping { ... } or unscoped { ... } etc, which affects the scope which
+ # actually gets built.
+ def association_scope
+ if klass
+ @association_scope ||= AssociationScope.scope(self)
+ end
+ end
+
+ # Can be overridden (i.e. in ThroughAssociation) to merge in other scopes (i.e. the
+ # through association's scope)
+ def target_scope
+ AssociationRelation.create(klass, self).merge!(klass.all)
+ end
+
+ def scope_for_create
+ scope.scope_for_create
+ end
def find_target?
!loaded? && (!owner.new_record? || foreign_key_present?) && klass
@@ -185,8 +242,8 @@ module ActiveRecord
if (reflection.has_one? || reflection.collection?) && !options[:through]
attributes[reflection.foreign_key] = owner[reflection.active_record_primary_key]
- if reflection.options[:as]
- attributes[reflection.type] = owner.class.base_class.name
+ if reflection.type
+ attributes[reflection.type] = owner.class.polymorphic_name
end
end
@@ -217,12 +274,19 @@ module ActiveRecord
unless record.is_a?(reflection.klass)
fresh_class = reflection.class_name.safe_constantize
unless fresh_class && record.is_a?(fresh_class)
- message = "#{reflection.class_name}(##{reflection.klass.object_id}) expected, got #{record.class}(##{record.class.object_id})"
+ message = "#{reflection.class_name}(##{reflection.klass.object_id}) expected, "\
+ "got #{record.inspect} which is an instance of #{record.class}(##{record.class.object_id})"
raise ActiveRecord::AssociationTypeMismatch, message
end
end
end
+ def inverse_association_for(record)
+ if invertible_for?(record)
+ record.association(inverse_reflection_for(record).name)
+ end
+ end
+
# Can be redefined by subclasses, notably polymorphic belongs_to
# The record parameter is necessary to support polymorphic inverses as we must check for
# the association in the specific class of the record.
@@ -245,18 +309,19 @@ module ActiveRecord
# so that when stale_state is different from the value stored on the last find_target,
# the target is stale.
#
- # This is only relevant to certain associations, which is why it returns nil by default.
+ # This is only relevant to certain associations, which is why it returns +nil+ by default.
def stale_state
end
def build_record(attributes)
reflection.build_association(attributes) do |record|
initialize_attributes(record, attributes)
+ yield(record) if block_given?
end
end
# Returns true if statement cache should be skipped on the association reader.
- def skip_statement_cache?
+ def skip_statement_cache?(scope)
reflection.has_scope? ||
scope.eager_loading? ||
klass.scope_attributes? ||
diff --git a/activerecord/lib/active_record/associations/association_scope.rb b/activerecord/lib/active_record/associations/association_scope.rb
index 882f1225fc..9e38380611 100644
--- a/activerecord/lib/active_record/associations/association_scope.rb
+++ b/activerecord/lib/active_record/associations/association_scope.rb
@@ -1,8 +1,10 @@
+# frozen_string_literal: true
+
module ActiveRecord
module Associations
class AssociationScope #:nodoc:
- def self.scope(association, connection)
- INSTANCE.scope(association, connection)
+ def self.scope(association)
+ INSTANCE.scope(association)
end
def self.create(&block)
@@ -16,20 +18,17 @@ module ActiveRecord
INSTANCE = create
- def scope(association, connection)
+ def scope(association)
klass = association.klass
reflection = association.reflection
scope = klass.unscoped
owner = association.owner
- alias_tracker = AliasTracker.create connection, association.klass.table_name, klass.type_caster
- chain_head, chain_tail = get_chain(reflection, association, alias_tracker)
-
- scope.extending! Array(reflection.options[:extend])
- add_constraints(scope, owner, klass, reflection, chain_head, chain_tail)
- end
+ chain = get_chain(reflection, association, scope.alias_tracker)
- def join_type
- Arel::Nodes::InnerJoin
+ scope.extending! reflection.extensions
+ scope = add_constraints(scope, owner, chain)
+ scope.limit!(1) unless reflection.collection?
+ scope
end
def self.get_bind_values(owner, chain)
@@ -38,129 +37,130 @@ module ActiveRecord
binds << last_reflection.join_id_for(owner)
if last_reflection.type
- binds << owner.class.base_class.name
+ binds << owner.class.polymorphic_name
end
chain.each_cons(2).each do |reflection, next_reflection|
if reflection.type
- binds << next_reflection.klass.base_class.name
+ binds << next_reflection.klass.polymorphic_name
end
end
binds
end
- protected
+ private
+ attr_reader :value_transformation
- attr_reader :value_transformation
+ def join(table, constraint)
+ table.create_join(table, table.create_on(constraint))
+ end
- private
- def join(table, constraint)
- table.create_join(table, table.create_on(constraint), join_type)
- end
+ def last_chain_scope(scope, reflection, owner)
+ join_keys = reflection.join_keys
+ key = join_keys.key
+ foreign_key = join_keys.foreign_key
- def last_chain_scope(scope, table, reflection, owner, association_klass)
- join_keys = reflection.join_keys(association_klass)
- key = join_keys.key
- foreign_key = join_keys.foreign_key
+ table = reflection.aliased_table
+ value = transform_value(owner[foreign_key])
+ scope = apply_scope(scope, table, key, value)
- value = transform_value(owner[foreign_key])
- scope = scope.where(table.name.to_sym => { key => value })
+ if reflection.type
+ polymorphic_type = transform_value(owner.class.polymorphic_name)
+ scope = apply_scope(scope, table, reflection.type, polymorphic_type)
+ end
- if reflection.type
- polymorphic_type = transform_value(owner.class.base_class.name)
- scope = scope.where(table.name.to_sym => { reflection.type => polymorphic_type })
+ scope
end
- scope
- end
+ def transform_value(value)
+ value_transformation.call(value)
+ end
- def transform_value(value)
- value_transformation.call(value)
- end
+ def next_chain_scope(scope, reflection, next_reflection)
+ join_keys = reflection.join_keys
+ key = join_keys.key
+ foreign_key = join_keys.foreign_key
- def next_chain_scope(scope, table, reflection, association_klass, foreign_table, next_reflection)
- join_keys = reflection.join_keys(association_klass)
- key = join_keys.key
- foreign_key = join_keys.foreign_key
+ table = reflection.aliased_table
+ foreign_table = next_reflection.aliased_table
+ constraint = table[key].eq(foreign_table[foreign_key])
- constraint = table[key].eq(foreign_table[foreign_key])
+ if reflection.type
+ value = transform_value(next_reflection.klass.polymorphic_name)
+ scope = apply_scope(scope, table, reflection.type, value)
+ end
- if reflection.type
- value = transform_value(next_reflection.klass.base_class.name)
- scope = scope.where(table.name => { reflection.type => value })
+ scope.joins!(join(foreign_table, constraint))
end
- scope = scope.joins(join(foreign_table, constraint))
- end
+ class ReflectionProxy < SimpleDelegator # :nodoc:
+ attr_reader :aliased_table
- class ReflectionProxy < SimpleDelegator # :nodoc:
- attr_accessor :next
- attr_reader :alias_name
+ def initialize(reflection, aliased_table)
+ super(reflection)
+ @aliased_table = aliased_table
+ end
- def initialize(reflection, alias_name)
- super(reflection)
- @alias_name = alias_name
+ def all_includes; nil; end
end
- def all_includes; nil; end
- end
-
- def get_chain(reflection, association, tracker)
- name = reflection.name
- runtime_reflection = Reflection::RuntimeReflection.new(reflection, association)
- previous_reflection = runtime_reflection
- reflection.chain.drop(1).each do |refl|
- alias_name = tracker.aliased_table_for(refl.table_name, refl.alias_candidate(name))
- proxy = ReflectionProxy.new(refl, alias_name)
- previous_reflection.next = proxy
- previous_reflection = proxy
+ def get_chain(reflection, association, tracker)
+ name = reflection.name
+ chain = [Reflection::RuntimeReflection.new(reflection, association)]
+ reflection.chain.drop(1).each do |refl|
+ aliased_table = tracker.aliased_table_for(
+ refl.table_name,
+ refl.alias_candidate(name),
+ refl.klass.type_caster
+ )
+ chain << ReflectionProxy.new(refl, aliased_table)
+ end
+ chain
end
- [runtime_reflection, previous_reflection]
- end
- def add_constraints(scope, owner, association_klass, refl, chain_head, chain_tail)
- owner_reflection = chain_tail
- table = owner_reflection.alias_name
- scope = last_chain_scope(scope, table, owner_reflection, owner, association_klass)
+ def add_constraints(scope, owner, chain)
+ scope = last_chain_scope(scope, chain.last, owner)
- reflection = chain_head
- loop do
- break unless reflection
- table = reflection.alias_name
-
- unless reflection == chain_tail
- next_reflection = reflection.next
- foreign_table = next_reflection.alias_name
- scope = next_chain_scope(scope, table, reflection, association_klass, foreign_table, next_reflection)
+ chain.each_cons(2) do |reflection, next_reflection|
+ scope = next_chain_scope(scope, reflection, next_reflection)
end
- # Exclude the scope of the association itself, because that
- # was already merged in the #scope method.
- reflection.constraints.each do |scope_chain_item|
- item = eval_scope(reflection.klass, scope_chain_item, owner)
+ chain_head = chain.first
+ chain.reverse_each do |reflection|
+ # Exclude the scope of the association itself, because that
+ # was already merged in the #scope method.
+ reflection.constraints.each do |scope_chain_item|
+ item = eval_scope(reflection, scope_chain_item, owner)
- if scope_chain_item == refl.scope
- scope.merge! item.except(:where, :includes)
- end
+ if scope_chain_item == chain_head.scope
+ scope.merge! item.except(:where, :includes, :unscope, :order)
+ end
- reflection.all_includes do
- scope.includes! item.includes_values
- end
+ reflection.all_includes do
+ scope.includes! item.includes_values
+ end
- scope.unscope!(*item.unscope_values)
- scope.where_clause += item.where_clause
- scope.order_values |= item.order_values
+ scope.unscope!(*item.unscope_values)
+ scope.where_clause += item.where_clause
+ scope.order_values = item.order_values | scope.order_values
+ end
end
- reflection = reflection.next
+ scope
end
- scope
- end
+ def apply_scope(scope, table, key, value)
+ if scope.table == table
+ scope.where!(key => value)
+ else
+ scope.where!(table.name => { key => value })
+ end
+ end
- def eval_scope(klass, scope, owner)
- klass.unscoped.instance_exec(owner, &scope)
- end
+ def eval_scope(reflection, scope, owner)
+ relation = reflection.build_scope(reflection.aliased_table)
+ relation.instance_exec(owner, &scope) || relation
+ end
end
end
end
diff --git a/activerecord/lib/active_record/associations/belongs_to_association.rb b/activerecord/lib/active_record/associations/belongs_to_association.rb
index 41698c5360..3346725f2d 100644
--- a/activerecord/lib/active_record/associations/belongs_to_association.rb
+++ b/activerecord/lib/active_record/associations/belongs_to_association.rb
@@ -1,25 +1,28 @@
+# frozen_string_literal: true
+
module ActiveRecord
- # = Active Record Belongs To Association
module Associations
+ # = Active Record Belongs To Association
class BelongsToAssociation < SingularAssociation #:nodoc:
-
def handle_dependency
- target.send(options[:dependent]) if load_target
- end
+ return unless load_target
- def replace(record)
- if record
- raise_on_type_mismatch!(record)
- update_counters_on_replace(record)
- replace_keys(record)
- set_inverse_instance(record)
- @updated = true
+ case options[:dependent]
+ when :destroy
+ target.destroy
+ raise ActiveRecord::Rollback unless target.destroyed?
else
- decrement_counters
- remove_keys
+ target.send(options[:dependent])
end
+ end
+
+ def inversed_from(record)
+ replace_keys(record)
+ super
+ end
- self.target = record
+ def default(&block)
+ writer(owner.instance_exec(&block)) if reader.nil?
end
def reset
@@ -31,26 +34,60 @@ module ActiveRecord
@updated
end
- def decrement_counters # :nodoc:
+ def decrement_counters
update_counters(-1)
end
- def increment_counters # :nodoc:
+ def increment_counters
update_counters(1)
end
+ def decrement_counters_before_last_save
+ if reflection.polymorphic?
+ model_was = owner.attribute_before_last_save(reflection.foreign_type).try(:constantize)
+ else
+ model_was = klass
+ end
+
+ foreign_key_was = owner.attribute_before_last_save(reflection.foreign_key)
+
+ if foreign_key_was && model_was < ActiveRecord::Base
+ update_counters_via_scope(model_was, foreign_key_was, -1)
+ end
+ end
+
+ def target_changed?
+ owner.saved_change_to_attribute?(reflection.foreign_key)
+ end
+
private
+ def replace(record)
+ if record
+ raise_on_type_mismatch!(record)
+ set_inverse_instance(record)
+ @updated = true
+ end
+
+ replace_keys(record)
+
+ self.target = record
+ end
def update_counters(by)
if require_counter_update? && foreign_key_present?
if target && !stale_target?
- target.increment!(reflection.counter_cache_column, by)
+ target.increment!(reflection.counter_cache_column, by, touch: reflection.options[:touch])
else
- klass.update_counters(target_id, reflection.counter_cache_column => by)
+ update_counters_via_scope(klass, owner._read_attribute(reflection.foreign_key), by)
end
end
end
+ def update_counters_via_scope(klass, foreign_key, by)
+ scope = klass.unscoped.where!(primary_key(klass) => foreign_key)
+ scope.update_counters(reflection.counter_cache_column => by, touch: reflection.options[:touch])
+ end
+
def find_target?
!loaded? && foreign_key_present? && klass
end
@@ -59,24 +96,12 @@ module ActiveRecord
reflection.counter_cache_column && owner.persisted?
end
- def update_counters_on_replace(record)
- if require_counter_update? && different_target?(record)
- record.increment!(reflection.counter_cache_column)
- decrement_counters
- end
- end
-
- # Checks whether record is different to the current target, without loading it
- def different_target?(record)
- record.id != owner._read_attribute(reflection.foreign_key)
- end
-
def replace_keys(record)
- owner[reflection.foreign_key] = record._read_attribute(reflection.association_primary_key(record.class))
+ owner[reflection.foreign_key] = record ? record._read_attribute(primary_key(record.class)) : nil
end
- def remove_keys
- owner[reflection.foreign_key] = nil
+ def primary_key(klass)
+ reflection.association_primary_key(klass)
end
def foreign_key_present?
@@ -90,14 +115,6 @@ module ActiveRecord
inverse && inverse.has_one?
end
- def target_id
- if options[:primary_key]
- owner.send(reflection.name).try(:id)
- else
- owner._read_attribute(reflection.foreign_key)
- end
- end
-
def stale_state
result = owner._read_attribute(reflection.foreign_key) { |n| owner.send(:missing_attribute, n, caller) }
result && result.to_s
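Roughly, the counter-cache pieces above behave as in this sketch (hypothetical Author/Book models with an authors.books_count column; touch: true is included to show that the :touch option is now forwarded to the counter updates).

    class Author < ActiveRecord::Base
      has_many :books
    end

    class Book < ActiveRecord::Base
      belongs_to :author, counter_cache: true, touch: true
    end

    book = Book.create!(author: Author.create!(name: "Old"))
    book.update!(author: Author.create!(name: "New"))
    # target_changed? is true (the foreign key has a saved change), so the new
    # author's books_count is incremented and the old author's is decremented
    # via decrement_counters_before_last_save / update_counters_via_scope,
    # both with touch: true forwarded.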
diff --git a/activerecord/lib/active_record/associations/belongs_to_polymorphic_association.rb b/activerecord/lib/active_record/associations/belongs_to_polymorphic_association.rb
index b710cf6bdb..9ae452e7a1 100644
--- a/activerecord/lib/active_record/associations/belongs_to_polymorphic_association.rb
+++ b/activerecord/lib/active_record/associations/belongs_to_polymorphic_association.rb
@@ -1,26 +1,22 @@
+# frozen_string_literal: true
+
module ActiveRecord
- # = Active Record Belongs To Polymorphic Association
module Associations
+ # = Active Record Belongs To Polymorphic Association
class BelongsToPolymorphicAssociation < BelongsToAssociation #:nodoc:
def klass
type = owner[reflection.foreign_type]
type.presence && type.constantize
end
- private
+ def target_changed?
+ super || owner.saved_change_to_attribute?(reflection.foreign_type)
+ end
+ private
def replace_keys(record)
super
- owner[reflection.foreign_type] = record.class.base_class.name
- end
-
- def remove_keys
- super
- owner[reflection.foreign_type] = nil
- end
-
- def different_target?(record)
- super || record.class != klass
+ owner[reflection.foreign_type] = record ? record.class.polymorphic_name : nil
end
def inverse_reflection_for(record)
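A small sketch of the polymorphic replace_keys change (hypothetical Product/Comment models): the type column is now written from polymorphic_name, which defaults to the base class name, and is cleared when the association is nilled out.

    class Product < ActiveRecord::Base
      has_many :comments, as: :commentable
    end

    class Comment < ActiveRecord::Base
      belongs_to :commentable, polymorphic: true
    end

    comment = Comment.new
    comment.commentable = Product.new
    comment.commentable_type # => "Product" (Product.polymorphic_name)

    comment.commentable = nil
    comment.commentable_type # => nil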
diff --git a/activerecord/lib/active_record/associations/builder/association.rb b/activerecord/lib/active_record/associations/builder/association.rb
index d0534056d9..7c69cd65ee 100644
--- a/activerecord/lib/active_record/associations/builder/association.rb
+++ b/activerecord/lib/active_record/associations/builder/association.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
# This is the parent Association class which defines the variables
# used by all associations.
#
@@ -36,11 +38,6 @@ module ActiveRecord::Associations::Builder # :nodoc:
def self.create_reflection(model, name, scope, options, extension = nil)
raise ArgumentError, "association names must be a Symbol" unless name.kind_of?(Symbol)
- if scope.is_a?(Hash)
- options = scope
- scope = nil
- end
-
validate_options(options)
scope = build_scope(scope, extension)
@@ -107,8 +104,8 @@ module ActiveRecord::Associations::Builder # :nodoc:
def self.define_readers(mixin, name)
mixin.class_eval <<-CODE, __FILE__, __LINE__ + 1
- def #{name}(*args)
- association(:#{name}).reader(*args)
+ def #{name}
+ association(:#{name}).reader
end
CODE
end
diff --git a/activerecord/lib/active_record/associations/builder/belongs_to.rb b/activerecord/lib/active_record/associations/builder/belongs_to.rb
index 346329c610..fc00f1e900 100644
--- a/activerecord/lib/active_record/associations/builder/belongs_to.rb
+++ b/activerecord/lib/active_record/associations/builder/belongs_to.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
module ActiveRecord::Associations::Builder # :nodoc:
class BelongsTo < SingularAssociation #:nodoc:
def self.macro
@@ -5,7 +7,7 @@ module ActiveRecord::Associations::Builder # :nodoc:
end
def self.valid_options(options)
- super + [:polymorphic, :touch, :counter_cache, :optional]
+ super + [:polymorphic, :touch, :counter_cache, :optional, :default]
end
def self.valid_dependent_options
@@ -16,70 +18,40 @@ module ActiveRecord::Associations::Builder # :nodoc:
super
add_counter_cache_callbacks(model, reflection) if reflection.options[:counter_cache]
add_touch_callbacks(model, reflection) if reflection.options[:touch]
- end
-
- def self.define_accessors(mixin, reflection)
- super
- add_counter_cache_methods mixin
- end
-
- def self.add_counter_cache_methods(mixin)
- return if mixin.method_defined? :belongs_to_counter_cache_after_update
-
- mixin.class_eval do
- def belongs_to_counter_cache_after_update(reflection)
- foreign_key = reflection.foreign_key
- cache_column = reflection.counter_cache_column
-
- if (@_after_create_counter_called ||= false)
- @_after_create_counter_called = false
- elsif attribute_changed?(foreign_key) && !new_record?
- if reflection.polymorphic?
- model = attribute(reflection.foreign_type).try(:constantize)
- model_was = attribute_was(reflection.foreign_type).try(:constantize)
- else
- model = reflection.klass
- model_was = reflection.klass
- end
-
- foreign_key_was = attribute_was foreign_key
- foreign_key = attribute foreign_key
-
- if foreign_key && model.respond_to?(:increment_counter)
- model.increment_counter(cache_column, foreign_key)
- end
-
- if foreign_key_was && model_was.respond_to?(:decrement_counter)
- model_was.decrement_counter(cache_column, foreign_key_was)
- end
- end
- end
- end
+ add_default_callbacks(model, reflection) if reflection.options[:default]
end
def self.add_counter_cache_callbacks(model, reflection)
cache_column = reflection.counter_cache_column
model.after_update lambda { |record|
- record.belongs_to_counter_cache_after_update(reflection)
+ association = association(reflection.name)
+
+ if association.target_changed?
+ association.increment_counters
+ association.decrement_counters_before_last_save
+ end
}
klass = reflection.class_name.safe_constantize
klass.attr_readonly cache_column if klass && klass.respond_to?(:attr_readonly)
end
- def self.touch_record(o, foreign_key, name, touch, touch_method) # :nodoc:
- old_foreign_id = o.changed_attributes[foreign_key]
+ def self.touch_record(o, changes, foreign_key, name, touch, touch_method) # :nodoc:
+ old_foreign_id = changes[foreign_key] && changes[foreign_key].first
if old_foreign_id
association = o.association(name)
reflection = association.reflection
if reflection.polymorphic?
- klass = o.public_send("#{reflection.foreign_type}_was").constantize
+ foreign_type = reflection.foreign_type
+ klass = changes[foreign_type] && changes[foreign_type].first || o.public_send(foreign_type)
+ klass = klass.constantize
else
klass = association.klass
end
- old_record = klass.find_by(klass.primary_key => old_foreign_id)
+ primary_key = reflection.association_primary_key(klass)
+ old_record = klass.find_by(primary_key => old_foreign_id)
if old_record
if touch != true
@@ -105,13 +77,29 @@ module ActiveRecord::Associations::Builder # :nodoc:
n = reflection.name
touch = reflection.options[:touch]
- callback = lambda { |record|
- BelongsTo.touch_record(record, foreign_key, n, touch, belongs_to_touch_method)
- }
+ callback = lambda { |changes_method| lambda { |record|
+ BelongsTo.touch_record(record, record.send(changes_method), foreign_key, n, touch, belongs_to_touch_method)
+ }}
- model.after_save callback, if: :changed?
- model.after_touch callback
- model.after_destroy callback
+ if reflection.counter_cache_column
+ touch_callback = callback.(:saved_changes)
+ update_callback = lambda { |record|
+ instance_exec(record, &touch_callback) unless association(reflection.name).target_changed?
+ }
+ model.after_update update_callback, if: :saved_changes?
+ else
+ model.after_create callback.(:saved_changes), if: :saved_changes?
+ model.after_update callback.(:saved_changes), if: :saved_changes?
+ model.after_destroy callback.(:changes_to_save)
+ end
+
+ model.after_touch callback.(:changes_to_save)
+ end
+
+ def self.add_default_callbacks(model, reflection)
+ model.before_validation lambda { |o|
+ o.association(reflection.name).default(&reflection.options[:default])
+ }
end
def self.add_destroy_callbacks(model, reflection)
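The new :default option added above registers a before_validation callback that fills in the association while it is still nil; a sketch with hypothetical Company/Account/User models (the lambda is instance_exec'd on the record, so it can read sibling associations).

    class Company < ActiveRecord::Base
      has_one :account
    end

    class Account < ActiveRecord::Base
      belongs_to :company
    end

    class User < ActiveRecord::Base
      belongs_to :company
      belongs_to :account, default: -> { company.account }
    end

    user = User.new(company: Company.first)
    user.valid?
    user.account # => the company's account, unless one was assigned explicitly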
diff --git a/activerecord/lib/active_record/associations/builder/collection_association.rb b/activerecord/lib/active_record/associations/builder/collection_association.rb
index f25bd7ca9f..5848cd9112 100644
--- a/activerecord/lib/active_record/associations/builder/collection_association.rb
+++ b/activerecord/lib/active_record/associations/builder/collection_association.rb
@@ -1,10 +1,9 @@
-# This class is inherited by the has_many and has_many_and_belongs_to_many association classes
+# frozen_string_literal: true
-require 'active_record/associations'
+require "active_record/associations"
module ActiveRecord::Associations::Builder # :nodoc:
class CollectionAssociation < Association #:nodoc:
-
CALLBACKS = [:before_add, :after_add, :before_remove, :after_remove]
def self.valid_options(options)
@@ -21,11 +20,11 @@ module ActiveRecord::Associations::Builder # :nodoc:
}
end
- def self.define_extensions(model, name)
+ def self.define_extensions(model, name, &block)
if block_given?
extension_module_name = "#{model.name.demodulize}#{name.to_s.camelize}AssociationExtension"
- extension = Module.new(&Proc.new)
- model.parent.const_set(extension_module_name, extension)
+ extension = Module.new(&block)
+ model.module_parent.const_set(extension_module_name, extension)
end
end
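define_extensions now receives the block explicitly and names the generated module under module_parent; usage is unchanged, sketched here with a hypothetical Supplier model.

    class Supplier < ActiveRecord::Base
      has_many :orders do
        def urgent
          where(priority: "high") # extension methods run against the association's scope
        end
      end
    end

    # The block is wrapped in a module named SupplierOrdersAssociationExtension,
    # defined on Supplier's parent namespace and extended onto the proxy:
    Supplier.first.orders.urgent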
diff --git a/activerecord/lib/active_record/associations/builder/has_and_belongs_to_many.rb b/activerecord/lib/active_record/associations/builder/has_and_belongs_to_many.rb
index 5fbd79d118..0140aa15c8 100644
--- a/activerecord/lib/active_record/associations/builder/has_and_belongs_to_many.rb
+++ b/activerecord/lib/active_record/associations/builder/has_and_belongs_to_many.rb
@@ -1,38 +1,7 @@
+# frozen_string_literal: true
+
module ActiveRecord::Associations::Builder # :nodoc:
class HasAndBelongsToMany # :nodoc:
- class JoinTableResolver # :nodoc:
- KnownTable = Struct.new :join_table
-
- class KnownClass # :nodoc:
- def initialize(lhs_class, rhs_class_name)
- @lhs_class = lhs_class
- @rhs_class_name = rhs_class_name
- @join_table = nil
- end
-
- def join_table
- @join_table ||= [@lhs_class.table_name, klass.table_name].sort.join("\0").gsub(/^(.*[._])(.+)\0\1(.+)/, '\1\2_\3').tr("\0", "_")
- end
-
- private
-
- def klass
- @lhs_class.send(:compute_type, @rhs_class_name)
- end
- end
-
- def self.build(lhs_class, name, options)
- if options[:join_table]
- KnownTable.new options[:join_table].to_s
- else
- class_name = options.fetch(:class_name) {
- name.to_s.camelize.singularize
- }
- KnownClass.new lhs_class, class_name
- end
- end
- end
-
attr_reader :lhs_model, :association_name, :options
def initialize(association_name, lhs_model, options)
@@ -42,10 +11,8 @@ module ActiveRecord::Associations::Builder # :nodoc:
end
def through_model
- habtm = JoinTableResolver.build lhs_model, association_name, options
-
join_model = Class.new(ActiveRecord::Base) {
- class << self;
+ class << self
attr_accessor :left_model
attr_accessor :name
attr_accessor :table_name_resolver
@@ -54,7 +21,9 @@ module ActiveRecord::Associations::Builder # :nodoc:
end
def self.table_name
- table_name_resolver.join_table
+ # Table name needs to be resolved lazily
+ # because RHS class might not have been loaded
+ @table_name ||= table_name_resolver.call
end
def self.compute_type(class_name)
@@ -76,13 +45,15 @@ module ActiveRecord::Associations::Builder # :nodoc:
left_model.retrieve_connection
end
- def self.primary_key
- false
- end
+ private
+
+ def self.suppress_composite_primary_key(pk)
+ pk unless pk.is_a?(Array)
+ end
}
join_model.name = "HABTM_#{association_name.to_s.camelize}"
- join_model.table_name_resolver = habtm
+ join_model.table_name_resolver = -> { table_name }
join_model.left_model = lhs_model
join_model.add_left_association :left_side, anonymous_class: lhs_model
@@ -92,7 +63,7 @@ module ActiveRecord::Associations::Builder # :nodoc:
def middle_reflection(join_model)
middle_name = [lhs_model.name.downcase.pluralize,
- association_name].join('_'.freeze).gsub('::'.freeze, '_'.freeze).to_sym
+ association_name].join("_").gsub("::", "_").to_sym
middle_options = middle_options join_model
HasMany.create_reflection(lhs_model,
@@ -103,29 +74,41 @@ module ActiveRecord::Associations::Builder # :nodoc:
private
- def middle_options(join_model)
- middle_options = {}
- middle_options[:class_name] = "#{lhs_model.name}::#{join_model.name}"
- middle_options[:source] = join_model.left_reflection.name
- if options.key? :foreign_key
- middle_options[:foreign_key] = options[:foreign_key]
+ def middle_options(join_model)
+ middle_options = {}
+ middle_options[:class_name] = "#{lhs_model.name}::#{join_model.name}"
+ middle_options[:source] = join_model.left_reflection.name
+ if options.key? :foreign_key
+ middle_options[:foreign_key] = options[:foreign_key]
+ end
+ middle_options
end
- middle_options
- end
- def belongs_to_options(options)
- rhs_options = {}
-
- if options.key? :class_name
- rhs_options[:foreign_key] = options[:class_name].to_s.foreign_key
- rhs_options[:class_name] = options[:class_name]
+ def table_name
+ if options[:join_table]
+ options[:join_table].to_s
+ else
+ class_name = options.fetch(:class_name) {
+ association_name.to_s.camelize.singularize
+ }
+ klass = lhs_model.send(:compute_type, class_name.to_s)
+ [lhs_model.table_name, klass.table_name].sort.join("\0").gsub(/^(.*[._])(.+)\0\1(.+)/, '\1\2_\3').tr("\0", "_")
+ end
end
- if options.key? :association_foreign_key
- rhs_options[:foreign_key] = options[:association_foreign_key]
- end
+ def belongs_to_options(options)
+ rhs_options = {}
- rhs_options
- end
+ if options.key? :class_name
+ rhs_options[:foreign_key] = options[:class_name].to_s.foreign_key
+ rhs_options[:class_name] = options[:class_name]
+ end
+
+ if options.key? :association_foreign_key
+ rhs_options[:foreign_key] = options[:association_foreign_key]
+ end
+
+ rhs_options
+ end
end
end
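The table_name helper above reproduces the old JoinTableResolver derivation, just resolved lazily; the string manipulation on its own looks like this (plain Ruby, hypothetical table names).

    # Same derivation as the table_name helper above, in isolation:
    def habtm_join_table(lhs_table, rhs_table)
      [lhs_table, rhs_table].sort.join("\0")
        .gsub(/^(.*[._])(.+)\0\1(.+)/, '\1\2_\3') # collapse a shared prefix
        .tr("\0", "_")
    end

    habtm_join_table("developers", "projects")         # => "developers_projects"
    habtm_join_table("music_artists", "music_records") # => "music_artists_records"

    # An explicit join_table: option still wins over the derived name.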
diff --git a/activerecord/lib/active_record/associations/builder/has_many.rb b/activerecord/lib/active_record/associations/builder/has_many.rb
index 7864d4c536..5b9617bc6d 100644
--- a/activerecord/lib/active_record/associations/builder/has_many.rb
+++ b/activerecord/lib/active_record/associations/builder/has_many.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
module ActiveRecord::Associations::Builder # :nodoc:
class HasMany < CollectionAssociation #:nodoc:
def self.macro
diff --git a/activerecord/lib/active_record/associations/builder/has_one.rb b/activerecord/lib/active_record/associations/builder/has_one.rb
index 4de846d12b..bfb37d6eee 100644
--- a/activerecord/lib/active_record/associations/builder/has_one.rb
+++ b/activerecord/lib/active_record/associations/builder/has_one.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
module ActiveRecord::Associations::Builder # :nodoc:
class HasOne < SingularAssociation #:nodoc:
def self.macro
diff --git a/activerecord/lib/active_record/associations/builder/singular_association.rb b/activerecord/lib/active_record/associations/builder/singular_association.rb
index bb96202a22..0a02ef4cc1 100644
--- a/activerecord/lib/active_record/associations/builder/singular_association.rb
+++ b/activerecord/lib/active_record/associations/builder/singular_association.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
# This class is inherited by the has_one and belongs_to association classes
module ActiveRecord::Associations::Builder # :nodoc:
@@ -8,7 +10,16 @@ module ActiveRecord::Associations::Builder # :nodoc:
def self.define_accessors(model, reflection)
super
- define_constructors(model.generated_association_methods, reflection.name) if reflection.constructable?
+ mixin = model.generated_association_methods
+ name = reflection.name
+
+ define_constructors(mixin, name) if reflection.constructable?
+
+ mixin.class_eval <<-CODE, __FILE__, __LINE__ + 1
+ def reload_#{name}
+ association(:#{name}).force_reload_reader
+ end
+ CODE
end
# Defines the (build|create)_association methods for belongs_to or has_one association
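The generated reload_#{name} reader gives singular associations an explicit cache-busting accessor; a sketch with a hypothetical Book model:

    class Book < ActiveRecord::Base
      belongs_to :author
    end

    book = Book.first
    book.author        # loads and caches the associated record
    book.reload_author # force_reload_reader: discards the cache and queries again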
diff --git a/activerecord/lib/active_record/associations/collection_association.rb b/activerecord/lib/active_record/associations/collection_association.rb
index 2dca6b612e..c3d4eab562 100644
--- a/activerecord/lib/active_record/associations/collection_association.rb
+++ b/activerecord/lib/active_record/associations/collection_association.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
module ActiveRecord
module Associations
# = Active Record Association Collection
@@ -10,9 +12,9 @@ module ActiveRecord
# HasManyAssociation => has_many
# HasManyThroughAssociation + ThroughAssociation => has_many :through
#
- # CollectionAssociation class provides common methods to the collections
+ # The CollectionAssociation class provides common methods to the collections
# defined by +has_and_belongs_to_many+, +has_many+ or +has_many+ with
- # +:through association+ option.
+  # the +:through+ association option.
#
# You need to be careful with assumptions regarding the target: The proxy
# does not fetch records from the database until it needs them, but new
@@ -24,28 +26,14 @@ module ActiveRecord
# If you need to work on all current children, new and existing records,
# +load_target+ and the +loaded+ flag are your friends.
class CollectionAssociation < Association #:nodoc:
-
# Implements the reader method, e.g. foo.items for Foo.has_many :items
- def reader(force_reload = false)
- if force_reload
- ActiveSupport::Deprecation.warn(<<-MSG.squish)
- Passing an argument to force an association to reload is now
- deprecated and will be removed in Rails 5.1. Please call `reload`
- on the result collection proxy instead.
- MSG
-
- klass.uncached { reload }
- elsif stale_target?
+ def reader
+ if stale_target?
reload
end
- if null_scope?
- # Cache the proxy separately before the owner has an id
- # or else a post-save proxy will still lack the id
- @null_proxy ||= CollectionProxy.create(klass, self)
- else
- @proxy ||= CollectionProxy.create(klass, self)
- end
+ @proxy ||= CollectionProxy.create(klass, self)
+ @proxy.reset_scope
end
# Implements the writer method, e.g. foo.items= for Foo.has_many :items
@@ -56,105 +44,60 @@ module ActiveRecord
# Implements the ids reader method, e.g. foo.item_ids for Foo.has_many :items
def ids_reader
if loaded?
- load_target.map do |record|
- record.send(reflection.association_primary_key)
- end
+ target.pluck(reflection.association_primary_key)
+ elsif !target.empty?
+ load_target.pluck(reflection.association_primary_key)
else
- @association_ids ||= (
- column = "#{reflection.quoted_table_name}.#{reflection.association_primary_key}"
- scope.pluck(column)
- )
+ @association_ids ||= scope.pluck(reflection.association_primary_key)
end
end
# Implements the ids writer method, e.g. foo.item_ids= for Foo.has_many :items
def ids_writer(ids)
- pk_type = reflection.primary_key_type
+ primary_key = reflection.association_primary_key
+ pk_type = klass.type_for_attribute(primary_key)
ids = Array(ids).reject(&:blank?)
ids.map! { |i| pk_type.cast(i) }
- records = klass.where(reflection.association_primary_key => ids).index_by do |r|
- r.send(reflection.association_primary_key)
- end.values_at(*ids)
- replace(records)
- end
- def reset
- super
- @target = []
- end
+ records = klass.where(primary_key => ids).index_by do |r|
+ r.public_send(primary_key)
+ end.values_at(*ids).compact
- def select(*fields)
- if block_given?
- load_target.select.each { |e| yield e }
+ if records.size != ids.size
+ found_ids = records.map { |record| record.public_send(primary_key) }
+ not_found_ids = ids - found_ids
+ klass.all.raise_record_not_found_exception!(ids, records.size, ids.size, primary_key, not_found_ids)
else
- scope.select(*fields)
+ replace(records)
end
end
- def find(*args)
- if block_given?
- load_target.find(*args) { |*block_args| yield(*block_args) }
- else
- if options[:inverse_of] && loaded?
- args_flatten = args.flatten
- raise RecordNotFound, "Couldn't find #{scope.klass.name} without an ID" if args_flatten.blank?
- result = find_by_scan(*args)
-
- result_size = Array(result).size
- if !result || result_size != args_flatten.size
- scope.raise_record_not_found_exception!(args_flatten, result_size, args_flatten.size)
- else
- result
- end
- else
- scope.find(*args)
- end
- end
- end
-
- def first(*args)
- first_nth_or_last(:first, *args)
- end
-
- def second(*args)
- first_nth_or_last(:second, *args)
- end
-
- def third(*args)
- first_nth_or_last(:third, *args)
- end
-
- def fourth(*args)
- first_nth_or_last(:fourth, *args)
- end
-
- def fifth(*args)
- first_nth_or_last(:fifth, *args)
- end
-
- def forty_two(*args)
- first_nth_or_last(:forty_two, *args)
+ def reset
+ super
+ @target = []
+ @association_ids = nil
end
- def third_to_last(*args)
- first_nth_or_last(:third_to_last, *args)
- end
+ def find(*args)
+ if options[:inverse_of] && loaded?
+ args_flatten = args.flatten
+ model = scope.klass
- def second_to_last(*args)
- first_nth_or_last(:second_to_last, *args)
- end
+ if args_flatten.blank?
+ error_message = "Couldn't find #{model.name} without an ID"
+ raise RecordNotFound.new(error_message, model.name, model.primary_key, args)
+ end
- def last(*args)
- first_nth_or_last(:last, *args)
- end
+ result = find_by_scan(*args)
- def take(n = nil)
- if loaded?
- n ? target.take(n) : target.first
- else
- scope.take(n).tap do |record|
- set_inverse_instance record if record.is_a? ActiveRecord::Base
+ result_size = Array(result).size
+ if !result || result_size != args_flatten.size
+ scope.raise_record_not_found_exception!(args_flatten, result_size, args_flatten.size)
+ else
+ result
end
+ else
+ scope.find(*args)
end
end
@@ -162,23 +105,12 @@ module ActiveRecord
if attributes.is_a?(Array)
attributes.collect { |attr| build(attr, &block) }
else
- add_to_target(build_record(attributes)) do |record|
- yield(record) if block_given?
- end
+ add_to_target(build_record(attributes, &block))
end
end
- def create(attributes = {}, &block)
- _create_record(attributes, &block)
- end
-
- def create!(attributes = {}, &block)
- _create_record(attributes, true, &block)
- end
-
- # Add +records+ to this association. Returns +self+ so method calls may
- # be chained. Since << flattens its argument list and inserts each record,
- # +push+ and +concat+ behave identically.
+ # Add +records+ to this association. Since +<<+ flattens its argument list
+ # and inserts each record, +push+ and +concat+ behave identically.
def concat(*records)
records = records.flatten
if owner.new_record?
@@ -223,12 +155,12 @@ module ActiveRecord
end
dependent = if dependent
- dependent
- elsif options[:dependent] == :destroy
- :delete_all
- else
- options[:dependent]
- end
+ dependent
+ elsif options[:dependent] == :destroy
+ :delete_all
+ else
+ options[:dependent]
+ end
delete_or_nullify_all_records(dependent).tap do
reset
@@ -246,28 +178,6 @@ module ActiveRecord
end
end
- # Count all records using SQL. Construct options and pass them with
- # scope to the target class's +count+.
- def count(column_name = nil)
- relation = scope
- if association_scope.distinct_value
- # This is needed because 'SELECT count(DISTINCT *)..' is not valid SQL.
- column_name ||= reflection.klass.primary_key
- relation = relation.distinct
- end
-
- value = relation.count(column_name)
-
- limit = options[:limit]
- offset = options[:offset]
-
- if limit || offset
- [ [value - offset.to_i, 0].max, limit.to_i ].min
- else
- value
- end
- end
-
# Removes +records+ from this association calling +before_remove+ and
# +after_remove+ callbacks.
#
@@ -276,12 +186,7 @@ module ActiveRecord
# are actually removed from the database, that depends precisely on
# +delete_records+. They are in any case removed from the collection.
def delete(*records)
- return if records.empty?
- _options = records.extract_options!
- dependent = _options[:dependent] || options[:dependent]
-
- records = find(records) if records.any? { |record| record.kind_of?(Fixnum) || record.kind_of?(String) }
- delete_or_destroy(records, dependent)
+ delete_or_destroy(records, options[:dependent])
end
# Deletes the +records+ and removes them from this association calling
@@ -290,8 +195,6 @@ module ActiveRecord
# Note that this method removes records from the database ignoring the
# +:dependent+ option.
def destroy(*records)
- return if records.empty?
- records = find(records) if records.any? { |record| record.kind_of?(Fixnum) || record.kind_of?(String) }
delete_or_destroy(records, :destroy)
end
@@ -307,14 +210,12 @@ module ActiveRecord
# +count_records+, which is a method descendants have to provide.
def size
if !find_target? || loaded?
- if association_scope.distinct_value
- target.uniq.size
- else
- target.size
- end
- elsif !loaded? && !association_scope.group_values.empty?
+ target.size
+ elsif @association_ids
+ @association_ids.size
+ elsif !association_scope.group_values.empty?
load_target.size
- elsif !loaded? && !association_scope.distinct_value && target.is_a?(Array)
+ elsif !association_scope.distinct_value && !target.empty?
unsaved_records = target.select(&:new_record?)
unsaved_records.size + count_records
else
@@ -322,15 +223,6 @@ module ActiveRecord
end
end
- # Returns the size of the collection calling +size+ on the target.
- #
- # If the collection has been already loaded +length+ and +size+ are
- # equivalent. If not and you are going to need the records anyway this
- # method will take one less query. Otherwise +size+ is more efficient.
- def length
- load_target.size
- end
-
# Returns true if the collection is empty.
#
# If the collection has been loaded
@@ -340,42 +232,12 @@ module ActiveRecord
# loaded and you are going to fetch the records anyway it is better to
# check <tt>collection.length.zero?</tt>.
def empty?
- if loaded?
+ if loaded? || @association_ids || reflection.has_cached_counter?
size.zero?
else
- @target.blank? && !scope.exists?
- end
- end
-
- # Returns true if the collections is not empty.
- # If block given, loads all records and checks for one or more matches.
- # Otherwise, equivalent to +!collection.empty?+.
- def any?
- if block_given?
- load_target.any? { |*block_args| yield(*block_args) }
- else
- !empty?
- end
- end
-
- # Returns true if the collection has more than 1 record.
- # If block given, loads all records and checks for two or more matches.
- # Otherwise, equivalent to +collection.size > 1+.
- def many?
- if block_given?
- load_target.many? { |*block_args| yield(*block_args) }
- else
- size > 1
- end
- end
-
- def distinct
- seen = {}
- load_target.find_all do |record|
- seen[record.id] = true unless seen.key?(record.id)
+ target.empty? && !scope.exists?
end
end
- alias uniq distinct
# Replace this collection with +other_array+. This will perform a diff
# and delete/add only records that have changed.
@@ -423,29 +285,9 @@ module ActiveRecord
replace_on_target(record, index, skip_callbacks, &block)
end
- def replace_on_target(record, index, skip_callbacks)
- callback(:before_add, record) unless skip_callbacks
-
- was_loaded = loaded?
- yield(record) if block_given?
-
- unless !was_loaded && loaded?
- if index
- @target[index] = record
- else
- @target << record
- end
- end
-
- callback(:after_add, record) unless skip_callbacks
- set_inverse_instance(record)
-
- record
- end
-
- def scope(opts = {})
- scope = super()
- scope.none! if opts.fetch(:nullify, true) && null_scope?
+ def scope
+ scope = super
+ scope.none! if null_scope?
scope
end
@@ -453,28 +295,13 @@ module ActiveRecord
owner.new_record? && !foreign_key_present?
end
- private
- def get_records
- return scope.to_a if skip_statement_cache?
-
- conn = klass.connection
- sc = reflection.association_scope_cache(conn, owner) do
- StatementCache.create(conn) { |params|
- as = AssociationScope.create { params.bind }
- target_scope.merge as.scope(self, conn)
- }
- end
-
- binds = AssociationScope.get_bind_values(owner, reflection.chain)
- sc.execute binds, klass, klass.connection
+ def find_from_target?
+ loaded? ||
+ owner.new_record? ||
+ target.any? { |record| record.new_record? || record.changed? }
end
- def find_target
- records = get_records
- records.each { |record| set_inverse_instance(record) }
- records
- end
-
+ private
# We have some records loaded from the database (persisted) and some that are
# in-memory (memory). The same record may be represented in the persisted array
# and in the memory array.
@@ -492,7 +319,7 @@ module ActiveRecord
persisted.map! do |record|
if mem_record = memory.delete(record)
- ((record.attribute_names & mem_record.attribute_names) - mem_record.changes.keys).each do |name|
+ ((record.attribute_names & mem_record.attribute_names) - mem_record.changed_attribute_names_to_save).each do |name|
mem_record[name] = record[name]
end
@@ -513,25 +340,32 @@ module ActiveRecord
if attributes.is_a?(Array)
attributes.collect { |attr| _create_record(attr, raise, &block) }
else
+ record = build_record(attributes, &block)
transaction do
- add_to_target(build_record(attributes)) do |record|
- yield(record) if block_given?
- insert_record(record, true, raise)
+ result = nil
+ add_to_target(record) do
+ result = insert_record(record, true, raise) {
+ @_was_loaded = loaded?
+ }
end
+ raise ActiveRecord::Rollback unless result
end
+ record
end
end
# Do the relevant stuff to insert the given record into the association collection.
- def insert_record(record, validate = true, raise = false)
- raise NotImplementedError
- end
-
- def create_scope
- scope.scope_for_create.stringify_keys
+ def insert_record(record, validate = true, raise = false, &block)
+ if raise
+ record.save!(validate: validate, &block)
+ else
+ record.save(validate: validate, &block)
+ end
end
def delete_or_destroy(records, method)
+ return if records.empty?
+ records = find(records) if records.any? { |record| record.kind_of?(Integer) || record.kind_of?(String) }
records = records.flatten
records.each { |record| raise_on_type_mismatch!(record) }
existing_records = records.reject(&:new_record?)
@@ -547,21 +381,23 @@ module ActiveRecord
records.each { |record| callback(:before_remove, record) }
delete_records(existing_records, method) if existing_records.any?
- records.each { |record| target.delete(record) }
+ @target -= records
+ @association_ids = nil
records.each { |record| callback(:after_remove, record) }
end
- # Delete the given records from the association, using one of the methods :destroy,
- # :delete_all or :nullify (or nil, in which case a default is used).
+ # Delete the given records from the association,
+ # using one of the methods +:destroy+, +:delete_all+
+ # or +:nullify+ (or +nil+, in which case a default is used).
def delete_records(records, method)
raise NotImplementedError
end
def replace_records(new_target, original_target)
- delete(target - new_target)
+ delete(difference(target, new_target))
- unless concat(new_target - target)
+ unless concat(difference(new_target, target))
@target = original_target
raise RecordNotSaved, "Failed to replace #{reflection.name} because one or more of the " \
"new records could not be saved."
@@ -571,24 +407,53 @@ module ActiveRecord
end
def replace_common_records_in_memory(new_target, original_target)
- common_records = new_target & original_target
+ common_records = intersection(new_target, original_target)
common_records.each do |record|
skip_callbacks = true
replace_on_target(record, @target.index(record), skip_callbacks)
end
end
- def concat_records(records, should_raise = false)
+ def concat_records(records, raise = false)
result = true
records.each do |record|
raise_on_type_mismatch!(record)
- add_to_target(record) do |rec|
- result &&= insert_record(rec, true, should_raise) unless owner.new_record?
+ add_to_target(record) do
+ unless owner.new_record?
+ result &&= insert_record(record, true, raise) {
+ @_was_loaded = loaded?
+ }
+ end
end
end
- result && records
+ raise ActiveRecord::Rollback unless result
+
+ records
+ end
+
+ def replace_on_target(record, index, skip_callbacks)
+ callback(:before_add, record) unless skip_callbacks
+
+ set_inverse_instance(record)
+
+ @_was_loaded = true
+
+ yield(record) if block_given?
+
+ if index
+ target[index] = record
+ elsif @_was_loaded || !loaded?
+ @association_ids = nil
+ target << record
+ end
+
+ callback(:after_add, record) unless skip_callbacks
+
+ record
+ ensure
+ @_was_loaded = nil
end
def callback(method, record)
@@ -602,25 +467,6 @@ module ActiveRecord
owner.class.send(full_callback_name)
end
- # Should we deal with assoc.first or assoc.last by issuing an independent query to
- # the database, or by getting the target, and then taking the first/last item from that?
- #
- # If the args is just a non-empty options hash, go to the database.
- #
- # Otherwise, go to the database only if none of the following are true:
- # * target already loaded
- # * owner is new record
- # * target contains new or changed record(s)
- def fetch_first_nth_or_last_using_find?(args)
- if args.first.is_a?(Hash)
- true
- else
- !(loaded? ||
- owner.new_record? ||
- target.any? { |record| record.new_record? || record.changed? })
- end
- end
-
def include_in_memory?(record)
if reflection.is_a?(ActiveRecord::Reflection::ThroughReflection)
assoc = owner.association(reflection.through_reflection.name)
@@ -647,16 +493,6 @@ module ActiveRecord
load_target.select { |r| ids.include?(r.id.to_s) }
end
end
-
- # Fetches the first/last using SQL if possible, otherwise from the target array.
- def first_nth_or_last(type, *args)
- args.shift if args.first.is_a?(Hash) && args.first.empty?
-
- collection = fetch_first_nth_or_last_using_find?(args) ? scope : load_target
- collection.send(type, *args).tap do |record|
- set_inverse_instance record if record.is_a? ActiveRecord::Base
- end
- end
end
end
end
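Two behavioural notes from the collection changes above, sketched with the Person/Pet models used in the docs elsewhere in this diff: ids_writer now raises when some of the given ids cannot be found, and delete/destroy resolve Integer or String ids inside delete_or_destroy.

    class Person < ActiveRecord::Base
      has_many :pets
    end

    person = Person.first
    person.pet_ids = [1, 2]        # replaces the collection with pets 1 and 2
    person.pet_ids = [1, 999_999]  # raises ActiveRecord::RecordNotFound if 999999 is absent

    person.pets.delete(1, "2")     # ids are looked up via find before removal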
diff --git a/activerecord/lib/active_record/associations/collection_proxy.rb b/activerecord/lib/active_record/associations/collection_proxy.rb
index b9aed05135..edcb44f0fc 100644
--- a/activerecord/lib/active_record/associations/collection_proxy.rb
+++ b/activerecord/lib/active_record/associations/collection_proxy.rb
@@ -1,10 +1,9 @@
+# frozen_string_literal: true
+
module ActiveRecord
module Associations
- # Association proxies in Active Record are middlemen between the object that
- # holds the association, known as the <tt>@owner</tt>, and the actual associated
- # object, known as the <tt>@target</tt>. The kind of association any proxy is
- # about is available in <tt>@reflection</tt>. That's an instance of the class
- # ActiveRecord::Reflection::AssociationReflection.
+ # Collection proxies in Active Record are middlemen between an
+    # <tt>association</tt> and its <tt>target</tt> result set.
#
# For example, given
#
@@ -14,27 +13,26 @@ module ActiveRecord
#
# blog = Blog.first
#
- # the association proxy in <tt>blog.posts</tt> has the object in +blog+ as
- # <tt>@owner</tt>, the collection of its posts as <tt>@target</tt>, and
- # the <tt>@reflection</tt> object represents a <tt>:has_many</tt> macro.
+ # The collection proxy returned by <tt>blog.posts</tt> is built from a
+ # <tt>:has_many</tt> <tt>association</tt>, and delegates to a collection
+ # of posts as the <tt>target</tt>.
#
- # This class delegates unknown methods to <tt>@target</tt> via
- # <tt>method_missing</tt>.
+ # This class delegates unknown methods to the <tt>association</tt>'s
+ # relation class via a delegate cache.
#
- # The <tt>@target</tt> object is not \loaded until needed. For example,
+ # The <tt>target</tt> result set is not loaded until needed. For example,
#
# blog.posts.count
#
# is computed directly through SQL and does not trigger by itself the
# instantiation of the actual post records.
class CollectionProxy < Relation
- delegate(*(ActiveRecord::Calculations.public_instance_methods - [:count]), to: :scope)
- delegate :find_nth, to: :scope
-
def initialize(klass, association) #:nodoc:
@association = association
- super klass, klass.arel_table, klass.predicate_builder
- merge! association.scope(nullify: false)
+ super klass
+
+ extensions = association.extensions
+ extend(*extensions) if extensions.any?
end
def target
@@ -54,6 +52,12 @@ module ActiveRecord
@association.loaded?
end
+ ##
+ # :method: select
+ #
+ # :call-seq:
+ # select(*fields, &block)
+ #
# Works in two ways.
#
# *First:* Specify a subset of fields to be selected from the result set.
@@ -76,7 +80,7 @@ module ActiveRecord
# # #<Pet id: nil, name: "Choo-Choo">
# # ]
#
- # person.pets.select(:id, :name )
+ # person.pets.select(:id, :name)
# # => [
# # #<Pet id: 1, name: "Fancy-Fancy">,
# # #<Pet id: 2, name: "Spook">,
@@ -101,15 +105,6 @@ module ActiveRecord
# # #<Pet id: 2, name: "Spook", person_id: 1>,
# # #<Pet id: 3, name: "Choo-Choo", person_id: 1>
# # ]
- #
- # person.pets.select(:name) { |pet| pet.name =~ /oo/ }
- # # => [
- # # #<Pet id: 2, name: "Spook">,
- # # #<Pet id: 3, name: "Choo-Choo">
- # # ]
- def select(*fields, &block)
- @association.select(*fields, &block)
- end
# Finds an object in the collection responding to the +id+. Uses the same
# rules as ActiveRecord::Base.find. Returns ActiveRecord::RecordNotFound
@@ -137,10 +132,17 @@ module ActiveRecord
# # #<Pet id: 2, name: "Spook", person_id: 1>,
# # #<Pet id: 3, name: "Choo-Choo", person_id: 1>
# # ]
- def find(*args, &block)
- @association.find(*args, &block)
+ def find(*args)
+ return super if block_given?
+ @association.find(*args)
end
+ ##
+ # :method: first
+ #
+ # :call-seq:
+ # first(limit = nil)
+ #
# Returns the first record, or the first +n+ records, from the collection.
# If the collection is empty, the first form returns +nil+, and the second
# form returns an empty array.
@@ -167,45 +169,63 @@ module ActiveRecord
# another_person_without.pets # => []
# another_person_without.pets.first # => nil
# another_person_without.pets.first(3) # => []
- def first(*args)
- @association.first(*args)
- end
+ ##
+ # :method: second
+ #
+ # :call-seq:
+ # second()
+ #
# Same as #first except returns only the second record.
- def second(*args)
- @association.second(*args)
- end
+ ##
+ # :method: third
+ #
+ # :call-seq:
+ # third()
+ #
# Same as #first except returns only the third record.
- def third(*args)
- @association.third(*args)
- end
+ ##
+ # :method: fourth
+ #
+ # :call-seq:
+ # fourth()
+ #
# Same as #first except returns only the fourth record.
- def fourth(*args)
- @association.fourth(*args)
- end
+ ##
+ # :method: fifth
+ #
+ # :call-seq:
+ # fifth()
+ #
# Same as #first except returns only the fifth record.
- def fifth(*args)
- @association.fifth(*args)
- end
+ ##
+ # :method: forty_two
+ #
+ # :call-seq:
+ # forty_two()
+ #
# Same as #first except returns only the forty second record.
# Also known as accessing "the reddit".
- def forty_two(*args)
- @association.forty_two(*args)
- end
+ ##
+ # :method: third_to_last
+ #
+ # :call-seq:
+ # third_to_last()
+ #
# Same as #first except returns only the third-to-last record.
- def third_to_last(*args)
- @association.third_to_last(*args)
- end
+ ##
+ # :method: second_to_last
+ #
+ # :call-seq:
+ # second_to_last()
+ #
# Same as #first except returns only the second-to-last record.
- def second_to_last(*args)
- @association.second_to_last(*args)
- end
# Returns the last record, or the last +n+ records, from the collection.
# If the collection is empty, the first form returns +nil+, and the second
@@ -233,8 +253,9 @@ module ActiveRecord
# another_person_without.pets # => []
# another_person_without.pets.last # => nil
# another_person_without.pets.last(3) # => []
- def last(*args)
- @association.last(*args)
+ def last(limit = nil)
+ load_target if find_from_target?
+ super
end
# Gives a record (or N records if a parameter is supplied) from the collection
@@ -262,8 +283,9 @@ module ActiveRecord
# another_person_without.pets # => []
# another_person_without.pets.take # => nil
# another_person_without.pets.take(2) # => []
- def take(n = nil)
- @association.take(n)
+ def take(limit = nil)
+ load_target if find_from_target?
+ super
end
# Returns a new object of the collection type that has been instantiated
@@ -341,34 +363,6 @@ module ActiveRecord
@association.create!(attributes, &block)
end
- # Add one or more records to the collection by setting their foreign keys
- # to the association's primary key. Since #<< flattens its argument list and
- # inserts each record, +push+ and #concat behave identically. Returns +self+
- # so method calls may be chained.
- #
- # class Person < ActiveRecord::Base
- # has_many :pets
- # end
- #
- # person.pets.size # => 0
- # person.pets.concat(Pet.new(name: 'Fancy-Fancy'))
- # person.pets.concat(Pet.new(name: 'Spook'), Pet.new(name: 'Choo-Choo'))
- # person.pets.size # => 3
- #
- # person.id # => 1
- # person.pets
- # # => [
- # # #<Pet id: 1, name: "Fancy-Fancy", person_id: 1>,
- # # #<Pet id: 2, name: "Spook", person_id: 1>,
- # # #<Pet id: 3, name: "Choo-Choo", person_id: 1>
- # # ]
- #
- # person.pets.concat([Pet.new(name: 'Brain'), Pet.new(name: 'Benny')])
- # person.pets.size # => 5
- def concat(*records)
- @association.concat(*records)
- end
-
# Replaces this collection with +other_array+. This will perform a diff
# and delete/add only records that have changed.
#
@@ -475,7 +469,7 @@ module ActiveRecord
# Pet.find(1, 2, 3)
# # => ActiveRecord::RecordNotFound: Couldn't find all Pets with 'id': (1, 2, 3)
def delete_all(dependent = nil)
- @association.delete_all(dependent)
+ @association.delete_all(dependent).tap { reset_scope }
end
# Deletes the records of the collection directly from the database
@@ -502,7 +496,7 @@ module ActiveRecord
#
# Pet.find(1) # => Couldn't find Pet with id=1
def destroy_all
- @association.destroy_all
+ @association.destroy_all.tap { reset_scope }
end
# Deletes the +records+ supplied from the collection according to the strategy
@@ -597,7 +591,7 @@ module ActiveRecord
# Pet.find(1)
# # => ActiveRecord::RecordNotFound: Couldn't find Pet with 'id'=1
#
- # You can pass +Fixnum+ or +String+ values, it finds the records
+ # You can pass +Integer+ or +String+ values, it finds the records
# responding to the +id+ and executes delete on them.
#
# class Person < ActiveRecord::Base
@@ -621,7 +615,7 @@ module ActiveRecord
# # #<Pet id: 3, name: "Choo-Choo", person_id: 1>
# # ]
def delete(*records)
- @association.delete(*records)
+ @association.delete(*records).tap { reset_scope }
end
# Destroys the +records+ supplied and removes them from the collection.
@@ -661,7 +655,7 @@ module ActiveRecord
#
# Pet.find(1, 2, 3) # => ActiveRecord::RecordNotFound: Couldn't find all Pets with 'id': (1, 2, 3)
#
- # You can pass +Fixnum+ or +String+ values, it finds the records
+ # You can pass +Integer+ or +String+ values, it finds the records
# responding to the +id+ and then deletes them from the database.
#
# person.pets.size # => 3
@@ -693,9 +687,15 @@ module ActiveRecord
#
# Pet.find(4, 5, 6) # => ActiveRecord::RecordNotFound: Couldn't find all Pets with 'id': (4, 5, 6)
def destroy(*records)
- @association.destroy(*records)
+ @association.destroy(*records).tap { reset_scope }
end
+ ##
+ # :method: distinct
+ #
+ # :call-seq:
+ # distinct(value = true)
+ #
# Specifies whether the records should be unique or not.
#
# class Person < ActiveRecord::Base
@@ -710,17 +710,35 @@ module ActiveRecord
#
# person.pets.select(:name).distinct
# # => [#<Pet name: "Fancy-Fancy">]
- def distinct
- @association.distinct
+ #
+ # person.pets.select(:name).distinct.distinct(false)
+ # # => [
+ # # #<Pet name: "Fancy-Fancy">,
+ # # #<Pet name: "Fancy-Fancy">
+ # # ]
+
+ #--
+ def calculate(operation, column_name)
+ null_scope? ? scope.calculate(operation, column_name) : super
end
- alias uniq distinct
- # Count all records using SQL.
+ def pluck(*column_names)
+ null_scope? ? scope.pluck(*column_names) : super
+ end
+
+ ##
+ # :method: count
+ #
+ # :call-seq:
+ # count(column_name = nil, &block)
+ #
+ # Count all records.
#
# class Person < ActiveRecord::Base
# has_many :pets
# end
#
+ # # This will perform the count using SQL.
# person.pets.count # => 3
# person.pets
# # => [
@@ -728,9 +746,11 @@ module ActiveRecord
# # #<Pet id: 2, name: "Spook", person_id: 1>,
# # #<Pet id: 3, name: "Choo-Choo", person_id: 1>
# # ]
- def count(column_name = nil)
- @association.count(column_name)
- end
+ #
+ # Passing a block will select all of a person's pets in SQL and then
+ # perform the count using Ruby.
+ #
+ # person.pets.count { |pet| pet.name.include?('-') } # => 2
# Returns the size of the collection. If the collection hasn't been loaded,
# it executes a <tt>SELECT COUNT(*)</tt> query. Else it calls <tt>collection.size</tt>.
@@ -760,6 +780,12 @@ module ActiveRecord
@association.size
end
+ ##
+ # :method: length
+ #
+ # :call-seq:
+ # length()
+ #
# Returns the size of the collection calling +size+ on the target.
# If the collection has been already loaded, +length+ and +size+ are
# equivalent. If not and you are going to need the records anyway this
@@ -780,14 +806,11 @@ module ActiveRecord
# # #<Pet id: 2, name: "Spook", person_id: 1>,
# # #<Pet id: 3, name: "Choo-Choo", person_id: 1>
# # ]
- def length
- @association.length
- end
# Returns +true+ if the collection is empty. If the collection has been
# loaded it is equivalent
# to <tt>collection.size.zero?</tt>. If the collection has not been loaded,
- # it is equivalent to <tt>collection.exists?</tt>. If the collection has
+ # it is equivalent to <tt>!collection.exists?</tt>. If the collection has
# not already been loaded and you are going to fetch the records anyway it
# is better to check <tt>collection.length.zero?</tt>.
#
@@ -806,6 +829,12 @@ module ActiveRecord
@association.empty?
end
+ ##
+ # :method: any?
+ #
+ # :call-seq:
+ # any?()
+ #
# Returns +true+ if the collection is not empty.
#
# class Person < ActiveRecord::Base
@@ -835,10 +864,13 @@ module ActiveRecord
# pet.group == 'dogs'
# end
# # => true
- def any?(&block)
- @association.any?(&block)
- end
+ ##
+ # :method: many?
+ #
+ # :call-seq:
+ # many?()
+ #
# Returns true if the collection has more than one record.
# Equivalent to <tt>collection.size > 1</tt>.
#
@@ -873,9 +905,6 @@ module ActiveRecord
# pet.group == 'cats'
# end
# # => true
- def many?(&block)
- @association.many?(&block)
- end
# Returns +true+ if the given +record+ is present in the collection.
#
@@ -891,27 +920,14 @@ module ActiveRecord
!!@association.include?(record)
end
- def arel #:nodoc:
- scope.arel
- end
-
def proxy_association
@association
end
- # We don't want this object to be put on the scoping stack, because
- # that could create an infinite loop where we call an @association
- # method, which gets the current scope, which is this object, which
- # delegates to @association, and so on.
- def scoping
- @association.scope.scoping { yield }
- end
-
# Returns a <tt>Relation</tt> object for the records in this association
def scope
- @association.scope
+ @scope ||= @association.scope
end
- alias spawn scope
# Equivalent to <tt>Array#==</tt>. Returns +true+ if the two arrays
# contain the same number of elements and if each element is equal
@@ -941,6 +957,12 @@ module ActiveRecord
load_target == other
end
+ ##
+ # :method: to_ary
+ #
+ # :call-seq:
+ # to_ary()
+ #
# Returns a new array of objects from the collection. If the collection
# hasn't been loaded, it fetches the records from the database.
#
@@ -974,18 +996,15 @@ module ActiveRecord
# # #<Pet id: 5, name: "Brain", person_id: 1>,
# # #<Pet id: 6, name: "Boss", person_id: 1>
# # ]
- def to_ary
- load_target.dup
- end
- alias_method :to_a, :to_ary
def records # :nodoc:
load_target
end
# Adds one or more +records+ to the collection by setting their foreign keys
- # to the association's primary key. Returns +self+, so several appends may be
- # chained together.
+ # to the association's primary key. Since +<<+ flattens its argument list and
+ # inserts each record, +push+ and +concat+ behave identically. Returns +self+
+ # so several appends may be chained together.
#
# class Person < ActiveRecord::Base
# has_many :pets
@@ -1008,6 +1027,7 @@ module ActiveRecord
end
alias_method :push, :<<
alias_method :append, :<<
+ alias_method :concat, :<<
def prepend(*args)
raise NoMethodError, "prepend on association is not defined. Please use <<, push or append"
@@ -1025,7 +1045,6 @@ module ActiveRecord
end
# Reloads the collection from the database. Returns +self+.
- # Equivalent to <tt>collection(true)</tt>.
#
# class Person < ActiveRecord::Base
# has_many :pets
@@ -1039,12 +1058,9 @@ module ActiveRecord
#
# person.pets.reload # fetches pets from the database
# # => [#<Pet id: 1, name: "Snoop", group: "dogs", person_id: 1>]
- #
- # person.pets(true) # fetches pets from the database
- # # => [#<Pet id: 1, name: "Snoop", group: "dogs", person_id: 1>]
def reload
- proxy_association.reload
- self
+ proxy_association.reload(true)
+ reset_scope
end
# Unloads the association. Returns +self+.
@@ -1066,8 +1082,47 @@ module ActiveRecord
def reset
proxy_association.reset
proxy_association.reset_scope
+ reset_scope
+ end
+
+ def reset_scope # :nodoc:
+ @offsets = {}
+ @scope = nil
self
end
+
+ delegate_methods = [
+ QueryMethods,
+ SpawnMethods,
+ ].flat_map { |klass|
+ klass.public_instance_methods(false)
+ } - self.public_instance_methods(false) - [:select] + [:scoping, :values]
+
+ delegate(*delegate_methods, to: :scope)
+
+ private
+
+ def find_nth_with_limit(index, limit)
+ load_target if find_from_target?
+ super
+ end
+
+ def find_nth_from_last(index)
+ load_target if find_from_target?
+ super
+ end
+
+ def null_scope?
+ @association.null_scope?
+ end
+
+ def find_from_target?
+ @association.find_from_target?
+ end
+
+ def exec_queries
+ load_target
+ end
end
end
end
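With the proxy now caching its own scope, calculate and pluck short-circuit through the null scope when the owner is unsaved, so they answer without touching the database; a sketch assuming the same hypothetical Person/Pet setup.

    class Person < ActiveRecord::Base
      has_many :pets
    end

    person = Person.new       # unsaved owner => null_scope? is true
    person.pets.pluck(:name)  # => [] without running a query
    person.pets.sum(:age)     # => 0, calculated against the none-relation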
diff --git a/activerecord/lib/active_record/associations/foreign_association.rb b/activerecord/lib/active_record/associations/foreign_association.rb
index 3ceec0ee46..59af6f54c3 100644
--- a/activerecord/lib/active_record/associations/foreign_association.rb
+++ b/activerecord/lib/active_record/associations/foreign_association.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
module ActiveRecord::Associations
module ForeignAssociation # :nodoc:
def foreign_key_present?
@@ -7,5 +9,12 @@ module ActiveRecord::Associations
false
end
end
+
+ def nullified_owner_attributes
+ Hash.new.tap do |attrs|
+ attrs[reflection.foreign_key] = nil
+ attrs[reflection.type] = nil if reflection.type.present?
+ end
+ end
end
end
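nullified_owner_attributes is shared by the nullify paths; for a polymorphic has_many it clears the type column along with the foreign key. A rough sketch (hypothetical Product/Comment models, approximate SQL):

    class Product < ActiveRecord::Base
      has_many :comments, as: :commentable, dependent: :nullify
    end

    class Comment < ActiveRecord::Base
      belongs_to :commentable, polymorphic: true
    end

    Product.first.destroy
    # The nullify pass issues, roughly:
    #   UPDATE comments SET commentable_id = NULL, commentable_type = NULL
    #   WHERE comments.commentable_id = ? AND comments.commentable_type = 'Product'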
diff --git a/activerecord/lib/active_record/associations/has_many_association.rb b/activerecord/lib/active_record/associations/has_many_association.rb
index a9f6aaafef..5972846940 100644
--- a/activerecord/lib/active_record/associations/has_many_association.rb
+++ b/activerecord/lib/active_record/associations/has_many_association.rb
@@ -1,6 +1,8 @@
+# frozen_string_literal: true
+
module ActiveRecord
- # = Active Record Has Many Association
module Associations
+ # = Active Record Has Many Association
# This is the proxy that handles a has many association.
#
# If the association has a <tt>:through</tt> option further specialization
@@ -16,45 +18,22 @@ module ActiveRecord
when :restrict_with_error
unless empty?
record = owner.class.human_attribute_name(reflection.name).downcase
- message = owner.errors.generate_message(:base, :'restrict_dependent_destroy.many', record: record, raise: true) rescue nil
- if message
- ActiveSupport::Deprecation.warn(<<-MESSAGE.squish)
- The error key `:'restrict_dependent_destroy.many'` has been deprecated and will be removed in Rails 5.1.
- Please use `:'restrict_dependent_destroy.has_many'` instead.
- MESSAGE
- end
- owner.errors.add(:base, message || :'restrict_dependent_destroy.has_many', record: record)
+ owner.errors.add(:base, :'restrict_dependent_destroy.has_many', record: record)
throw(:abort)
end
+ when :destroy
+ # No point in executing the counter update since we're going to destroy the parent anyway
+ load_target.each { |t| t.destroyed_by_association = reflection }
+ destroy_all
else
- if options[:dependent] == :destroy
- # No point in executing the counter update since we're going to destroy the parent anyway
- load_target.each { |t| t.destroyed_by_association = reflection }
- destroy_all
- else
- delete_all
- end
+ delete_all
end
end
def insert_record(record, validate = true, raise = false)
set_owner_attributes(record)
- set_inverse_instance(record)
-
- if raise
- record.save!(:validate => validate)
- else
- record.save(:validate => validate)
- end
- end
-
- def empty?
- if reflection.has_cached_counter?
- size.zero?
- else
- super
- end
+ super
end
private
@@ -74,15 +53,15 @@ module ActiveRecord
# the loaded flag is set to true as well.
def count_records
count = if reflection.has_cached_counter?
- owner._read_attribute reflection.counter_cache_column
+ owner._read_attribute(reflection.counter_cache_column).to_i
else
- scope.count
+ scope.count(:all)
end
# If there's nothing in the database and @target has no new records
# we are certain the current target is an empty array. This is a
# documented side-effect of the method that may avoid an extra SELECT.
- @target ||= [] and loaded! if count == 0
+ loaded! if count == 0
[association_scope.limit_value, count].compact.min
end
@@ -105,13 +84,14 @@ module ActiveRecord
if method == :delete_all
scope.delete_all
else
- scope.update_all(reflection.foreign_key => nil)
+ scope.update_all(nullified_owner_attributes)
end
end
def delete_or_nullify_all_records(method)
- count = delete_count(method, self.scope)
+ count = delete_count(method, scope)
update_counter(-count)
+ count
end
# Deletes the records according to the <tt>:dependent</tt> option.
@@ -143,6 +123,14 @@ module ActiveRecord
end
saved_successfully
end
+
+ def difference(a, b)
+ a - b
+ end
+
+ def intersection(a, b)
+ a & b
+ end
end
end
end
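handle_dependency above folds :destroy into the case statement; children are flagged with destroyed_by_association first so their counter caches are skipped. A sketch with hypothetical Author/Book models:

    class Author < ActiveRecord::Base
      has_many :books, dependent: :destroy
    end

    Author.first.destroy
    # Each book is loaded, marked destroyed_by_association, then destroyed.
    # With dependent: :restrict_with_error an error keyed
    # :'restrict_dependent_destroy.has_many' is added instead and the destroy aborts.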
diff --git a/activerecord/lib/active_record/associations/has_many_through_association.rb b/activerecord/lib/active_record/associations/has_many_through_association.rb
index 36fc381343..0d384950fe 100644
--- a/activerecord/lib/active_record/associations/has_many_through_association.rb
+++ b/activerecord/lib/active_record/associations/has_many_through_association.rb
@@ -1,14 +1,14 @@
+# frozen_string_literal: true
+
module ActiveRecord
- # = Active Record Has Many Through Association
module Associations
+ # = Active Record Has Many Through Association
class HasManyThroughAssociation < HasManyAssociation #:nodoc:
include ThroughAssociation
def initialize(owner, reflection)
super
-
- @through_records = {}
- @through_association = nil
+ @through_records = {}
end
def concat(*records)
@@ -21,27 +21,11 @@ module ActiveRecord
super
end
- def concat_records(records)
- ensure_not_nested
-
- records = super(records, true)
-
- if owner.new_record? && records
- records.flatten.each do |record|
- build_through_record(record)
- end
- end
-
- records
- end
-
def insert_record(record, validate = true, raise = false)
ensure_not_nested
- if raise
- record.save!(:validate => validate)
- else
- return unless record.save(:validate => validate)
+ if record.new_record? || record.has_changes_to_save?
+ return unless super
end
save_through_record(record)
@@ -50,9 +34,18 @@ module ActiveRecord
end
private
+ def concat_records(records)
+ ensure_not_nested
+
+ records = super(records, true)
+
+ if owner.new_record? && records
+ records.flatten.each do |record|
+ build_through_record(record)
+ end
+ end
- def through_association
- @through_association ||= owner.association(through_reflection.name)
+ records
end
# The through record (built with build_record) is temporarily cached
@@ -64,21 +57,14 @@ module ActiveRecord
@through_records[record.object_id] ||= begin
ensure_mutable
- through_record = through_association.build(*options_for_through_record)
- through_record.send("#{source_reflection.name}=", record)
-
- if options[:source_type]
- through_record.send("#{source_reflection.foreign_type}=", options[:source_type])
- end
+ attributes = through_scope_attributes
+ attributes[source_reflection.name] = record
+ attributes[source_reflection.foreign_type] = options[:source_type] if options[:source_type]
- through_record
+ through_association.build(attributes)
end
end
- def options_for_through_record
- [through_scope_attributes]
- end
-
def through_scope_attributes
scope.where_values_hash(through_association.reflection.name.to_s).
except!(through_association.reflection.foreign_key,
@@ -86,7 +72,10 @@ module ActiveRecord
end
def save_through_record(record)
- build_through_record(record).save!
+ association = build_through_record(record)
+ if association.changed?
+ association.save!
+ end
ensure
@through_records.delete(record.object_id)
end
@@ -94,7 +83,7 @@ module ActiveRecord
def build_record(attributes)
ensure_not_nested
- record = super(attributes)
+ record = super
inverse = source_reflection.inverse_of
if inverse
@@ -108,6 +97,11 @@ module ActiveRecord
record
end
+ def remove_records(existing_records, records, method)
+ super
+ delete_through_records(records)
+ end
+
def target_reflection_has_associated_record?
!(through_reflection.belongs_to? && owner[through_reflection.foreign_key].blank?)
end
@@ -132,21 +126,15 @@ module ActiveRecord
scope = through_association.scope
scope.where! construct_join_attributes(*records)
+ scope = scope.where(through_scope_attributes)
case method
when :destroy
if scope.klass.primary_key
- count = scope.destroy_all.length
+ count = scope.destroy_all.count(&:destroyed?)
else
scope.each(&:_run_destroy_callbacks)
-
- arel = scope.arel
-
- stmt = Arel::DeleteManager.new
- stmt.from scope.klass.arel_table
- stmt.wheres = arel.constraints
-
- count = scope.klass.connection.delete(stmt, 'SQL', scope.bound_attributes)
+ count = scope.delete_all
end
when :nullify
count = scope.update_all(source_reflection.foreign_key => nil)
@@ -166,6 +154,30 @@ module ActiveRecord
else
update_counter(-count)
end
+
+ count
+ end
+
+ def difference(a, b)
+ distribution = distribution(b)
+
+ a.reject { |record| mark_occurrence(distribution, record) }
+ end
+
+ def intersection(a, b)
+ distribution = distribution(b)
+
+ a.select { |record| mark_occurrence(distribution, record) }
+ end
+
+ def mark_occurrence(distribution, record)
+ distribution[record] > 0 && distribution[record] -= 1
+ end
+
+ def distribution(array)
+ array.each_with_object(Hash.new(0)) do |record, distribution|
+ distribution[record] += 1
+ end
end
def through_records_for(record)
@@ -196,7 +208,7 @@ module ActiveRecord
def find_target
return [] unless target_reflection_has_associated_record?
- get_records
+ super
end
# NOTE - not sure that we can actually cope with inverses here
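
The Physician/Appointment/Patient models below (borrowed from the doc comment removed further down in this diff) give a rough sketch of the has_many :through paths changed above; treat it as illustration only:

    class Physician < ActiveRecord::Base
      has_many :appointments
      has_many :patients, through: :appointments
    end

    class Appointment < ActiveRecord::Base
      belongs_to :physician
      belongs_to :patient
    end

    class Patient < ActiveRecord::Base
      has_many :appointments
    end

    physician = Physician.create!
    patient   = Patient.create!

    # concat goes through insert_record/save_through_record: the Appointment
    # join row is built via build_through_record and, with the change above,
    # saved only when it actually has changes.
    physician.patients << patient

    # delete_records removes the join rows; the returned count feeds
    # update_counter on the owning side.
    physician.patients.delete(patient)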
diff --git a/activerecord/lib/active_record/associations/has_one_association.rb b/activerecord/lib/active_record/associations/has_one_association.rb
index 0fe9b2e81b..99971286a3 100644
--- a/activerecord/lib/active_record/associations/has_one_association.rb
+++ b/activerecord/lib/active_record/associations/has_one_association.rb
@@ -1,6 +1,8 @@
+# frozen_string_literal: true
+
module ActiveRecord
- # = Active Record Has One Association
module Associations
+ # = Active Record Has One Association
class HasOneAssociation < SingularAssociation #:nodoc:
include ForeignAssociation
@@ -12,14 +14,7 @@ module ActiveRecord
when :restrict_with_error
if load_target
record = owner.class.human_attribute_name(reflection.name).downcase
- message = owner.errors.generate_message(:base, :'restrict_dependent_destroy.one', record: record, raise: true) rescue nil
- if message
- ActiveSupport::Deprecation.warn(<<-MESSAGE.squish)
- The error key `:'restrict_dependent_destroy.one'` has been deprecated and will be removed in Rails 5.1.
- Please use `:'restrict_dependent_destroy.has_one'` instead.
- MESSAGE
- end
- owner.errors.add(:base, message || :'restrict_dependent_destroy.has_one', record: record)
+ owner.errors.add(:base, :'restrict_dependent_destroy.has_one', record: record)
throw(:abort)
end
@@ -28,49 +23,49 @@ module ActiveRecord
end
end
- def replace(record, save = true)
- raise_on_type_mismatch!(record) if record
- load_target
+ def delete(method = options[:dependent])
+ if load_target
+ case method
+ when :delete
+ target.delete
+ when :destroy
+ target.destroyed_by_association = reflection
+ target.destroy
+ throw(:abort) unless target.destroyed?
+ when :nullify
+ target.update_columns(nullified_owner_attributes) if target.persisted?
+ end
+ end
+ end
+
+ private
+ def replace(record, save = true)
+ raise_on_type_mismatch!(record) if record
- return self.target if !(target || record)
+ return target unless load_target || record
- assigning_another_record = target != record
- if assigning_another_record || record.changed?
- save &&= owner.persisted?
+ assigning_another_record = target != record
+ if assigning_another_record || record.has_changes_to_save?
+ save &&= owner.persisted?
- transaction_if(save) do
- remove_target!(options[:dependent]) if target && !target.destroyed? && assigning_another_record
+ transaction_if(save) do
+ remove_target!(options[:dependent]) if target && !target.destroyed? && assigning_another_record
- if record
- set_owner_attributes(record)
- set_inverse_instance(record)
+ if record
+ set_owner_attributes(record)
+ set_inverse_instance(record)
- if save && !record.save
- nullify_owner_attributes(record)
- set_owner_attributes(target) if target
- raise RecordNotSaved, "Failed to save the new associated #{reflection.name}."
+ if save && !record.save
+ nullify_owner_attributes(record)
+ set_owner_attributes(target) if target
+ raise RecordNotSaved, "Failed to save the new associated #{reflection.name}."
+ end
end
end
end
- end
- self.target = record
- end
-
- def delete(method = options[:dependent])
- if load_target
- case method
- when :delete
- target.delete
- when :destroy
- target.destroy
- when :nullify
- target.update_columns(reflection.foreign_key => nil) if target.persisted?
- end
+ self.target = record
end
- end
-
- private
# The reason that the save param for replace is false, if for create (not just build),
# is because the setting of the foreign keys is actually handled by the scoping when
@@ -82,18 +77,20 @@ module ActiveRecord
def remove_target!(method)
case method
- when :delete
- target.delete
- when :destroy
- target.destroy
- else
- nullify_owner_attributes(target)
-
- if target.persisted? && owner.persisted? && !target.save
- set_owner_attributes(target)
- raise RecordNotSaved, "Failed to remove the existing associated #{reflection.name}. " +
- "The record failed to save after its foreign key was set to nil."
- end
+ when :delete
+ target.delete
+ when :destroy
+ target.destroyed_by_association = reflection
+ target.destroy
+ else
+ nullify_owner_attributes(target)
+ remove_inverse_instance(target)
+
+ if target.persisted? && owner.persisted? && !target.save
+ set_owner_attributes(target)
+ raise RecordNotSaved, "Failed to remove the existing associated #{reflection.name}. " \
+ "The record failed to save after its foreign key was set to nil."
+ end
end
end
@@ -108,6 +105,14 @@ module ActiveRecord
yield
end
end
+
+ def _create_record(attributes, raise_error = false, &block)
+ unless owner.persisted?
+ raise ActiveRecord::RecordNotSaved, "You cannot call create unless the parent is saved"
+ end
+
+ super
+ end
end
end
end
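
A hedged sketch of the has_one replace/remove flow reworked above, using hypothetical Supplier/Account models:

    class Supplier < ActiveRecord::Base
      has_one :account, dependent: :destroy
    end

    class Account < ActiveRecord::Base
      belongs_to :supplier
    end

    supplier = Supplier.create!
    supplier.create_account

    # Assigning a different record runs replace: the old target is removed
    # per :dependent (destroyed here, with destroyed_by_association set so
    # callbacks can tell the destroy came from the association).
    supplier.account = Account.new

    # Per _create_record above, create on an unsaved parent raises.
    begin
      Supplier.new.create_account
    rescue ActiveRecord::RecordNotSaved
      # "You cannot call create unless the parent is saved"
    end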
diff --git a/activerecord/lib/active_record/associations/has_one_through_association.rb b/activerecord/lib/active_record/associations/has_one_through_association.rb
index 08e0ec691f..10978b2d93 100644
--- a/activerecord/lib/active_record/associations/has_one_through_association.rb
+++ b/activerecord/lib/active_record/associations/has_one_through_association.rb
@@ -1,30 +1,39 @@
+# frozen_string_literal: true
+
module ActiveRecord
- # = Active Record Has One Through Association
module Associations
+ # = Active Record Has One Through Association
class HasOneThroughAssociation < HasOneAssociation #:nodoc:
include ThroughAssociation
- def replace(record)
- create_through_record(record)
- self.target = record
- end
-
private
+ def replace(record, save = true)
+ create_through_record(record, save)
+ self.target = record
+ end
- def create_through_record(record)
+ def create_through_record(record, save)
ensure_not_nested
- through_proxy = owner.association(through_reflection.name)
- through_record = through_proxy.send(:load_target)
+ through_proxy = through_association
+ through_record = through_proxy.load_target
if through_record && !record
through_record.destroy
elsif record
attributes = construct_join_attributes(record)
+ if through_record && through_record.destroyed?
+ through_record = through_proxy.tap(&:reload).target
+ end
+
if through_record
- through_record.update(attributes)
- elsif owner.new_record?
+ if through_record.new_record?
+ through_record.assign_attributes(attributes)
+ else
+ through_record.update(attributes)
+ end
+ elsif owner.new_record? || !save
through_proxy.build(attributes)
else
through_proxy.create(attributes)
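
For orientation, the classic has_one :through shape exercised by create_through_record above (hypothetical Supplier/Account/AccountHistory models):

    class Supplier < ActiveRecord::Base
      has_one :account
      has_one :account_history, through: :account
    end

    class Account < ActiveRecord::Base
      belongs_to :supplier
      has_one :account_history
    end

    class AccountHistory < ActiveRecord::Base
      belongs_to :account
    end

    supplier = Supplier.create!(account: Account.new)

    # replace builds or updates the through (Account) record with the join
    # attributes; on a new or unsaved owner the through record is only built.
    supplier.account_history = AccountHistory.new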
diff --git a/activerecord/lib/active_record/associations/join_dependency.rb b/activerecord/lib/active_record/associations/join_dependency.rb
index b94feeff12..b76005b587 100644
--- a/activerecord/lib/active_record/associations/join_dependency.rb
+++ b/activerecord/lib/active_record/associations/join_dependency.rb
@@ -1,21 +1,21 @@
+# frozen_string_literal: true
+
module ActiveRecord
module Associations
class JoinDependency # :nodoc:
- autoload :JoinBase, 'active_record/associations/join_dependency/join_base'
- autoload :JoinAssociation, 'active_record/associations/join_dependency/join_association'
+ autoload :JoinBase, "active_record/associations/join_dependency/join_base"
+ autoload :JoinAssociation, "active_record/associations/join_dependency/join_association"
class Aliases # :nodoc:
def initialize(tables)
@tables = tables
- @alias_cache = tables.each_with_object({}) { |table,h|
- h[table.node] = table.columns.each_with_object({}) { |column,i|
+ @alias_cache = tables.each_with_object({}) { |table, h|
+ h[table.node] = table.columns.each_with_object({}) { |column, i|
i[column.name] = column.alias
}
}
- @name_and_alias_cache = tables.each_with_object({}) { |table,h|
- h[table.node] = table.columns.map { |column|
- [column.name, column.alias]
- }
+ @columns_cache = tables.each_with_object({}) { |table, h|
+ h[table.node] = table.columns
}
end
@@ -23,30 +23,23 @@ module ActiveRecord
@tables.flat_map(&:column_aliases)
end
- # An array of [column_name, alias] pairs for the table
def column_aliases(node)
- @name_and_alias_cache[node]
+ @columns_cache[node]
end
def column_alias(node, column)
@alias_cache[node][column]
end
- class Table < Struct.new(:node, :columns) # :nodoc:
- def table
- Arel::Nodes::TableAlias.new node.table, node.aliased_table_name
- end
-
+ Table = Struct.new(:node, :columns) do # :nodoc:
def column_aliases
- t = table
+ t = node.table
columns.map { |column| t[column.name].as Arel.sql column.alias }
end
end
Column = Struct.new(:name, :alias)
end
- attr_reader :alias_tracker, :base_klass, :join_root
-
def self.make_tree(associations)
hash = {}
walk_tree associations, hash
@@ -62,7 +55,7 @@ module ActiveRecord
walk_tree assoc, hash
end
when Hash
- associations.each do |k,v|
+ associations.each do |k, v|
cache = hash[k] ||= {}
walk_tree v, cache
end
@@ -71,69 +64,32 @@ module ActiveRecord
end
end
- # base is the base class on which operation is taking place.
- # associations is the list of associations which are joined using hash, symbol or array.
- # joins is the list of all string join commands and arel nodes.
- #
- # Example :
- #
- # class Physician < ActiveRecord::Base
- # has_many :appointments
- # has_many :patients, through: :appointments
- # end
- #
- # If I execute `@physician.patients.to_a` then
- # base # => Physician
- # associations # => []
- # joins # => [#<Arel::Nodes::InnerJoin: ...]
- #
- # However if I execute `Physician.joins(:appointments).to_a` then
- # base # => Physician
- # associations # => [:appointments]
- # joins # => []
- #
- def initialize(base, associations, joins)
- @alias_tracker = AliasTracker.create_with_joins(base.connection, base.table_name, joins, base.type_caster)
+ def initialize(base, table, associations)
tree = self.class.make_tree associations
- @join_root = JoinBase.new base, build(tree, base)
- @join_root.children.each { |child| construct_tables! @join_root, child }
+ @join_root = JoinBase.new(base, table, build(tree, base))
end
def reflections
join_root.drop(1).map!(&:reflection)
end
- def join_constraints(outer_joins, join_type)
- joins = join_root.children.flat_map { |child|
+ def join_constraints(joins_to_add, join_type, alias_tracker)
+ @alias_tracker = alias_tracker
- if join_type == Arel::Nodes::OuterJoin
- make_left_outer_joins join_root, child
- else
- make_inner_joins join_root, child
- end
- }
+ construct_tables!(join_root)
+ joins = make_join_constraints(join_root, join_type)
- joins.concat outer_joins.flat_map { |oj|
+ joins.concat joins_to_add.flat_map { |oj|
+ construct_tables!(oj.join_root)
if join_root.match? oj.join_root
walk join_root, oj.join_root
else
- oj.join_root.children.flat_map { |child|
- make_outer_joins oj.join_root, child
- }
+ make_join_constraints(oj.join_root, join_type)
end
}
end
- def aliases
- Aliases.new join_root.each_with_index.map { |join_part,i|
- columns = join_part.column_names.each_with_index.map { |column_name,j|
- Aliases::Column.new column_name, "t#{i}_r#{j}"
- }
- Aliases::Table.new(join_part, columns)
- }
- end
-
- def instantiate(result_set, aliases)
+ def instantiate(result_set, &block)
primary_key = aliases.column_alias(join_root, join_root.primary_key)
seen = Hash.new { |i, object_id|
@@ -142,7 +98,7 @@ module ActiveRecord
}
}
- model_cache = Hash.new { |h,klass| h[klass] = {} }
+ model_cache = Hash.new { |h, klass| h[klass] = {} }
parents = model_cache[join_root]
column_aliases = aliases.column_aliases join_root
@@ -153,146 +109,149 @@ module ActiveRecord
class_name: join_root.base_klass.name
}
- message_bus.instrument('instantiation.active_record', payload) do
+ message_bus.instrument("instantiation.active_record", payload) do
result_set.each { |row_hash|
parent_key = primary_key ? row_hash[primary_key] : row_hash
- parent = parents[parent_key] ||= join_root.instantiate(row_hash, column_aliases)
- construct(parent, join_root, row_hash, result_set, seen, model_cache, aliases)
+ parent = parents[parent_key] ||= join_root.instantiate(row_hash, column_aliases, &block)
+ construct(parent, join_root, row_hash, seen, model_cache)
}
end
parents.values
end
- private
-
- def make_constraints(parent, child, tables, join_type)
- chain = child.reflection.chain
- foreign_table = parent.table
- foreign_klass = parent.base_klass
- child.join_constraints(foreign_table, foreign_klass, child, join_type, tables, child.reflection.scope_chain, chain)
+ def apply_column_aliases(relation)
+ relation._select!(-> { aliases.columns })
end
- def make_outer_joins(parent, child)
- tables = table_aliases_for(parent, child)
- join_type = Arel::Nodes::OuterJoin
- info = make_constraints parent, child, tables, join_type
+ protected
+ attr_reader :join_root
- [info] + child.children.flat_map { |c| make_outer_joins(child, c) }
- end
+ private
+ attr_reader :alias_tracker
- def make_left_outer_joins(parent, child)
- tables = child.tables
- join_type = Arel::Nodes::OuterJoin
- info = make_constraints parent, child, tables, join_type
+ def aliases
+ @aliases ||= Aliases.new join_root.each_with_index.map { |join_part, i|
+ columns = join_part.column_names.each_with_index.map { |column_name, j|
+ Aliases::Column.new column_name, "t#{i}_r#{j}"
+ }
+ Aliases::Table.new(join_part, columns)
+ }
+ end
- [info] + child.children.flat_map { |c| make_left_outer_joins(child, c) }
- end
+ def construct_tables!(join_root)
+ join_root.each_children do |parent, child|
+ child.tables = table_aliases_for(parent, child)
+ end
+ end
- def make_inner_joins(parent, child)
- tables = child.tables
- join_type = Arel::Nodes::InnerJoin
- info = make_constraints parent, child, tables, join_type
+ def make_join_constraints(join_root, join_type)
+ join_root.children.flat_map do |child|
+ make_constraints(join_root, child, join_type)
+ end
+ end
- [info] + child.children.flat_map { |c| make_inner_joins(child, c) }
- end
+ def make_constraints(parent, child, join_type = Arel::Nodes::OuterJoin)
+ foreign_table = parent.table
+ foreign_klass = parent.base_klass
+ joins = child.join_constraints(foreign_table, foreign_klass, join_type, alias_tracker)
+ joins.concat child.children.flat_map { |c| make_constraints(child, c, join_type) }
+ end
- def table_aliases_for(parent, node)
- node.reflection.chain.map { |reflection|
- alias_tracker.aliased_table_for(
- reflection.table_name,
- table_alias_for(reflection, parent, reflection != node.reflection)
- )
- }
- end
+ def table_aliases_for(parent, node)
+ node.reflection.chain.map { |reflection|
+ alias_tracker.aliased_table_for(
+ reflection.table_name,
+ table_alias_for(reflection, parent, reflection != node.reflection),
+ reflection.klass.type_caster
+ )
+ }
+ end
- def construct_tables!(parent, node)
- node.tables = table_aliases_for(parent, node)
- node.children.each { |child| construct_tables! node, child }
- end
+ def table_alias_for(reflection, parent, join)
+ name = reflection.alias_candidate(parent.table_name)
+ join ? "#{name}_join" : name
+ end
- def table_alias_for(reflection, parent, join)
- name = "#{reflection.plural_name}_#{parent.table_name}"
- name << "_join" if join
- name
- end
+ def walk(left, right)
+ intersection, missing = right.children.map { |node1|
+ [left.children.find { |node2| node1.match? node2 }, node1]
+ }.partition(&:first)
- def walk(left, right)
- intersection, missing = right.children.map { |node1|
- [left.children.find { |node2| node1.match? node2 }, node1]
- }.partition(&:first)
+ joins = intersection.flat_map { |l, r| r.table = l.table; walk(l, r) }
+ joins.concat missing.flat_map { |_, n| make_constraints(left, n) }
+ end
- ojs = missing.flat_map { |_,n| make_outer_joins left, n }
- intersection.flat_map { |l,r| walk l, r }.concat ojs
- end
+ def find_reflection(klass, name)
+ klass._reflect_on_association(name) ||
+ raise(ConfigurationError, "Can't join '#{klass.name}' to association named '#{name}'; perhaps you misspelled it?")
+ end
- def find_reflection(klass, name)
- klass._reflect_on_association(name) or
- raise ConfigurationError, "Can't join '#{ klass.name }' to association named '#{ name }'; perhaps you misspelled it?"
- end
+ def build(associations, base_klass)
+ associations.map do |name, right|
+ reflection = find_reflection base_klass, name
+ reflection.check_validity!
+ reflection.check_eager_loadable!
- def build(associations, base_klass)
- associations.map do |name, right|
- reflection = find_reflection base_klass, name
- reflection.check_validity!
- reflection.check_eager_loadable!
+ if reflection.polymorphic?
+ raise EagerLoadPolymorphicError.new(reflection)
+ end
- if reflection.polymorphic?
- raise EagerLoadPolymorphicError.new(reflection)
+ JoinAssociation.new(reflection, build(right, reflection.klass))
end
-
- JoinAssociation.new reflection, build(right, reflection.klass)
end
- end
-
- def construct(ar_parent, parent, row, rs, seen, model_cache, aliases)
- return if ar_parent.nil?
- parent.children.each do |node|
- if node.reflection.collection?
- other = ar_parent.association(node.reflection.name)
- other.loaded!
- elsif ar_parent.association_cached?(node.reflection.name)
- model = ar_parent.association(node.reflection.name).target
- construct(model, node, row, rs, seen, model_cache, aliases)
- next
+ def construct(ar_parent, parent, row, seen, model_cache)
+ return if ar_parent.nil?
+
+ parent.children.each do |node|
+ if node.reflection.collection?
+ other = ar_parent.association(node.reflection.name)
+ other.loaded!
+ elsif ar_parent.association_cached?(node.reflection.name)
+ model = ar_parent.association(node.reflection.name).target
+ construct(model, node, row, seen, model_cache)
+ next
+ end
+
+ key = aliases.column_alias(node, node.primary_key)
+ id = row[key]
+ if id.nil?
+ nil_association = ar_parent.association(node.reflection.name)
+ nil_association.loaded!
+ next
+ end
+
+ model = seen[ar_parent.object_id][node][id]
+
+ if model
+ construct(model, node, row, seen, model_cache)
+ else
+ model = construct_model(ar_parent, node, row, model_cache, id)
+
+ seen[ar_parent.object_id][node][id] = model
+ construct(model, node, row, seen, model_cache)
+ end
end
+ end
- key = aliases.column_alias(node, node.primary_key)
- id = row[key]
- if id.nil?
- nil_association = ar_parent.association(node.reflection.name)
- nil_association.loaded!
- next
- end
+ def construct_model(record, node, row, model_cache, id)
+ other = record.association(node.reflection.name)
- model = seen[ar_parent.object_id][node.base_klass][id]
+ model = model_cache[node][id] ||=
+ node.instantiate(row, aliases.column_aliases(node)) do |m|
+ other.set_inverse_instance(m)
+ end
- if model
- construct(model, node, row, rs, seen, model_cache, aliases)
+ if node.reflection.collection?
+ other.target.push(model)
else
- model = construct_model(ar_parent, node, row, model_cache, id, aliases)
- model.readonly!
- seen[ar_parent.object_id][node.base_klass][id] = model
- construct(model, node, row, rs, seen, model_cache, aliases)
+ other.target = model
end
- end
- end
-
- def construct_model(record, node, row, model_cache, id, aliases)
- model = model_cache[node][id] ||= node.instantiate(row,
- aliases.column_aliases(node))
- other = record.association(node.reflection.name)
- if node.reflection.collection?
- other.target.push(model)
- else
- other.target = model
+ model.readonly! if node.readonly?
+ model
end
-
- other.set_inverse_instance(model)
- model
- end
end
end
end
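
The removed initializer comment above documented the two public entry points that still drive JoinDependency; a short sketch, reusing the hypothetical Physician/Appointment models from earlier:

    # joins builds INNER JOIN Arel nodes through join_constraints.
    Physician.joins(:appointments).to_a

    # eager_load additionally selects the t0_r0-style column aliases that
    # instantiate/construct use to hydrate the associated records from a
    # single result set.
    Physician.eager_load(appointments: :patient).to_a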
diff --git a/activerecord/lib/active_record/associations/join_dependency/join_association.rb b/activerecord/lib/active_record/associations/join_dependency/join_association.rb
index c5fbe0d1d1..ca0305abbb 100644
--- a/activerecord/lib/active_record/associations/join_dependency/join_association.rb
+++ b/activerecord/lib/active_record/associations/join_dependency/join_association.rb
@@ -1,19 +1,20 @@
-require 'active_record/associations/join_dependency/join_part'
+# frozen_string_literal: true
+
+require "active_record/associations/join_dependency/join_part"
+require "active_support/core_ext/array/extract"
module ActiveRecord
module Associations
class JoinDependency # :nodoc:
class JoinAssociation < JoinPart # :nodoc:
- # The reflection of the association represented
- attr_reader :reflection
-
- attr_accessor :tables
+ attr_reader :reflection, :tables
+ attr_accessor :table
def initialize(reflection, children)
super(reflection.klass, children)
- @reflection = reflection
- @tables = nil
+ @reflection = reflection
+ @tables = nil
end
def match?(other)
@@ -21,112 +22,48 @@ module ActiveRecord
super && reflection == other.reflection
end
- JoinInformation = Struct.new :joins, :binds
-
- def join_constraints(foreign_table, foreign_klass, node, join_type, tables, scope_chain, chain)
- joins = []
- binds = []
- tables = tables.reverse
-
- scope_chain_index = 0
- scope_chain = scope_chain.reverse
+ def join_constraints(foreign_table, foreign_klass, join_type, alias_tracker)
+ joins = []
# The chain starts with the target table, but we want to end with it here (makes
# more sense in this context), so we reverse
- chain.reverse_each do |reflection|
- table = tables.shift
+ reflection.chain.reverse_each.with_index(1) do |reflection, i|
+ table = tables[-i]
klass = reflection.klass
- join_keys = reflection.join_keys(klass)
- key = join_keys.key
- foreign_key = join_keys.foreign_key
+ join_scope = reflection.join_scope(table, foreign_table, foreign_klass)
- constraint = build_constraint(klass, table, key, foreign_table, foreign_key)
+ arel = join_scope.arel(alias_tracker.aliases)
+ nodes = arel.constraints.first
- predicate_builder = PredicateBuilder.new(TableMetadata.new(klass, table))
- scope_chain_items = scope_chain[scope_chain_index].map do |item|
- if item.is_a?(Relation)
- item
- else
- ActiveRecord::Relation.create(klass, table, predicate_builder)
- .instance_exec(node, &item)
- end
- end
- scope_chain_index += 1
-
- klass_scope =
- if klass.current_scope
- klass.current_scope.clone
- else
- relation = ActiveRecord::Relation.create(
- klass,
- table,
- predicate_builder,
- )
- klass.send(:build_default_scope, relation)
- end
- scope_chain_items.concat [klass_scope].compact
-
- rel = scope_chain_items.inject(scope_chain_items.shift) do |left, right|
- left.merge right
+ others = nodes.children.extract! do |node|
+ Arel.fetch_attribute(node) { |attr| attr.relation.name != table.name }
end
- if rel && !rel.arel.constraints.empty?
- binds += rel.bound_attributes
- constraint = constraint.and rel.arel.constraints
- end
+ joins << table.create_join(table, table.create_on(nodes), join_type)
- if reflection.type
- value = foreign_klass.base_class.name
- column = klass.columns_hash[reflection.type.to_s]
-
- binds << Relation::QueryAttribute.new(column.name, value, klass.type_for_attribute(column.name))
- constraint = constraint.and klass.arel_attribute(reflection.type, table).eq(Arel::Nodes::BindParam.new)
+ unless others.empty?
+ joins.concat arel.join_sources
+ right = joins.last.right
+ right.expr.children.concat(others)
end
- joins << table.create_join(table, table.create_on(constraint), join_type)
-
# The current table in this iteration becomes the foreign table in the next
foreign_table, foreign_klass = table, klass
end
- JoinInformation.new joins, binds
+ joins
end
- # Builds equality condition.
- #
- # Example:
- #
- # class Physician < ActiveRecord::Base
- # has_many :appointments
- # end
- #
- # If I execute `Physician.joins(:appointments).to_a` then
- # klass # => Physician
- # table # => #<Arel::Table @name="appointments" ...>
- # key # => physician_id
- # foreign_table # => #<Arel::Table @name="physicians" ...>
- # foreign_key # => id
- #
- def build_constraint(klass, table, key, foreign_table, foreign_key)
- constraint = table[key].eq(foreign_table[foreign_key])
-
- if klass.finder_needs_type_condition?
- constraint = table.create_and([
- constraint,
- klass.send(:type_condition, table)
- ])
- end
-
- constraint
+ def tables=(tables)
+ @tables = tables
+ @table = tables.first
end
- def table
- tables.first
- end
+ def readonly?
+ return @readonly if defined?(@readonly)
- def aliased_table_name
- table.table_alias || table.name
+ @readonly = reflection.scope && reflection.scope_for(base_klass.unscoped).readonly_value
end
end
end
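
A small, hedged illustration of the readonly? predicate added above: a readonly association scope marks records hydrated through the join as readonly (Post/Comment models are hypothetical):

    class Post < ActiveRecord::Base
      has_many :comments, -> { readonly }
    end

    class Comment < ActiveRecord::Base
      belongs_to :post
    end

    post = Post.eager_load(:comments).first
    post.comments.first.readonly?  # => true for records loaded via the join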
diff --git a/activerecord/lib/active_record/associations/join_dependency/join_base.rb b/activerecord/lib/active_record/associations/join_dependency/join_base.rb
index 3a26c25737..988b4e8fa2 100644
--- a/activerecord/lib/active_record/associations/join_dependency/join_base.rb
+++ b/activerecord/lib/active_record/associations/join_dependency/join_base.rb
@@ -1,20 +1,21 @@
-require 'active_record/associations/join_dependency/join_part'
+# frozen_string_literal: true
+
+require "active_record/associations/join_dependency/join_part"
module ActiveRecord
module Associations
class JoinDependency # :nodoc:
class JoinBase < JoinPart # :nodoc:
- def match?(other)
- return true if self == other
- super && base_klass == other.base_klass
- end
+ attr_reader :table
- def table
- base_klass.arel_table
+ def initialize(base_klass, table, children)
+ super(base_klass, children)
+ @table = table
end
- def aliased_table_name
- base_klass.table_name
+ def match?(other)
+ return true if self == other
+ super && base_klass == other.base_klass
end
end
end
diff --git a/activerecord/lib/active_record/associations/join_dependency/join_part.rb b/activerecord/lib/active_record/associations/join_dependency/join_part.rb
index 9c6573f913..3ad72a3646 100644
--- a/activerecord/lib/active_record/associations/join_dependency/join_part.rb
+++ b/activerecord/lib/active_record/associations/join_dependency/join_part.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
module ActiveRecord
module Associations
class JoinDependency # :nodoc:
@@ -15,17 +17,13 @@ module ActiveRecord
# association.
attr_reader :base_klass, :children
- delegate :table_name, :column_names, :primary_key, :to => :base_klass
+ delegate :table_name, :column_names, :primary_key, to: :base_klass
def initialize(base_klass, children)
@base_klass = base_klass
@children = children
end
- def name
- reflection.name
- end
-
def match?(other)
self.class == other.class
end
@@ -35,13 +33,15 @@ module ActiveRecord
children.each { |child| child.each(&block) }
end
- # An Arel::Table for the active_record
- def table
- raise NotImplementedError
+ def each_children(&block)
+ children.each do |child|
+ yield self, child
+ child.each_children(&block)
+ end
end
- # The alias for the active_record's table
- def aliased_table_name
+ # An Arel::Table for the active_record
+ def table
raise NotImplementedError
end
@@ -54,16 +54,16 @@ module ActiveRecord
length = column_names_with_alias.length
while index < length
- column_name, alias_name = column_names_with_alias[index]
- hash[column_name] = row[alias_name]
+ column = column_names_with_alias[index]
+ hash[column.name] = row[column.alias]
index += 1
end
hash
end
- def instantiate(row, aliases)
- base_klass.instantiate(extract_record(row, aliases))
+ def instantiate(row, aliases, &block)
+ base_klass.instantiate(extract_record(row, aliases), &block)
end
end
end
diff --git a/activerecord/lib/active_record/associations/preloader.rb b/activerecord/lib/active_record/associations/preloader.rb
index ecf6fb8643..c7cd87f9d4 100644
--- a/activerecord/lib/active_record/associations/preloader.rb
+++ b/activerecord/lib/active_record/associations/preloader.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
module ActiveRecord
module Associations
# Implements the details of eager loading of Active Record associations.
@@ -42,20 +44,10 @@ module ActiveRecord
extend ActiveSupport::Autoload
eager_autoload do
- autoload :Association, 'active_record/associations/preloader/association'
- autoload :SingularAssociation, 'active_record/associations/preloader/singular_association'
- autoload :CollectionAssociation, 'active_record/associations/preloader/collection_association'
- autoload :ThroughAssociation, 'active_record/associations/preloader/through_association'
-
- autoload :HasMany, 'active_record/associations/preloader/has_many'
- autoload :HasManyThrough, 'active_record/associations/preloader/has_many_through'
- autoload :HasOne, 'active_record/associations/preloader/has_one'
- autoload :HasOneThrough, 'active_record/associations/preloader/has_one_through'
- autoload :BelongsTo, 'active_record/associations/preloader/belongs_to'
+ autoload :Association, "active_record/associations/preloader/association"
+ autoload :ThroughAssociation, "active_record/associations/preloader/through_association"
end
- NULL_RELATION = Struct.new(:values, :where_clause, :joins_values).new({}, Relation::WhereClause.empty, [])
-
# Eager loads the named associations for the given Active Record record(s).
#
# In this description, 'association name' shall refer to the name passed
@@ -91,14 +83,12 @@ module ActiveRecord
# { author: :avatar }
# [ :books, { author: :avatar } ]
def preload(records, associations, preload_scope = nil)
- records = Array.wrap(records).compact.uniq
- associations = Array.wrap(associations)
- preload_scope = preload_scope || NULL_RELATION
+ records = Array.wrap(records).compact
if records.empty?
[]
else
- associations.flat_map { |association|
+ Array.wrap(associations).flat_map { |association|
preloaders_on association, records, preload_scope
}
end
@@ -106,107 +96,107 @@ module ActiveRecord
private
- # Loads all the given data into +records+ for the +association+.
- def preloaders_on(association, records, scope)
- case association
- when Hash
- preloaders_for_hash(association, records, scope)
- when Symbol
- preloaders_for_one(association, records, scope)
- when String
- preloaders_for_one(association.to_sym, records, scope)
- else
- raise ArgumentError, "#{association.inspect} was not recognized for preload"
+ # Loads all the given data into +records+ for the +association+.
+ def preloaders_on(association, records, scope, polymorphic_parent = false)
+ case association
+ when Hash
+ preloaders_for_hash(association, records, scope, polymorphic_parent)
+ when Symbol, String
+ preloaders_for_one(association, records, scope, polymorphic_parent)
+ else
+ raise ArgumentError, "#{association.inspect} was not recognized for preload"
+ end
end
- end
-
- def preloaders_for_hash(association, records, scope)
- association.flat_map { |parent, child|
- loaders = preloaders_for_one parent, records, scope
- recs = loaders.flat_map(&:preloaded_records).uniq
- loaders.concat Array.wrap(child).flat_map { |assoc|
- preloaders_on assoc, recs, scope
+ def preloaders_for_hash(association, records, scope, polymorphic_parent)
+ association.flat_map { |parent, child|
+ grouped_records(parent, records, polymorphic_parent).flat_map do |reflection, reflection_records|
+ loaders = preloaders_for_reflection(reflection, reflection_records, scope)
+ recs = loaders.flat_map(&:preloaded_records)
+ child_polymorphic_parent = reflection && reflection.options[:polymorphic]
+ loaders.concat Array.wrap(child).flat_map { |assoc|
+ preloaders_on assoc, recs, scope, child_polymorphic_parent
+ }
+ loaders
+ end
}
- loaders
- }
- end
-
- # Loads all the given data into +records+ for a singular +association+.
- #
- # Functions by instantiating a preloader class such as Preloader::HasManyThrough and
- # call the +run+ method for each passed in class in the +records+ argument.
- #
- # Not all records have the same class, so group then preload group on the reflection
- # itself so that if various subclass share the same association then we do not split
- # them unnecessarily
- #
- # Additionally, polymorphic belongs_to associations can have multiple associated
- # classes, depending on the polymorphic_type field. So we group by the classes as
- # well.
- def preloaders_for_one(association, records, scope)
- grouped_records(association, records).flat_map do |reflection, klasses|
- klasses.map do |rhs_klass, rs|
- loader = preloader_for(reflection, rs, rhs_klass).new(rhs_klass, rs, reflection, scope)
- loader.run self
- loader
- end
end
- end
- def grouped_records(association, records)
- h = {}
- records.each do |record|
- next unless record
- assoc = record.association(association)
- klasses = h[assoc.reflection] ||= {}
- (klasses[assoc.klass] ||= []) << record
+ # Loads all the given data into +records+ for a singular +association+.
+ #
+        # Functions by instantiating a preloader class such as Preloader::Association and
+        # calling the +run+ method for each passed-in class in the +records+ argument.
+        #
+        # Not all records have the same class, so we group them and preload each group on
+        # the reflection itself, so that if various subclasses share the same association
+        # we do not split them unnecessarily.
+ #
+ # Additionally, polymorphic belongs_to associations can have multiple associated
+ # classes, depending on the polymorphic_type field. So we group by the classes as
+ # well.
+ def preloaders_for_one(association, records, scope, polymorphic_parent)
+ grouped_records(association, records, polymorphic_parent)
+ .flat_map do |reflection, reflection_records|
+ preloaders_for_reflection reflection, reflection_records, scope
+ end
end
- h
- end
- class AlreadyLoaded # :nodoc:
- attr_reader :owners, :reflection
+ def preloaders_for_reflection(reflection, records, scope)
+ records.group_by { |record| record.association(reflection.name).klass }.map do |rhs_klass, rs|
+ preloader_for(reflection, rs).new(rhs_klass, rs, reflection, scope).run
+ end
+ end
- def initialize(klass, owners, reflection, preload_scope)
- @owners = owners
- @reflection = reflection
+ def grouped_records(association, records, polymorphic_parent)
+ h = {}
+ records.each do |record|
+ next unless record
+ reflection = record.class._reflect_on_association(association)
+ next if polymorphic_parent && !reflection || !record.association(association).klass
+ (h[reflection] ||= []) << record
+ end
+ h
end
- def run(preloader); end
+ class AlreadyLoaded # :nodoc:
+ def initialize(klass, owners, reflection, preload_scope)
+ @owners = owners
+ @reflection = reflection
+ end
- def preloaded_records
- owners.flat_map { |owner| owner.association(reflection.name).target }
- end
- end
+ def run
+ self
+ end
- class NullPreloader # :nodoc:
- def self.new(klass, owners, reflection, preload_scope); self; end
- def self.run(preloader); end
- def self.preloaded_records; []; end
- end
+ def preloaded_records
+ @preloaded_records ||= records_by_owner.flat_map(&:last)
+ end
- # Returns a class containing the logic needed to load preload the data
- # and attach it to a relation. For example +Preloader::Association+ or
- # +Preloader::HasManyThrough+. The class returned implements a `run` method
- # that accepts a preloader.
- def preloader_for(reflection, owners, rhs_klass)
- return NullPreloader unless rhs_klass
+ def records_by_owner
+ @records_by_owner ||= owners.each_with_object({}) do |owner, result|
+ result[owner] = Array(owner.association(reflection.name).target)
+ end
+ end
- if owners.first.association(reflection.name).loaded?
- return AlreadyLoaded
+ private
+ attr_reader :owners, :reflection
end
- reflection.check_preloadable!
-
- case reflection.macro
- when :has_many
- reflection.options[:through] ? HasManyThrough : HasMany
- when :has_one
- reflection.options[:through] ? HasOneThrough : HasOne
- when :belongs_to
- BelongsTo
+
+        # Returns a class containing the logic needed to preload the data and
+        # attach it to a relation. The class returned implements a `run` method.
+ def preloader_for(reflection, owners)
+ if owners.first.association(reflection.name).loaded?
+ return AlreadyLoaded
+ end
+ reflection.check_preloadable!
+
+ if reflection.options[:through]
+ ThroughAssociation
+ else
+ Association
+ end
end
- end
end
end
end
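
The preload forms accepted here, using the Book/Author/avatar names from the doc comment quoted above (a sketch, assuming such models exist):

    preloader = ActiveRecord::Associations::Preloader.new
    books = Book.limit(10).to_a

    preloader.preload(books, :author)
    preloader.preload(books, author: :avatar)

    # The same machinery backs the relation API:
    Book.includes(author: :avatar).limit(10).to_a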
diff --git a/activerecord/lib/active_record/associations/preloader/association.rb b/activerecord/lib/active_record/associations/preloader/association.rb
index 3032bc786e..46532f651e 100644
--- a/activerecord/lib/active_record/associations/preloader/association.rb
+++ b/activerecord/lib/active_record/associations/preloader/association.rb
@@ -1,159 +1,136 @@
+# frozen_string_literal: true
+
module ActiveRecord
module Associations
class Preloader
class Association #:nodoc:
- attr_reader :owners, :reflection, :preload_scope, :model, :klass
- attr_reader :preloaded_records
-
def initialize(klass, owners, reflection, preload_scope)
@klass = klass
@owners = owners
@reflection = reflection
@preload_scope = preload_scope
@model = owners.first && owners.first.class
- @scope = nil
- @preloaded_records = []
- end
-
- def run(preloader)
- preload(preloader)
- end
-
- def preload(preloader)
- raise NotImplementedError
- end
-
- def scope
- @scope ||= build_scope
- end
-
- def records_for(ids)
- query_scope(ids)
- end
-
- def query_scope(ids)
- scope.where(association_key_name => ids)
- end
-
- def table
- klass.arel_table
- end
-
- # The name of the key on the associated records
- def association_key_name
- raise NotImplementedError
- end
-
- # This is overridden by HABTM as the condition should be on the foreign_key column in
- # the join table
- def association_key
- klass.arel_attribute(association_key_name, table)
- end
-
- # The name of the key on the model which declares the association
- def owner_key_name
- raise NotImplementedError
- end
-
- def options
- reflection.options
end
- private
-
- def associated_records_by_owner(preloader)
- records = load_records
- owners.each_with_object({}) do |owner, result|
- result[owner] = records[convert_key(owner[owner_key_name])] || []
+ def run
+ if !preload_scope || preload_scope.empty_scope?
+ owners.each do |owner|
+ associate_records_to_owner(owner, records_by_owner[owner] || [])
+ end
+ else
+            # A custom preload scope is used, so the association
+            # cannot be marked as loaded.
+            # Load the records into a Hash instead.
+ records_by_owner
end
+ self
end
- def owner_keys
- unless defined?(@owner_keys)
- @owner_keys = owners.map do |owner|
- owner[owner_key_name]
+ def records_by_owner
+ @records_by_owner ||= preloaded_records.each_with_object({}) do |record, result|
+ owners_by_key[convert_key(record[association_key_name])].each do |owner|
+ (result[owner] ||= []) << record
end
- @owner_keys.uniq!
- @owner_keys.compact!
end
- @owner_keys
end
- def key_conversion_required?
- @key_conversion_required ||= association_key_type != owner_key_type
- end
-
- def convert_key(key)
- if key_conversion_required?
- key.to_s
- else
- key
+ def preloaded_records
+ return @preloaded_records if defined?(@preloaded_records)
+ return [] if owner_keys.empty?
+ # Some databases impose a limit on the number of ids in a list (in Oracle it's 1000)
+ # Make several smaller queries if necessary or make one query if the adapter supports it
+ slices = owner_keys.each_slice(klass.connection.in_clause_length || owner_keys.size)
+ @preloaded_records = slices.flat_map do |slice|
+ records_for(slice)
end
end
- def association_key_type
- @klass.type_for_attribute(association_key_name.to_s).type
- end
+ private
+ attr_reader :owners, :reflection, :preload_scope, :model, :klass
- def owner_key_type
- @model.type_for_attribute(owner_key_name.to_s).type
- end
+ # The name of the key on the associated records
+ def association_key_name
+ reflection.join_primary_key(klass)
+ end
- def load_records
- return {} if owner_keys.empty?
- # Some databases impose a limit on the number of ids in a list (in Oracle it's 1000)
- # Make several smaller queries if necessary or make one query if the adapter supports it
- slices = owner_keys.each_slice(klass.connection.in_clause_length || owner_keys.size)
- @preloaded_records = slices.flat_map do |slice|
- records_for(slice)
+ # The name of the key on the model which declares the association
+ def owner_key_name
+ reflection.join_foreign_key
end
- @preloaded_records.group_by do |record|
- convert_key(record[association_key_name])
+
+ def associate_records_to_owner(owner, records)
+ association = owner.association(reflection.name)
+ if reflection.collection?
+ association.target = records
+ else
+ association.target = records.first
+ end
end
- end
- def reflection_scope
- @reflection_scope ||= reflection.scope ? klass.unscoped.instance_exec(nil, &reflection.scope) : klass.unscoped
- end
+ def owner_keys
+ @owner_keys ||= owners_by_key.keys
+ end
- def build_scope
- scope = klass.unscoped
+ def owners_by_key
+ @owners_by_key ||= owners.each_with_object({}) do |owner, result|
+ key = convert_key(owner[owner_key_name])
+ (result[key] ||= []) << owner if key
+ end
+ end
- values = reflection_scope.values
- preload_values = preload_scope.values
+ def key_conversion_required?
+ unless defined?(@key_conversion_required)
+ @key_conversion_required = (association_key_type != owner_key_type)
+ end
- scope.where_clause = reflection_scope.where_clause + preload_scope.where_clause
- scope.references_values = Array(values[:references]) + Array(preload_values[:references])
+ @key_conversion_required
+ end
- if preload_values[:select] || values[:select]
- scope._select!(preload_values[:select] || values[:select])
+ def convert_key(key)
+ if key_conversion_required?
+ key.to_s
+ else
+ key
+ end
end
- scope.includes! preload_values[:includes] || values[:includes]
- if preload_scope.joins_values.any?
- scope.joins!(preload_scope.joins_values)
- else
- scope.joins!(reflection_scope.joins_values)
+
+ def association_key_type
+ @klass.type_for_attribute(association_key_name).type
end
- if order_values = preload_values[:order] || values[:order]
- scope.order!(order_values)
+ def owner_key_type
+ @model.type_for_attribute(owner_key_name).type
end
- if preload_values[:reordering] || values[:reordering]
- scope.reordering_value = true
+ def records_for(ids)
+ scope.where(association_key_name => ids).load do |record|
+            # Process only the first owner, because it is the record
+            # that is modified here (its inverse instance is set), not the owner.
+ owner = owners_by_key[convert_key(record[association_key_name])].first
+ association = owner.association(reflection.name)
+ association.set_inverse_instance(record)
+ end
end
- if preload_values[:readonly] || values[:readonly]
- scope.readonly!
+ def scope
+ @scope ||= build_scope
end
- if options[:as]
- scope.where!(klass.table_name => { reflection.type => model.base_class.sti_name })
+ def reflection_scope
+ @reflection_scope ||= reflection.scope ? reflection.scope_for(klass.unscoped) : klass.unscoped
end
- scope.unscope_values = Array(values[:unscope]) + Array(preload_values[:unscope])
- klass.default_scoped.merge(scope)
- end
+ def build_scope
+ scope = klass.scope_for_association
+
+ if reflection.type && !reflection.through_reflection?
+ scope.where!(reflection.type => model.polymorphic_name)
+ end
+
+ scope.merge!(reflection_scope) if reflection.scope
+ scope.merge!(preload_scope) if preload_scope
+ scope
+ end
end
end
end
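
Roughly how Preloader::Association is driven, again with the hypothetical Author/Book models (internal API, shown only to trace the data flow; the constructor arguments follow the initialize above):

    authors    = Author.limit(5).to_a
    reflection = Author.reflect_on_association(:books)

    loader = ActiveRecord::Associations::Preloader::Association.new(
      Book, authors, reflection, nil
    )
    loader.run
    # Owners grouped by converted key, associated rows fetched in id slices,
    # then attached per owner:
    loader.records_by_owner  # => { #<Author id: 1> => [#<Book ...>, ...], ... }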
diff --git a/activerecord/lib/active_record/associations/preloader/belongs_to.rb b/activerecord/lib/active_record/associations/preloader/belongs_to.rb
deleted file mode 100644
index 5091d4717a..0000000000
--- a/activerecord/lib/active_record/associations/preloader/belongs_to.rb
+++ /dev/null
@@ -1,17 +0,0 @@
-module ActiveRecord
- module Associations
- class Preloader
- class BelongsTo < SingularAssociation #:nodoc:
-
- def association_key_name
- reflection.options[:primary_key] || klass && klass.primary_key
- end
-
- def owner_key_name
- reflection.foreign_key
- end
-
- end
- end
- end
-end
diff --git a/activerecord/lib/active_record/associations/preloader/collection_association.rb b/activerecord/lib/active_record/associations/preloader/collection_association.rb
deleted file mode 100644
index 9939280fa4..0000000000
--- a/activerecord/lib/active_record/associations/preloader/collection_association.rb
+++ /dev/null
@@ -1,18 +0,0 @@
-module ActiveRecord
- module Associations
- class Preloader
- class CollectionAssociation < Association #:nodoc:
- private
-
- def preload(preloader)
- associated_records_by_owner(preloader).each do |owner, records|
- association = owner.association(reflection.name)
- association.loaded!
- association.target.concat(records)
- records.each { |record| association.set_inverse_instance(record) }
- end
- end
- end
- end
- end
-end
diff --git a/activerecord/lib/active_record/associations/preloader/has_many.rb b/activerecord/lib/active_record/associations/preloader/has_many.rb
deleted file mode 100644
index 3ea91a8c11..0000000000
--- a/activerecord/lib/active_record/associations/preloader/has_many.rb
+++ /dev/null
@@ -1,17 +0,0 @@
-module ActiveRecord
- module Associations
- class Preloader
- class HasMany < CollectionAssociation #:nodoc:
-
- def association_key_name
- reflection.foreign_key
- end
-
- def owner_key_name
- reflection.active_record_primary_key
- end
-
- end
- end
- end
-end
diff --git a/activerecord/lib/active_record/associations/preloader/has_many_through.rb b/activerecord/lib/active_record/associations/preloader/has_many_through.rb
deleted file mode 100644
index 2029871f39..0000000000
--- a/activerecord/lib/active_record/associations/preloader/has_many_through.rb
+++ /dev/null
@@ -1,19 +0,0 @@
-module ActiveRecord
- module Associations
- class Preloader
- class HasManyThrough < CollectionAssociation #:nodoc:
- include ThroughAssociation
-
- def associated_records_by_owner(preloader)
- records_by_owner = super
-
- if reflection_scope.distinct_value
- records_by_owner.each_value(&:uniq!)
- end
-
- records_by_owner
- end
- end
- end
- end
-end
diff --git a/activerecord/lib/active_record/associations/preloader/has_one.rb b/activerecord/lib/active_record/associations/preloader/has_one.rb
deleted file mode 100644
index c4add621ca..0000000000
--- a/activerecord/lib/active_record/associations/preloader/has_one.rb
+++ /dev/null
@@ -1,15 +0,0 @@
-module ActiveRecord
- module Associations
- class Preloader
- class HasOne < SingularAssociation #:nodoc:
- def association_key_name
- reflection.foreign_key
- end
-
- def owner_key_name
- reflection.active_record_primary_key
- end
- end
- end
- end
-end
diff --git a/activerecord/lib/active_record/associations/preloader/has_one_through.rb b/activerecord/lib/active_record/associations/preloader/has_one_through.rb
deleted file mode 100644
index f063f85574..0000000000
--- a/activerecord/lib/active_record/associations/preloader/has_one_through.rb
+++ /dev/null
@@ -1,9 +0,0 @@
-module ActiveRecord
- module Associations
- class Preloader
- class HasOneThrough < SingularAssociation #:nodoc:
- include ThroughAssociation
- end
- end
- end
-end
diff --git a/activerecord/lib/active_record/associations/preloader/singular_association.rb b/activerecord/lib/active_record/associations/preloader/singular_association.rb
deleted file mode 100644
index f60647a81e..0000000000
--- a/activerecord/lib/active_record/associations/preloader/singular_association.rb
+++ /dev/null
@@ -1,21 +0,0 @@
-module ActiveRecord
- module Associations
- class Preloader
- class SingularAssociation < Association #:nodoc:
-
- private
-
- def preload(preloader)
- associated_records_by_owner(preloader).each do |owner, associated_records|
- record = associated_records.first
-
- association = owner.association(reflection.name)
- association.target = record
- association.set_inverse_instance(record) if record
- end
- end
-
- end
- end
- end
-end
diff --git a/activerecord/lib/active_record/associations/preloader/through_association.rb b/activerecord/lib/active_record/associations/preloader/through_association.rb
index b0203909ce..bec1c4c94a 100644
--- a/activerecord/lib/active_record/associations/preloader/through_association.rb
+++ b/activerecord/lib/active_record/associations/preloader/through_association.rb
@@ -1,108 +1,115 @@
+# frozen_string_literal: true
+
module ActiveRecord
module Associations
class Preloader
- module ThroughAssociation #:nodoc:
- def through_reflection
- reflection.through_reflection
+ class ThroughAssociation < Association # :nodoc:
+ PRELOADER = ActiveRecord::Associations::Preloader.new
+
+ def initialize(*)
+ super
+ @already_loaded = owners.first.association(through_reflection.name).loaded?
end
- def source_reflection
- reflection.source_reflection
+ def preloaded_records
+ @preloaded_records ||= source_preloaders.flat_map(&:preloaded_records)
end
- def associated_records_by_owner(preloader)
- preloader.preload(owners,
- through_reflection.name,
- through_scope)
+ def records_by_owner
+ return @records_by_owner if defined?(@records_by_owner)
+ source_records_by_owner = source_preloaders.map(&:records_by_owner).reduce(:merge)
+ through_records_by_owner = through_preloaders.map(&:records_by_owner).reduce(:merge)
- through_records = owners.map do |owner|
- association = owner.association through_reflection.name
+ @records_by_owner = owners.each_with_object({}) do |owner, result|
+ through_records = through_records_by_owner[owner] || []
- center = target_records_from_association(association)
- [owner, Array(center)]
- end
-
- reset_association owners, through_reflection.name
+ if @already_loaded
+ if source_type = reflection.options[:source_type]
+ through_records = through_records.select do |record|
+ record[reflection.foreign_type] == source_type
+ end
+ end
+ end
- middle_records = through_records.flat_map { |(_,rec)| rec }
+ records = through_records.flat_map do |record|
+ source_records_by_owner[record]
+ end
- preloaders = preloader.preload(middle_records,
- source_reflection.name,
- reflection_scope)
+ records.compact!
+ records.sort_by! { |rhs| preload_index[rhs] } if scope.order_values.any?
+ records.uniq! if scope.distinct_value
+ result[owner] = records
+ end
+ end
- @preloaded_records = preloaders.flat_map(&:preloaded_records)
+ private
+ def source_preloaders
+ @source_preloaders ||= PRELOADER.preload(middle_records, source_reflection.name, scope)
+ end
- middle_to_pl = preloaders.each_with_object({}) do |pl,h|
- pl.owners.each { |middle|
- h[middle] = pl
- }
+ def middle_records
+ through_preloaders.flat_map(&:preloaded_records)
end
- through_records.each_with_object({}) do |(lhs,center), records_by_owner|
- pl_to_middle = center.group_by { |record| middle_to_pl[record] }
+ def through_preloaders
+ @through_preloaders ||= PRELOADER.preload(owners, through_reflection.name, through_scope)
+ end
- records_by_owner[lhs] = pl_to_middle.flat_map do |pl, middles|
- rhs_records = middles.flat_map { |r|
- association = r.association source_reflection.name
+ def through_reflection
+ reflection.through_reflection
+ end
- target_records_from_association(association)
- }.compact
+ def source_reflection
+ reflection.source_reflection
+ end
- # Respect the order on `reflection_scope` if it exists, else use the natural order.
- if reflection_scope.values[:order].present?
- @id_map ||= id_to_index_map @preloaded_records
- rhs_records.sort_by { |rhs| @id_map[rhs] }
- else
- rhs_records
- end
+ def preload_index
+ @preload_index ||= preloaded_records.each_with_object({}).with_index do |(record, result), index|
+ result[record] = index
end
end
- end
- private
+ def through_scope
+ scope = through_reflection.klass.unscoped
+ options = reflection.options
- def id_to_index_map(ids)
- id_map = {}
- ids.each_with_index { |id, index| id_map[id] = index }
- id_map
- end
+ values = reflection_scope.values
+ if annotations = values[:annotate]
+ scope.annotate!(*annotations)
+ end
- def reset_association(owners, association_name)
- should_reset = (through_scope != through_reflection.klass.unscoped) ||
- (reflection.options[:source_type] && through_reflection.collection?)
+ if options[:source_type]
+ scope.where! reflection.foreign_type => options[:source_type]
+ elsif !reflection_scope.where_clause.empty?
+ scope.where_clause = reflection_scope.where_clause
- # Don't cache the association - we would only be caching a subset
- if should_reset
- owners.each { |owner|
- owner.association(association_name).reset
- }
- end
- end
+ if includes = values[:includes]
+ scope.includes!(source_reflection.name => includes)
+ else
+ scope.includes!(source_reflection.name)
+ end
+
+ if values[:references] && !values[:references].empty?
+ scope.references!(values[:references])
+ else
+ scope.references!(source_reflection.table_name)
+ end
+ if joins = values[:joins]
+ scope.joins!(source_reflection.name => joins)
+ end
- def through_scope
- scope = through_reflection.klass.unscoped
+ if left_outer_joins = values[:left_outer_joins]
+ scope.left_outer_joins!(source_reflection.name => left_outer_joins)
+ end
- if options[:source_type]
- scope.where! reflection.foreign_type => options[:source_type]
- else
- unless reflection_scope.where_clause.empty?
- scope.includes_values = Array(reflection_scope.values[:includes] || options[:source])
- scope.where_clause = reflection_scope.where_clause
+ if scope.eager_loading? && order_values = values[:order]
+ scope = scope.order(order_values)
+ end
end
- scope.references! reflection_scope.values[:references]
- if scope.eager_loading? && order_values = reflection_scope.values[:order]
- scope = scope.order(order_values)
- end
+ scope
end
-
- scope
- end
-
- def target_records_from_association(association)
- association.loaded? ? association.target : association.reader
- end
end
end
end
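
The two-step flow above (preload the through association, then the source association on those middle records) is what backs eager loading of :through associations; a sketch with the hypothetical Physician models:

    # Preloads :appointments on the physicians first, then :patient on the
    # resulting appointments, and finally merges the results per physician.
    physicians = Physician.includes(:patients).limit(10).to_a
    physicians.first.patients  # already loaded, no extra query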
diff --git a/activerecord/lib/active_record/associations/singular_association.rb b/activerecord/lib/active_record/associations/singular_association.rb
index f913f0852a..a92932fa4b 100644
--- a/activerecord/lib/active_record/associations/singular_association.rb
+++ b/activerecord/lib/active_record/associations/singular_association.rb
@@ -1,17 +1,11 @@
+# frozen_string_literal: true
+
module ActiveRecord
module Associations
class SingularAssociation < Association #:nodoc:
# Implements the reader method, e.g. foo.bar for Foo.has_one :bar
- def reader(force_reload = false)
- if force_reload && klass
- ActiveSupport::Deprecation.warn(<<-MSG.squish)
- Passing an argument to force an association to reload is now
- deprecated and will be removed in Rails 5.1. Please call `reload`
- on the parent object instead.
- MSG
-
- klass.uncached { reload }
- elsif !loaded? || stale_target?
+ def reader
+ if !loaded? || stale_target?
reload
end
@@ -23,46 +17,26 @@ module ActiveRecord
replace(record)
end
- def create(attributes = {}, &block)
- _create_record(attributes, &block)
- end
-
- def create!(attributes = {}, &block)
- _create_record(attributes, true, &block)
- end
-
- def build(attributes = {})
- record = build_record(attributes)
- yield(record) if block_given?
+ def build(attributes = {}, &block)
+ record = build_record(attributes, &block)
set_new_record(record)
record
end
- private
-
- def create_scope
- scope.scope_for_create.stringify_keys.except(klass.primary_key)
- end
-
- def get_records
- return scope.limit(1).records if skip_statement_cache?
-
- conn = klass.connection
- sc = reflection.association_scope_cache(conn, owner) do
- StatementCache.create(conn) { |params|
- as = AssociationScope.create { params.bind }
- target_scope.merge(as.scope(self, conn)).limit(1)
- }
- end
+ # Implements the reload reader method, e.g. foo.reload_bar for
+ # Foo.has_one :bar
+ def force_reload_reader
+ reload(true)
+ target
+ end
- binds = AssociationScope.get_bind_values(owner, reflection.chain)
- sc.execute binds, klass, klass.connection
+ private
+ def scope_for_create
+ super.except!(klass.primary_key)
end
def find_target
- if record = get_records.first
- set_inverse_instance record
- end
+ super.first
end
def replace(record)
@@ -73,9 +47,8 @@ module ActiveRecord
replace(record)
end
- def _create_record(attributes, raise_error = false)
- record = build_record(attributes)
- yield(record) if block_given?
+ def _create_record(attributes, raise_error = false, &block)
+ record = build_record(attributes, &block)
saved = record.save
set_new_record(record)
raise RecordInvalid.new(record) if !saved && raise_error
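
The reader no longer accepts a force_reload argument; its replacement is the new force_reload_reader, which applications reach through the generated reload_<association> method. A hedged usage sketch (Book and :cover are illustrative names):

    class Book < ActiveRecord::Base
      has_one :cover
    end

    book = Book.first
    book.cover         # loaded once, then served from the association cache
    # book.cover(true) # the removed force-reload form
    book.reload_cover  # bypasses the cache via force_reload_reader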
diff --git a/activerecord/lib/active_record/associations/through_association.rb b/activerecord/lib/active_record/associations/through_association.rb
index d0ec3e8015..15e6565e69 100644
--- a/activerecord/lib/active_record/associations/through_association.rb
+++ b/activerecord/lib/active_record/associations/through_association.rb
@@ -1,11 +1,27 @@
+# frozen_string_literal: true
+
module ActiveRecord
- # = Active Record Through Association
module Associations
+ # = Active Record Through Association
module ThroughAssociation #:nodoc:
+ delegate :source_reflection, to: :reflection
+
+ private
+ def through_reflection
+ @through_reflection ||= begin
+ refl = reflection.through_reflection
- delegate :source_reflection, :through_reflection, :to => :reflection
+ while refl.through_reflection?
+ refl = refl.through_reflection
+ end
- protected
+ refl
+ end
+ end
+
+ def through_association
+ @through_association ||= owner.association(through_reflection.name)
+ end
# We merge in these scopes for two reasons:
#
@@ -14,7 +30,7 @@ module ActiveRecord
def target_scope
scope = super
reflection.chain.drop(1).each do |reflection|
- relation = reflection.klass.all
+ relation = reflection.klass.scope_for_association
scope.merge!(
relation.except(:select, :create_with, :includes, :preload, :joins, :eager_load)
)
@@ -22,8 +38,6 @@ module ActiveRecord
scope
end
- private
-
# Construct attributes for :through pointing to owner and associate. This is used by the
# methods which create and delete records on the association.
#
@@ -39,24 +53,22 @@ module ActiveRecord
def construct_join_attributes(*records)
ensure_mutable
- if source_reflection.association_primary_key(reflection.klass) == reflection.klass.primary_key
+ association_primary_key = source_reflection.association_primary_key(reflection.klass)
+
+ if association_primary_key == reflection.klass.primary_key && !options[:source_type]
join_attributes = { source_reflection.name => records }
else
join_attributes = {
- source_reflection.foreign_key =>
- records.map { |record|
- record.send(source_reflection.association_primary_key(reflection.klass))
- }
+ source_reflection.foreign_key => records.map(&association_primary_key.to_sym)
}
end
if options[:source_type]
- join_attributes[source_reflection.foreign_type] =
- records.map { |record| record.class.base_class.name }
+ join_attributes[source_reflection.foreign_type] = [ options[:source_type] ]
end
if records.count == 1
- Hash[join_attributes.map { |k, v| [k, v.first] }]
+ join_attributes.transform_values!(&:first)
else
join_attributes
end
@@ -102,7 +114,7 @@ module ActiveRecord
attributes[inverse.foreign_key] = target.id
end
- super(attributes)
+ super
end
end
end
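
The while loop in through_reflection above unwinds nested :through chains until it reaches a reflection that is not itself a :through. A sketch of the shape of association it handles (model names are illustrative):

    class Comment < ActiveRecord::Base
      belongs_to :post
      has_many :votes
    end

    class Post < ActiveRecord::Base
      belongs_to :author
      has_many :comments
    end

    class Author < ActiveRecord::Base
      has_many :posts
      has_many :post_comments, through: :posts, source: :comments
      # The :through target here is itself a :through association, so the
      # loop keeps drilling until it lands on the plain :posts reflection.
      has_many :comment_votes, through: :post_comments, source: :votes
    end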
diff --git a/activerecord/lib/active_record/attribute.rb b/activerecord/lib/active_record/attribute.rb
deleted file mode 100644
index 3c4c8f10ec..0000000000
--- a/activerecord/lib/active_record/attribute.rb
+++ /dev/null
@@ -1,207 +0,0 @@
-module ActiveRecord
- class Attribute # :nodoc:
- class << self
- def from_database(name, value, type)
- FromDatabase.new(name, value, type)
- end
-
- def from_user(name, value, type, original_attribute = nil)
- FromUser.new(name, value, type, original_attribute)
- end
-
- def with_cast_value(name, value, type)
- WithCastValue.new(name, value, type)
- end
-
- def null(name)
- Null.new(name)
- end
-
- def uninitialized(name, type)
- Uninitialized.new(name, type)
- end
- end
-
- attr_reader :name, :value_before_type_cast, :type
-
- # This method should not be called directly.
- # Use #from_database or #from_user
- def initialize(name, value_before_type_cast, type, original_attribute = nil)
- @name = name
- @value_before_type_cast = value_before_type_cast
- @type = type
- @original_attribute = original_attribute
- end
-
- def value
- # `defined?` is cheaper than `||=` when we get back falsy values
- @value = type_cast(value_before_type_cast) unless defined?(@value)
- @value
- end
-
- def original_value
- if assigned?
- original_attribute.original_value
- else
- type_cast(value_before_type_cast)
- end
- end
-
- def value_for_database
- type.serialize(value)
- end
-
- def changed?
- changed_from_assignment? || changed_in_place?
- end
-
- def changed_in_place?
- has_been_read? && type.changed_in_place?(original_value_for_database, value)
- end
-
- def forgetting_assignment
- with_value_from_database(value_for_database)
- end
-
- def with_value_from_user(value)
- type.assert_valid_value(value)
- self.class.from_user(name, value, type, self)
- end
-
- def with_value_from_database(value)
- self.class.from_database(name, value, type)
- end
-
- def with_cast_value(value)
- self.class.with_cast_value(name, value, type)
- end
-
- def with_type(type)
- self.class.new(name, value_before_type_cast, type, original_attribute)
- end
-
- def type_cast(*)
- raise NotImplementedError
- end
-
- def initialized?
- true
- end
-
- def came_from_user?
- false
- end
-
- def has_been_read?
- defined?(@value)
- end
-
- def ==(other)
- self.class == other.class &&
- name == other.name &&
- value_before_type_cast == other.value_before_type_cast &&
- type == other.type
- end
- alias eql? ==
-
- def hash
- [self.class, name, value_before_type_cast, type].hash
- end
-
- protected
-
- attr_reader :original_attribute
- alias_method :assigned?, :original_attribute
-
- def initialize_dup(other)
- if defined?(@value) && @value.duplicable?
- @value = @value.dup
- end
- end
-
- def changed_from_assignment?
- assigned? && type.changed?(original_value, value, value_before_type_cast)
- end
-
- def original_value_for_database
- if assigned?
- original_attribute.original_value_for_database
- else
- _original_value_for_database
- end
- end
-
- def _original_value_for_database
- value_for_database
- end
-
- class FromDatabase < Attribute # :nodoc:
- def type_cast(value)
- type.deserialize(value)
- end
-
- def _original_value_for_database
- value_before_type_cast
- end
- end
-
- class FromUser < Attribute # :nodoc:
- def type_cast(value)
- type.cast(value)
- end
-
- def came_from_user?
- true
- end
- end
-
- class WithCastValue < Attribute # :nodoc:
- def type_cast(value)
- value
- end
-
- def changed_in_place_from?(old_value)
- false
- end
- end
-
- class Null < Attribute # :nodoc:
- def initialize(name)
- super(name, nil, Type::Value.new)
- end
-
- def value
- nil
- end
-
- def with_type(type)
- self.class.with_cast_value(name, nil, type)
- end
-
- def with_value_from_database(value)
- raise ActiveModel::MissingAttributeError, "can't write unknown attribute `#{name}`"
- end
- alias_method :with_value_from_user, :with_value_from_database
- end
-
- class Uninitialized < Attribute # :nodoc:
- def initialize(name, type)
- super(name, nil, type)
- end
-
- def value
- if block_given?
- yield name
- end
- end
-
- def value_for_database
- end
-
- def initialized?
- false
- end
- end
- private_constant :FromDatabase, :FromUser, :Null, :Uninitialized, :WithCastValue
- end
-end
diff --git a/activerecord/lib/active_record/attribute/user_provided_default.rb b/activerecord/lib/active_record/attribute/user_provided_default.rb
deleted file mode 100644
index 4580813364..0000000000
--- a/activerecord/lib/active_record/attribute/user_provided_default.rb
+++ /dev/null
@@ -1,28 +0,0 @@
-require 'active_record/attribute'
-
-module ActiveRecord
- class Attribute # :nodoc:
- class UserProvidedDefault < FromUser # :nodoc:
- def initialize(name, value, type, database_default)
- @user_provided_value = value
- super(name, value, type, database_default)
- end
-
- def value_before_type_cast
- if user_provided_value.is_a?(Proc)
- @memoized_value_before_type_cast ||= user_provided_value.call
- else
- @user_provided_value
- end
- end
-
- def with_type(type)
- self.class.new(name, user_provided_value, type, original_attribute)
- end
-
- protected
-
- attr_reader :user_provided_value
- end
- end
-end
diff --git a/activerecord/lib/active_record/attribute_assignment.rb b/activerecord/lib/active_record/attribute_assignment.rb
index 4c22be8235..929045f29b 100644
--- a/activerecord/lib/active_record/attribute_assignment.rb
+++ b/activerecord/lib/active_record/attribute_assignment.rb
@@ -1,55 +1,50 @@
-require 'active_model/forbidden_attributes_protection'
+# frozen_string_literal: true
+
+require "active_model/forbidden_attributes_protection"
module ActiveRecord
module AttributeAssignment
- extend ActiveSupport::Concern
include ActiveModel::AttributeAssignment
- # Alias for ActiveModel::AttributeAssignment#assign_attributes. See ActiveModel::AttributeAssignment.
- def attributes=(attributes)
- assign_attributes(attributes)
- end
-
private
- def _assign_attributes(attributes) # :nodoc:
- multi_parameter_attributes = {}
- nested_parameter_attributes = {}
+ def _assign_attributes(attributes)
+ multi_parameter_attributes = {}
+ nested_parameter_attributes = {}
- attributes.each do |k, v|
- if k.include?("(")
- multi_parameter_attributes[k] = attributes.delete(k)
- elsif v.is_a?(Hash)
- nested_parameter_attributes[k] = attributes.delete(k)
+ attributes.each do |k, v|
+ if k.include?("(")
+ multi_parameter_attributes[k] = attributes.delete(k)
+ elsif v.is_a?(Hash)
+ nested_parameter_attributes[k] = attributes.delete(k)
+ end
end
- end
- super(attributes)
+ super(attributes)
- assign_nested_parameter_attributes(nested_parameter_attributes) unless nested_parameter_attributes.empty?
- assign_multiparameter_attributes(multi_parameter_attributes) unless multi_parameter_attributes.empty?
- end
+ assign_nested_parameter_attributes(nested_parameter_attributes) unless nested_parameter_attributes.empty?
+ assign_multiparameter_attributes(multi_parameter_attributes) unless multi_parameter_attributes.empty?
+ end
- # Assign any deferred nested attributes after the base attributes have been set.
- def assign_nested_parameter_attributes(pairs)
- pairs.each { |k, v| _assign_attribute(k, v) }
- end
+ # Assign any deferred nested attributes after the base attributes have been set.
+ def assign_nested_parameter_attributes(pairs)
+ pairs.each { |k, v| _assign_attribute(k, v) }
+ end
- # Instantiates objects for all attribute classes that needs more than one constructor parameter. This is done
- # by calling new on the column type or aggregation type (through composed_of) object with these parameters.
- # So having the pairs written_on(1) = "2004", written_on(2) = "6", written_on(3) = "24", will instantiate
- # written_on (a date type) with Date.new("2004", "6", "24"). You can also specify a typecast character in the
- # parentheses to have the parameters typecasted before they're used in the constructor. Use i for Fixnum and
- # f for Float. If all the values for a given attribute are empty, the attribute will be set to +nil+.
- def assign_multiparameter_attributes(pairs)
- execute_callstack_for_multiparameter_attributes(
- extract_callstack_for_multiparameter_attributes(pairs)
- )
- end
+      # Instantiates objects for all attribute classes that need more than one constructor parameter. This is done
+ # by calling new on the column type or aggregation type (through composed_of) object with these parameters.
+ # So having the pairs written_on(1) = "2004", written_on(2) = "6", written_on(3) = "24", will instantiate
+ # written_on (a date type) with Date.new("2004", "6", "24"). You can also specify a typecast character in the
+ # parentheses to have the parameters typecasted before they're used in the constructor. Use i for Integer and
+ # f for Float. If all the values for a given attribute are empty, the attribute will be set to +nil+.
+ def assign_multiparameter_attributes(pairs)
+ execute_callstack_for_multiparameter_attributes(
+ extract_callstack_for_multiparameter_attributes(pairs)
+ )
+ end
- def execute_callstack_for_multiparameter_attributes(callstack)
- errors = []
- callstack.each do |name, values_with_empty_parameters|
- begin
+ def execute_callstack_for_multiparameter_attributes(callstack)
+ errors = []
+ callstack.each do |name, values_with_empty_parameters|
if values_with_empty_parameters.each_value.all?(&:nil?)
values = nil
else
@@ -59,33 +54,32 @@ module ActiveRecord
rescue => ex
errors << AttributeAssignmentError.new("error on assignment #{values_with_empty_parameters.values.inspect} to #{name} (#{ex.message})", ex, name)
end
+ unless errors.empty?
+ error_descriptions = errors.map(&:message).join(",")
+ raise MultiparameterAssignmentErrors.new(errors), "#{errors.size} error(s) on assignment of multiparameter attributes [#{error_descriptions}]"
+ end
end
- unless errors.empty?
- error_descriptions = errors.map(&:message).join(",")
- raise MultiparameterAssignmentErrors.new(errors), "#{errors.size} error(s) on assignment of multiparameter attributes [#{error_descriptions}]"
- end
- end
- def extract_callstack_for_multiparameter_attributes(pairs)
- attributes = {}
+ def extract_callstack_for_multiparameter_attributes(pairs)
+ attributes = {}
- pairs.each do |(multiparameter_name, value)|
- attribute_name = multiparameter_name.split("(").first
- attributes[attribute_name] ||= {}
+ pairs.each do |(multiparameter_name, value)|
+ attribute_name = multiparameter_name.split("(").first
+ attributes[attribute_name] ||= {}
- parameter_value = value.empty? ? nil : type_cast_attribute_value(multiparameter_name, value)
- attributes[attribute_name][find_parameter_position(multiparameter_name)] ||= parameter_value
- end
+ parameter_value = value.empty? ? nil : type_cast_attribute_value(multiparameter_name, value)
+ attributes[attribute_name][find_parameter_position(multiparameter_name)] ||= parameter_value
+ end
- attributes
- end
+ attributes
+ end
- def type_cast_attribute_value(multiparameter_name, value)
- multiparameter_name =~ /\([0-9]*([if])\)/ ? value.send("to_" + $1) : value
- end
+ def type_cast_attribute_value(multiparameter_name, value)
+ multiparameter_name =~ /\([0-9]*([if])\)/ ? value.send("to_" + $1) : value
+ end
- def find_parameter_position(multiparameter_name)
- multiparameter_name.scan(/\(([0-9]*).*\)/).first.first.to_i
- end
+ def find_parameter_position(multiparameter_name)
+ multiparameter_name.scan(/\(([0-9]*).*\)/).first.first.to_i
+ end
end
end
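
The multiparameter handling documented above is what the date and time form helpers rely on: each positional part arrives as "attr(Ni)" and is type cast with to_i before the column type's constructor runs. A hedged sketch, assuming a Topic model with a written_on date column:

    topic = Topic.new(
      "written_on(1i)" => "2004",
      "written_on(2i)" => "6",
      "written_on(3i)" => "24"
    )
    topic.written_on # => the Date 2004-06-24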
diff --git a/activerecord/lib/active_record/attribute_decorators.rb b/activerecord/lib/active_record/attribute_decorators.rb
index 7d0ae32411..98b7805c0a 100644
--- a/activerecord/lib/active_record/attribute_decorators.rb
+++ b/activerecord/lib/active_record/attribute_decorators.rb
@@ -1,19 +1,42 @@
+# frozen_string_literal: true
+
module ActiveRecord
module AttributeDecorators # :nodoc:
extend ActiveSupport::Concern
included do
- class_attribute :attribute_type_decorations, instance_accessor: false # :internal:
- self.attribute_type_decorations = TypeDecorator.new
+ class_attribute :attribute_type_decorations, instance_accessor: false, default: TypeDecorator.new # :internal:
end
module ClassMethods # :nodoc:
+ # This method is an internal API used to create class macros such as
+ # +serialize+, and features like time zone aware attributes.
+ #
+ # Used to wrap the type of an attribute in a new type.
+ # When the schema for a model is loaded, attributes with the same name as
+ # +column_name+ will have their type yielded to the given block. The
+ # return value of that block will be used instead.
+ #
+ # Subsequent calls where +column_name+ and +decorator_name+ are the same
+ # will override the previous decorator, not decorate twice. This can be
+      # used to create idempotent class macros like +serialize+.
def decorate_attribute_type(column_name, decorator_name, &block)
matcher = ->(name, _) { name == column_name.to_s }
key = "_#{column_name}_#{decorator_name}"
decorate_matching_attribute_types(matcher, key, &block)
end
+ # This method is an internal API used to create higher level features like
+ # time zone aware attributes.
+ #
+ # When the schema for a model is loaded, +matcher+ will be called for each
+ # attribute with its name and type. If the matcher returns a truthy value,
+ # the type will then be yielded to the given block, and the return value
+ # of that block will replace the type.
+ #
+ # Subsequent calls to this method with the same value for +decorator_name+
+ # will replace the previous decorator, not decorate twice. This can be
+ # used to ensure that class macros are idempotent.
def decorate_matching_attribute_types(matcher, decorator_name, &block)
reload_schema_from_cache
decorator_name = decorator_name.to_s
@@ -24,13 +47,13 @@ module ActiveRecord
private
- def load_schema!
- super
- attribute_types.each do |name, type|
- decorated_type = attribute_type_decorations.apply(name, type)
- define_attribute(name, decorated_type)
+ def load_schema!
+ super
+ attribute_types.each do |name, type|
+ decorated_type = attribute_type_decorations.apply(name, type)
+ define_attribute(name, decorated_type)
+ end
end
- end
end
class TypeDecorator # :nodoc:
@@ -53,15 +76,15 @@ module ActiveRecord
private
- def decorators_for(name, type)
- matching(name, type).map(&:last)
- end
+ def decorators_for(name, type)
+ matching(name, type).map(&:last)
+ end
- def matching(name, type)
- @decorations.values.select do |(matcher, _)|
- matcher.call(name, type)
+ def matching(name, type)
+ @decorations.values.select do |(matcher, _)|
+ matcher.call(name, type)
+ end
end
- end
end
end
end
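
As the new comments stress, decorate_attribute_type is an internal API that macros such as serialize are built on. A hedged sketch of the mechanism, with an invented UpcaseType decorator used purely for illustration:

    require "delegate"

    class UpcaseType < SimpleDelegator
      # Delegates to the wrapped type and upcases string values on cast.
      def cast(value)
        value = __getobj__.cast(value)
        value.is_a?(String) ? value.upcase : value
      end
    end

    class Post < ActiveRecord::Base
      # Repeating the call with the same decorator_name (:upcase) replaces the
      # previous block instead of wrapping twice, keeping the macro idempotent.
      decorate_attribute_type(:title, :upcase) { |type| UpcaseType.new(type) }
    end

    Post.new(title: "hello").title # => "HELLO"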
diff --git a/activerecord/lib/active_record/attribute_methods.rb b/activerecord/lib/active_record/attribute_methods.rb
index e902eb7531..af7e46e649 100644
--- a/activerecord/lib/active_record/attribute_methods.rb
+++ b/activerecord/lib/active_record/attribute_methods.rb
@@ -1,7 +1,6 @@
-require 'active_support/core_ext/enumerable'
-require 'active_support/core_ext/string/filters'
-require 'mutex_m'
-require 'concurrent/map'
+# frozen_string_literal: true
+
+require "mutex_m"
module ActiveRecord
# = Active Record Attribute Methods
@@ -23,18 +22,11 @@ module ActiveRecord
delegate :column_for_attribute, to: :class
end
- AttrNames = Module.new {
- def self.set_name_cache(name, value)
- const_name = "ATTR_#{name}"
- unless const_defined? const_name
- const_set const_name, value.dup.freeze
- end
- end
- }
-
- BLACKLISTED_CLASS_METHODS = %w(private public protected allocate new name parent superclass)
+ RESTRICTED_CLASS_METHODS = %w(private public protected allocate new name parent superclass)
- class GeneratedAttributeMethods < Module; end # :nodoc:
+ class GeneratedAttributeMethodsBuilder < Module #:nodoc:
+ include Mutex_m
+ end
module ClassMethods
def inherited(child_class) #:nodoc:
@@ -43,7 +35,8 @@ module ActiveRecord
end
def initialize_generated_modules # :nodoc:
- @generated_attribute_methods = GeneratedAttributeMethods.new { extend Mutex_m }
+ @generated_attribute_methods = const_set(:GeneratedAttributeMethods, GeneratedAttributeMethodsBuilder.new)
+ private_constant :GeneratedAttributeMethods
@attribute_methods_generated = false
include @generated_attribute_methods
@@ -58,11 +51,10 @@ module ActiveRecord
# attribute methods.
generated_attribute_methods.synchronize do
return false if @attribute_methods_generated
- superclass.define_attribute_methods unless self == base_class
+ superclass.define_attribute_methods unless base_class?
super(attribute_names)
@attribute_methods_generated = true
end
- true
end
def undefine_attribute_methods # :nodoc:
@@ -97,7 +89,7 @@ module ActiveRecord
# If ThisClass < ... < SomeSuperClass < ... < Base and SomeSuperClass
# defines its own attribute method, then we don't want to overwrite that.
defined = method_defined_within?(method_name, superclass, Base) &&
- ! superclass.instance_method(method_name).owner.is_a?(GeneratedAttributeMethods)
+ ! superclass.instance_method(method_name).owner.is_a?(GeneratedAttributeMethodsBuilder)
defined || super
end
end
@@ -123,7 +115,7 @@ module ActiveRecord
# A class method is 'dangerous' if it is already (re)defined by Active Record, but
# not by any ancestors. (So 'puts' is not dangerous but 'new' is.)
def dangerous_class_method?(method_name)
- BLACKLISTED_CLASS_METHODS.include?(method_name.to_s) || class_method_defined_within?(method_name, Base)
+ RESTRICTED_CLASS_METHODS.include?(method_name.to_s) || class_method_defined_within?(method_name, Base)
end
def class_method_defined_within?(name, klass, superklass = klass.superclass) # :nodoc:
@@ -148,7 +140,7 @@ module ActiveRecord
# Person.attribute_method?(:age=) # => true
# Person.attribute_method?(:nothing) # => false
def attribute_method?(attribute)
- super || (table_exists? && column_names.include?(attribute.to_s.sub(/=$/, '')))
+ super || (table_exists? && column_names.include?(attribute.to_s.sub(/=$/, "")))
end
# Returns an array of column names as strings if it's not an abstract class and
@@ -161,10 +153,61 @@ module ActiveRecord
# # => ["id", "created_at", "updated_at", "name", "age"]
def attribute_names
@attribute_names ||= if !abstract_class? && table_exists?
- attribute_types.keys
- else
- []
- end
+ attribute_types.keys
+ else
+ []
+ end
+ end
+
+ # Regexp for column names (with or without a table name prefix). Matches
+ # the following:
+ # "#{table_name}.#{column_name}"
+ # "#{column_name}"
+ COLUMN_NAME = /\A(?:\w+\.)?\w+\z/i
+
+ # Regexp for column names with order (with or without a table name
+ # prefix, with or without various order modifiers). Matches the following:
+ # "#{table_name}.#{column_name}"
+ # "#{table_name}.#{column_name} #{direction}"
+ # "#{table_name}.#{column_name} #{direction} NULLS FIRST"
+ # "#{table_name}.#{column_name} NULLS LAST"
+ # "#{column_name}"
+ # "#{column_name} #{direction}"
+ # "#{column_name} #{direction} NULLS FIRST"
+ # "#{column_name} NULLS LAST"
+ COLUMN_NAME_WITH_ORDER = /
+ \A
+ (?:\w+\.)?
+ \w+
+ (?:\s+asc|\s+desc)?
+ (?:\s+nulls\s+(?:first|last))?
+ \z
+ /ix
+
+ def disallow_raw_sql!(args, permit: COLUMN_NAME) # :nodoc:
+ unexpected = args.reject do |arg|
+ Arel.arel_node?(arg) ||
+ arg.to_s.split(/\s*,\s*/).all? { |part| permit.match?(part) }
+ end
+
+ return if unexpected.none?
+
+ if allow_unsafe_raw_sql == :deprecated
+ ActiveSupport::Deprecation.warn(
+ "Dangerous query method (method whose arguments are used as raw " \
+ "SQL) called with non-attribute argument(s): " \
+ "#{unexpected.map(&:inspect).join(", ")}. Non-attribute " \
+ "arguments will be disallowed in Rails 6.0. This method should " \
+ "not be called with user-provided values, such as request " \
+ "parameters or model attributes. Known-safe values can be passed " \
+ "by wrapping them in Arel.sql()."
+ )
+ else
+ raise(ActiveRecord::UnknownAttributeReference,
+ "Query method called with non-attribute argument(s): " +
+ unexpected.map(&:inspect).join(", ")
+ )
+ end
end
# Returns true if the given attribute exists, otherwise false.
@@ -209,34 +252,27 @@ module ActiveRecord
# end
#
# person = Person.new
- # person.respond_to(:name) # => true
- # person.respond_to(:name=) # => true
- # person.respond_to(:name?) # => true
- # person.respond_to('age') # => true
- # person.respond_to('age=') # => true
- # person.respond_to('age?') # => true
- # person.respond_to(:nothing) # => false
+ # person.respond_to?(:name) # => true
+ # person.respond_to?(:name=) # => true
+ # person.respond_to?(:name?) # => true
+ # person.respond_to?('age') # => true
+ # person.respond_to?('age=') # => true
+ # person.respond_to?('age?') # => true
+ # person.respond_to?(:nothing) # => false
def respond_to?(name, include_private = false)
return false unless super
- case name
- when :to_partial_path
- name = "to_partial_path".freeze
- when :to_model
- name = "to_model".freeze
- else
- name = name.to_s
- end
-
# If the result is true then check for the select case.
# For queries selecting a subset of columns, return false for unselected columns.
# We check defined?(@attributes) not to issue warnings if called on objects that
# have been allocated but not yet initialized.
- if defined?(@attributes) && self.class.column_names.include?(name)
- return has_attribute?(name)
+ if defined?(@attributes)
+ if name = self.class.symbol_column_to_string(name.to_sym)
+ return has_attribute?(name)
+ end
end
- return true
+ true
end
# Returns +true+ if the given attribute is in the attributes hash, otherwise +false+.
@@ -279,9 +315,8 @@ module ActiveRecord
# Returns an <tt>#inspect</tt>-like string for the value of the
# attribute +attr_name+. String attributes are truncated up to 50
# characters, Date and Time attributes are returned in the
- # <tt>:db</tt> format, Array attributes are truncated up to 10 values.
- # Other attributes return the value of <tt>#inspect</tt> without
- # modification.
+ # <tt>:db</tt> format. Other attributes return the value of
+ # <tt>#inspect</tt> without modification.
#
# person = Person.create!(name: 'David Heinemeier Hansson ' * 3)
#
@@ -292,20 +327,10 @@ module ActiveRecord
# # => "\"2012-10-22 00:15:07\""
#
# person.attribute_for_inspect(:tag_ids)
- # # => "[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, ...]"
+ # # => "[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]"
def attribute_for_inspect(attr_name)
- value = read_attribute(attr_name)
-
- if value.is_a?(String) && value.length > 50
- "#{value[0, 50]}...".inspect
- elsif value.is_a?(Date) || value.is_a?(Time)
- %("#{value.to_s(:db)}")
- elsif value.is_a?(Array) && value.size > 10
- inspected = value.first(10).inspect
- %(#{inspected[0...-1]}, ...])
- else
- value.inspect
- end
+ value = _read_attribute(attr_name)
+ format_for_inspect(value)
end
# Returns +true+ if the specified +attribute+ has been set by the user or by a
@@ -334,8 +359,6 @@ module ActiveRecord
#
# Note: +:id+ is always present.
#
- # Alias for the #read_attribute method.
- #
# class Person < ActiveRecord::Base
# belongs_to :organization
# end
@@ -360,7 +383,7 @@ module ActiveRecord
# person = Person.new
# person[:age] = '22'
# person[:age] # => 22
- # person[:age] # => Fixnum
+ # person[:age].class # => Integer
def []=(attr_name, value)
write_attribute(attr_name, value)
end
@@ -398,67 +421,51 @@ module ActiveRecord
@attributes.accessed
end
- protected
-
- def clone_attribute_value(reader_method, attribute_name) # :nodoc:
- value = send(reader_method, attribute_name)
- value.duplicable? ? value.clone : value
- rescue TypeError, NoMethodError
- value
- end
-
- def arel_attributes_with_values_for_create(attribute_names) # :nodoc:
- arel_attributes_with_values(attributes_for_create(attribute_names))
- end
-
- def arel_attributes_with_values_for_update(attribute_names) # :nodoc:
- arel_attributes_with_values(attributes_for_update(attribute_names))
- end
-
- def attribute_method?(attr_name) # :nodoc:
- # We check defined? because Syck calls respond_to? before actually calling initialize.
- defined?(@attributes) && @attributes.key?(attr_name)
- end
-
private
+ def attribute_method?(attr_name)
+ # We check defined? because Syck calls respond_to? before actually calling initialize.
+ defined?(@attributes) && @attributes.key?(attr_name)
+ end
- # Returns a Hash of the Arel::Attributes and attribute values that have been
- # typecasted for use in an Arel insert/update method.
- def arel_attributes_with_values(attribute_names)
- attrs = {}
- arel_table = self.class.arel_table
-
- attribute_names.each do |name|
- attrs[arel_table[name]] = typecasted_attribute_value(name)
+ def attributes_with_values(attribute_names)
+ attribute_names.each_with_object({}) do |name, attrs|
+ attrs[name] = _read_attribute(name)
+ end
end
- attrs
- end
- # Filters the primary keys and readonly attributes from the attribute names.
- def attributes_for_update(attribute_names)
- attribute_names.reject do |name|
- readonly_attribute?(name)
+ # Filters the primary keys and readonly attributes from the attribute names.
+ def attributes_for_update(attribute_names)
+ attribute_names &= self.class.column_names
+ attribute_names.delete_if do |name|
+ readonly_attribute?(name)
+ end
end
- end
- # Filters out the primary keys, from the attribute names, when the primary
- # key is to be generated (e.g. the id attribute has no value).
- def attributes_for_create(attribute_names)
- attribute_names.reject do |name|
- pk_attribute?(name) && id.nil?
+ # Filters out the primary keys, from the attribute names, when the primary
+ # key is to be generated (e.g. the id attribute has no value).
+ def attributes_for_create(attribute_names)
+ attribute_names &= self.class.column_names
+ attribute_names.delete_if do |name|
+ pk_attribute?(name) && id.nil?
+ end
end
- end
- def readonly_attribute?(name)
- self.class.readonly_attributes.include?(name)
- end
+ def format_for_inspect(value)
+ if value.is_a?(String) && value.length > 50
+ "#{value[0, 50]}...".inspect
+ elsif value.is_a?(Date) || value.is_a?(Time)
+ %("#{value.to_s(:db)}")
+ else
+ value.inspect
+ end
+ end
- def pk_attribute?(name)
- name == self.class.primary_key
- end
+ def readonly_attribute?(name)
+ self.class.readonly_attributes.include?(name)
+ end
- def typecasted_attribute_value(name)
- _read_attribute(name)
- end
+ def pk_attribute?(name)
+ name == self.class.primary_key
+ end
end
end
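
The disallow_raw_sql! guard added above is what enforces (or, while allow_unsafe_raw_sql is :deprecated, merely warns about) the rule against raw SQL in dangerous query methods. A hedged sketch of what the two regexps accept, assuming a Post model:

    Post.order("title")                    # plain column name: permitted
    Post.order("posts.title DESC")         # table-qualified, with direction: permitted
    Post.order("LENGTH(title)")            # raw SQL: deprecation warning now,
                                           # UnknownAttributeReference once disallowed
    Post.order(Arel.sql("LENGTH(title)"))  # explicitly marked as known-safe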
diff --git a/activerecord/lib/active_record/attribute_methods/before_type_cast.rb b/activerecord/lib/active_record/attribute_methods/before_type_cast.rb
index 1db6776688..5941f51a1a 100644
--- a/activerecord/lib/active_record/attribute_methods/before_type_cast.rb
+++ b/activerecord/lib/active_record/attribute_methods/before_type_cast.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
module ActiveRecord
module AttributeMethods
# = Active Record Attribute Methods Before Type Cast
@@ -63,14 +65,14 @@ module ActiveRecord
private
- # Handle *_before_type_cast for method_missing.
- def attribute_before_type_cast(attribute_name)
- read_attribute_before_type_cast(attribute_name)
- end
+ # Handle *_before_type_cast for method_missing.
+ def attribute_before_type_cast(attribute_name)
+ read_attribute_before_type_cast(attribute_name)
+ end
- def attribute_came_from_user?(attribute_name)
- @attributes[attribute_name].came_from_user?
- end
+ def attribute_came_from_user?(attribute_name)
+ @attributes[attribute_name].came_from_user?
+ end
end
end
end
diff --git a/activerecord/lib/active_record/attribute_methods/dirty.rb b/activerecord/lib/active_record/attribute_methods/dirty.rb
index 0bcfa5f00d..45e4b8adfa 100644
--- a/activerecord/lib/active_record/attribute_methods/dirty.rb
+++ b/activerecord/lib/active_record/attribute_methods/dirty.rb
@@ -1,9 +1,10 @@
-require 'active_support/core_ext/module/attribute_accessors'
-require 'active_record/attribute_mutation_tracker'
+# frozen_string_literal: true
+
+require "active_support/core_ext/module/attribute_accessors"
module ActiveRecord
module AttributeMethods
- module Dirty # :nodoc:
+ module Dirty
extend ActiveSupport::Concern
include ActiveModel::Dirty
@@ -13,139 +14,175 @@ module ActiveRecord
raise "You cannot include Dirty after Timestamp"
end
- class_attribute :partial_writes, instance_writer: false
- self.partial_writes = true
- end
+ class_attribute :partial_writes, instance_writer: false, default: true
- # Attempts to +save+ the record and clears changed attributes if successful.
- def save(*)
- if status = super
- changes_applied
- end
- status
- end
+ # Attribute methods for "changed in last call to save?"
+ attribute_method_affix(prefix: "saved_change_to_", suffix: "?")
+ attribute_method_prefix("saved_change_to_")
+ attribute_method_suffix("_before_last_save")
- # Attempts to <tt>save!</tt> the record and clears changed attributes if successful.
- def save!(*)
- super.tap do
- changes_applied
- end
+ # Attribute methods for "will change if I call save?"
+ attribute_method_affix(prefix: "will_save_change_to_", suffix: "?")
+ attribute_method_suffix("_change_to_be_saved", "_in_database")
end
# <tt>reload</tt> the record and clears changed attributes.
def reload(*)
super.tap do
- @mutation_tracker = nil
- @previous_mutation_tracker = nil
- @changed_attributes = HashWithIndifferentAccess.new
- end
- end
-
- def initialize_dup(other) # :nodoc:
- super
- @attributes = self.class._default_attributes.map do |attr|
- attr.with_value_from_user(@attributes.fetch_value(attr.name))
+ @previously_changed = ActiveSupport::HashWithIndifferentAccess.new
+ @mutations_before_last_save = nil
+ @attributes_changed_by_setter = ActiveSupport::HashWithIndifferentAccess.new
+ @mutations_from_database = nil
end
- @mutation_tracker = nil
end
- def changes_applied
- @previous_mutation_tracker = mutation_tracker
- @changed_attributes = HashWithIndifferentAccess.new
- store_original_attributes
+ # Did this attribute change when we last saved?
+ #
+ # This method is useful in after callbacks to determine if an attribute
+ # was changed during the save that triggered the callbacks to run. It can
+ # be invoked as +saved_change_to_name?+ instead of
+ # <tt>saved_change_to_attribute?("name")</tt>.
+ #
+ # ==== Options
+ #
+ # +from+ When passed, this method will return false unless the original
+ # value is equal to the given option
+ #
+ # +to+ When passed, this method will return false unless the value was
+ # changed to the given value
+ def saved_change_to_attribute?(attr_name, **options)
+ mutations_before_last_save.changed?(attr_name, **options)
+ end
+
+ # Returns the change to an attribute during the last save. If the
+ # attribute was changed, the result will be an array containing the
+ # original value and the saved value.
+ #
+ # This method is useful in after callbacks, to see the change in an
+ # attribute during the save that triggered the callbacks to run. It can be
+ # invoked as +saved_change_to_name+ instead of
+ # <tt>saved_change_to_attribute("name")</tt>.
+ def saved_change_to_attribute(attr_name)
+ mutations_before_last_save.change_to_attribute(attr_name)
+ end
+
+ # Returns the original value of an attribute before the last save.
+ #
+ # This method is useful in after callbacks to get the original value of an
+ # attribute before the save that triggered the callbacks to run. It can be
+ # invoked as +name_before_last_save+ instead of
+ # <tt>attribute_before_last_save("name")</tt>.
+ def attribute_before_last_save(attr_name)
+ mutations_before_last_save.original_value(attr_name)
+ end
+
+      # Did the last call to +save+ persist any changes?
+ def saved_changes?
+ mutations_before_last_save.any_changes?
+ end
+
+ # Returns a hash containing all the changes that were just saved.
+ def saved_changes
+ mutations_before_last_save.changes
+ end
+
+ # Will this attribute change the next time we save?
+ #
+ # This method is useful in validations and before callbacks to determine
+ # if the next call to +save+ will change a particular attribute. It can be
+ # invoked as +will_save_change_to_name?+ instead of
+ # <tt>will_save_change_to_attribute("name")</tt>.
+ #
+ # ==== Options
+ #
+ # +from+ When passed, this method will return false unless the original
+ # value is equal to the given option
+ #
+ # +to+ When passed, this method will return false unless the value will be
+ # changed to the given value
+ def will_save_change_to_attribute?(attr_name, **options)
+ mutations_from_database.changed?(attr_name, **options)
+ end
+
+ # Returns the change to an attribute that will be persisted during the
+ # next save.
+ #
+ # This method is useful in validations and before callbacks, to see the
+ # change to an attribute that will occur when the record is saved. It can
+ # be invoked as +name_change_to_be_saved+ instead of
+ # <tt>attribute_change_to_be_saved("name")</tt>.
+ #
+ # If the attribute will change, the result will be an array containing the
+ # original value and the new value about to be saved.
+ def attribute_change_to_be_saved(attr_name)
+ mutations_from_database.change_to_attribute(attr_name)
+ end
+
+ # Returns the value of an attribute in the database, as opposed to the
+ # in-memory value that will be persisted the next time the record is
+ # saved.
+ #
+ # This method is useful in validations and before callbacks, to see the
+ # original value of an attribute prior to any changes about to be
+ # saved. It can be invoked as +name_in_database+ instead of
+ # <tt>attribute_in_database("name")</tt>.
+ def attribute_in_database(attr_name)
+ mutations_from_database.original_value(attr_name)
+ end
+
+ # Will the next call to +save+ have any changes to persist?
+ def has_changes_to_save?
+ mutations_from_database.any_changes?
+ end
+
+ # Returns a hash containing all the changes that will be persisted during
+ # the next save.
+ def changes_to_save
+ mutations_from_database.changes
+ end
+
+ # Returns an array of the names of any attributes that will change when
+ # the record is next saved.
+ def changed_attribute_names_to_save
+ mutations_from_database.changed_attribute_names
+ end
+
+ # Returns a hash of the attributes that will change when the record is
+ # next saved.
+ #
+ # The hash keys are the attribute names, and the hash values are the
+ # original attribute values in the database (as opposed to the in-memory
+ # values about to be saved).
+ def attributes_in_database
+ mutations_from_database.changed_values
end
- def clear_changes_information
- @previous_mutation_tracker = nil
- @changed_attributes = HashWithIndifferentAccess.new
- store_original_attributes
- end
-
- def raw_write_attribute(attr_name, *)
- result = super
- clear_attribute_change(attr_name)
- result
- end
-
- def clear_attribute_changes(attr_names)
- super
- attr_names.each do |attr_name|
- clear_attribute_change(attr_name)
+ private
+ def write_attribute_without_type_cast(attr_name, value)
+ name = attr_name.to_s
+ if self.class.attribute_alias?(name)
+ name = self.class.attribute_alias(name)
+ end
+ result = super(name, value)
+ clear_attribute_change(name)
+ result
end
- end
- def changed_attributes
- # This should only be set by methods which will call changed_attributes
- # multiple times when it is known that the computed value cannot change.
- if defined?(@cached_changed_attributes)
- @cached_changed_attributes
- else
- super.reverse_merge(mutation_tracker.changed_values).freeze
+ def _update_record(attribute_names = attribute_names_for_partial_writes)
+ affected_rows = super
+ changes_applied
+ affected_rows
end
- end
- def changes
- cache_changed_attributes do
- super
+ def _create_record(attribute_names = attribute_names_for_partial_writes)
+ id = super
+ changes_applied
+ id
end
- end
-
- def previous_changes
- previous_mutation_tracker.changes
- end
- def attribute_changed_in_place?(attr_name)
- mutation_tracker.changed_in_place?(attr_name)
- end
-
- private
-
- def mutation_tracker
- unless defined?(@mutation_tracker)
- @mutation_tracker = nil
+ def attribute_names_for_partial_writes
+ partial_writes? ? changed_attribute_names_to_save : attribute_names
end
- @mutation_tracker ||= AttributeMutationTracker.new(@attributes)
- end
-
- def changes_include?(attr_name)
- super || mutation_tracker.changed?(attr_name)
- end
-
- def clear_attribute_change(attr_name)
- mutation_tracker.forget_change(attr_name)
- end
-
- def _update_record(*)
- partial_writes? ? super(keys_for_partial_write) : super
- end
-
- def _create_record(*)
- partial_writes? ? super(keys_for_partial_write) : super
- end
-
- def keys_for_partial_write
- changed & self.class.column_names
- end
-
- def store_original_attributes
- @attributes = @attributes.map(&:forgetting_assignment)
- @mutation_tracker = nil
- end
-
- def previous_mutation_tracker
- @previous_mutation_tracker ||= NullMutationTracker.instance
- end
-
- def cache_changed_attributes
- @cached_changed_attributes = changed_attributes
- yield
- ensure
- clear_changed_attributes_cache
- end
-
- def clear_changed_attributes_cache
- remove_instance_variable(:@cached_changed_attributes) if defined?(@cached_changed_attributes)
- end
end
end
end
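
The new saved_change_to_* / *_to_be_saved API documented above splits "what changed in the save that just ran" from "what will change in the next save", which the older dirty methods conflated inside callbacks. A hedged usage sketch (Person and its name column are illustrative):

    person = Person.create!(name: "Alice")
    person.name = "Bob"

    # Before saving: the pending change.
    person.will_save_change_to_name?  # => true
    person.name_change_to_be_saved    # => ["Alice", "Bob"]
    person.name_in_database           # => "Alice"
    person.changes_to_save            # => { "name" => ["Alice", "Bob"] }

    person.save!

    # After saving: the change that was just persisted.
    person.saved_change_to_name?      # => true
    person.saved_change_to_name       # => ["Alice", "Bob"]
    person.name_before_last_save      # => "Alice"
    person.saved_changes?             # => true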
diff --git a/activerecord/lib/active_record/attribute_methods/primary_key.rb b/activerecord/lib/active_record/attribute_methods/primary_key.rb
index 0d5cb8b37c..6af5346fa7 100644
--- a/activerecord/lib/active_record/attribute_methods/primary_key.rb
+++ b/activerecord/lib/active_record/attribute_methods/primary_key.rb
@@ -1,4 +1,6 @@
-require 'set'
+# frozen_string_literal: true
+
+require "set"
module ActiveRecord
module AttributeMethods
@@ -8,121 +10,135 @@ module ActiveRecord
# Returns this record's primary key value wrapped in an array if one is
# available.
def to_key
- sync_with_transaction_state
- key = self.id
+ key = id
[key] if key
end
- # Returns the primary key value.
+ # Returns the primary key column's value.
def id
- if pk = self.class.primary_key
- sync_with_transaction_state
- _read_attribute(pk)
- end
+ sync_with_transaction_state
+ primary_key = self.class.primary_key
+ _read_attribute(primary_key) if primary_key
end
- # Sets the primary key value.
+ # Sets the primary key column's value.
def id=(value)
sync_with_transaction_state
- write_attribute(self.class.primary_key, value) if self.class.primary_key
+ primary_key = self.class.primary_key
+ _write_attribute(primary_key, value) if primary_key
end
- # Queries the primary key value.
+ # Queries the primary key column's value.
def id?
sync_with_transaction_state
query_attribute(self.class.primary_key)
end
- # Returns the primary key value before type cast.
+ # Returns the primary key column's value before type cast.
def id_before_type_cast
sync_with_transaction_state
read_attribute_before_type_cast(self.class.primary_key)
end
- # Returns the primary key previous value.
+ # Returns the primary key column's previous value.
def id_was
sync_with_transaction_state
attribute_was(self.class.primary_key)
end
- protected
-
- def attribute_method?(attr_name)
- attr_name == 'id' || super
+ # Returns the primary key column's value from the database.
+ def id_in_database
+ sync_with_transaction_state
+ attribute_in_database(self.class.primary_key)
end
- module ClassMethods
- def define_method_attribute(attr_name)
- super
+ private
- if attr_name == primary_key && attr_name != 'id'
- generated_attribute_methods.send(:alias_method, :id, primary_key)
- end
+ def attribute_method?(attr_name)
+ attr_name == "id" || super
end
- ID_ATTRIBUTE_METHODS = %w(id id= id? id_before_type_cast id_was).to_set
+ module ClassMethods
+ ID_ATTRIBUTE_METHODS = %w(id id= id? id_before_type_cast id_was id_in_database).to_set
- def dangerous_attribute_method?(method_name)
- super && !ID_ATTRIBUTE_METHODS.include?(method_name)
- end
+ def instance_method_already_implemented?(method_name)
+ super || primary_key && ID_ATTRIBUTE_METHODS.include?(method_name)
+ end
- # Defines the primary key field -- can be overridden in subclasses.
- # Overwriting will negate any effect of the +primary_key_prefix_type+
- # setting, though.
- def primary_key
- @primary_key = reset_primary_key unless defined? @primary_key
- @primary_key
- end
+ def dangerous_attribute_method?(method_name)
+ super && !ID_ATTRIBUTE_METHODS.include?(method_name)
+ end
- # Returns a quoted version of the primary key name, used to construct
- # SQL statements.
- def quoted_primary_key
- @quoted_primary_key ||= connection.quote_column_name(primary_key)
- end
+ # Defines the primary key field -- can be overridden in subclasses.
+ # Overwriting will negate any effect of the +primary_key_prefix_type+
+ # setting, though.
+ def primary_key
+ @primary_key = reset_primary_key unless defined? @primary_key
+ @primary_key
+ end
- def reset_primary_key #:nodoc:
- if self == base_class
- self.primary_key = get_primary_key(base_class.name)
- else
- self.primary_key = base_class.primary_key
+ # Returns a quoted version of the primary key name, used to construct
+ # SQL statements.
+ def quoted_primary_key
+ @quoted_primary_key ||= connection.quote_column_name(primary_key)
end
- end
- def get_primary_key(base_name) #:nodoc:
- if base_name && primary_key_prefix_type == :table_name
- base_name.foreign_key(false)
- elsif base_name && primary_key_prefix_type == :table_name_with_underscore
- base_name.foreign_key
- else
- if ActiveRecord::Base != self && table_exists?
- connection.schema_cache.primary_keys(table_name)
+ def reset_primary_key #:nodoc:
+ if base_class?
+ self.primary_key = get_primary_key(base_class.name)
else
- 'id'
+ self.primary_key = base_class.primary_key
end
end
- end
- # Sets the name of the primary key column.
- #
- # class Project < ActiveRecord::Base
- # self.primary_key = 'sysid'
- # end
- #
- # You can also define the #primary_key method yourself:
- #
- # class Project < ActiveRecord::Base
- # def self.primary_key
- # 'foo_' + super
- # end
- # end
- #
- # Project.primary_key # => "foo_id"
- def primary_key=(value)
- @primary_key = value && value.to_s
- @quoted_primary_key = nil
- @attributes_builder = nil
+ def get_primary_key(base_name) #:nodoc:
+ if base_name && primary_key_prefix_type == :table_name
+ base_name.foreign_key(false)
+ elsif base_name && primary_key_prefix_type == :table_name_with_underscore
+ base_name.foreign_key
+ else
+ if ActiveRecord::Base != self && table_exists?
+ pk = connection.schema_cache.primary_keys(table_name)
+ suppress_composite_primary_key(pk)
+ else
+ "id"
+ end
+ end
+ end
+
+ # Sets the name of the primary key column.
+ #
+ # class Project < ActiveRecord::Base
+ # self.primary_key = 'sysid'
+ # end
+ #
+ # You can also define the #primary_key method yourself:
+ #
+ # class Project < ActiveRecord::Base
+ # def self.primary_key
+ # 'foo_' + super
+ # end
+ # end
+ #
+ # Project.primary_key # => "foo_id"
+ def primary_key=(value)
+ @primary_key = value && value.to_s
+ @quoted_primary_key = nil
+ @attributes_builder = nil
+ end
+
+ private
+
+ def suppress_composite_primary_key(pk)
+ return pk unless pk.is_a?(Array)
+
+ warn <<~WARNING
+                WARNING: Active Record does not support composite primary keys.
+
+                #{table_name} has a composite primary key. The composite primary key is ignored.
+ WARNING
+ end
end
- end
end
end
end
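
Alongside the existing id helpers, id_in_database is added here to mirror attribute_in_database for the primary key column. A hedged sketch (Project is an illustrative model):

    project = Project.create!(name: "hammer")
    project.id = 99           # changes the primary key in memory only
    project.id                # => 99
    project.id_in_database    # => the id still stored on the row
    project.id_was            # => the value before the in-memory assignment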
diff --git a/activerecord/lib/active_record/attribute_methods/query.rb b/activerecord/lib/active_record/attribute_methods/query.rb
index 10498f4322..6811f54b10 100644
--- a/activerecord/lib/active_record/attribute_methods/query.rb
+++ b/activerecord/lib/active_record/attribute_methods/query.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
module ActiveRecord
module AttributeMethods
module Query
@@ -14,8 +16,7 @@ module ActiveRecord
when true then true
when false, nil then false
else
- column = self.class.columns_hash[attr_name]
- if column.nil?
+ if !type_for_attribute(attr_name) { false }
if Numeric === value || value !~ /[^0-9]/
!value.to_i.zero?
else
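
For context, the predicate readers this hunk touches treat numeric values and numeric-looking strings specially: zero is falsey, anything else is truthy, and other values fall back to a blank? check. A hedged sketch, assuming Person has an integer age column and a string name column:

    person = Person.new(age: 0, name: "")
    person.age?   # => false, zero counts as false
    person.name?  # => false, a blank string counts as false

    person.age  = 30
    person.name = "Bob"
    person.age?   # => true
    person.name?  # => true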
diff --git a/activerecord/lib/active_record/attribute_methods/read.rb b/activerecord/lib/active_record/attribute_methods/read.rb
index ab2ecaa7c5..ffac5313ad 100644
--- a/activerecord/lib/active_record/attribute_methods/read.rb
+++ b/activerecord/lib/active_record/attribute_methods/read.rb
@@ -1,47 +1,28 @@
+# frozen_string_literal: true
+
module ActiveRecord
module AttributeMethods
module Read
extend ActiveSupport::Concern
- module ClassMethods
- protected
-
- # We want to generate the methods via module_eval rather than
- # define_method, because define_method is slower on dispatch.
- # Evaluating many similar methods may use more memory as the instruction
- # sequences are duplicated and cached (in MRI). define_method may
- # be slower on dispatch, but if you're careful about the closure
- # created, then define_method will consume much less memory.
- #
- # But sometimes the database might return columns with
- # characters that are not allowed in normal method names (like
- # 'my_column(omg)'. So to work around this we first define with
- # the __temp__ identifier, and then use alias method to rename
- # it to what we want.
- #
- # We are also defining a constant to hold the frozen string of
- # the attribute name. Using a constant means that we do not have
- # to allocate an object on each call to the attribute method.
- # Making it frozen means that it doesn't get duped when used to
- # key the @attributes in read_attribute.
- def define_method_attribute(name)
- safe_name = name.unpack('h*'.freeze).first
- temp_method = "__temp__#{safe_name}"
-
- ActiveRecord::AttributeMethods::AttrNames.set_name_cache safe_name, name
-
- generated_attribute_methods.module_eval <<-STR, __FILE__, __LINE__ + 1
- def #{temp_method}
- name = ::ActiveRecord::AttributeMethods::AttrNames::ATTR_#{safe_name}
- _read_attribute(name) { |n| missing_attribute(n, caller) }
+ module ClassMethods # :nodoc:
+ private
+
+ def define_method_attribute(name)
+ sync_with_transaction_state = "sync_with_transaction_state" if name == primary_key
+
+ ActiveModel::AttributeMethods::AttrNames.define_attribute_accessor_method(
+ generated_attribute_methods, name
+ ) do |temp_method_name, attr_name_expr|
+ generated_attribute_methods.module_eval <<-RUBY, __FILE__, __LINE__ + 1
+ def #{temp_method_name}
+ #{sync_with_transaction_state}
+ name = #{attr_name_expr}
+ _read_attribute(name) { |n| missing_attribute(n, caller) }
+ end
+ RUBY
end
- STR
-
- generated_attribute_methods.module_eval do
- alias_method name, temp_method
- undef_method temp_method
end
- end
end
# Returns the value of the attribute identified by <tt>attr_name</tt> after
@@ -49,27 +30,24 @@ module ActiveRecord
# to a date object, like Date.new(2004, 12, 12)).
def read_attribute(attr_name, &block)
name = attr_name.to_s
- name = self.class.primary_key if name == 'id'.freeze
+ if self.class.attribute_alias?(name)
+ name = self.class.attribute_alias(name)
+ end
+
+ primary_key = self.class.primary_key
+ name = primary_key if name == "id" && primary_key
+ sync_with_transaction_state if name == primary_key
_read_attribute(name, &block)
end
# This method exists to avoid the expensive primary_key check internally, without
# breaking compatibility with the read_attribute API
- if defined?(JRUBY_VERSION)
- # This form is significantly faster on JRuby, and this is one of our biggest hotspots.
- # https://github.com/jruby/jruby/pull/2562
- def _read_attribute(attr_name, &block) # :nodoc
- @attributes.fetch_value(attr_name.to_s, &block)
- end
- else
- def _read_attribute(attr_name) # :nodoc:
- @attributes.fetch_value(attr_name.to_s) { |n| yield n if block_given? }
- end
+      def _read_attribute(attr_name, &block) # :nodoc:
+ @attributes.fetch_value(attr_name.to_s, &block)
end
alias :attribute :_read_attribute
private :attribute
-
end
end
end
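
read_attribute (and therefore []) now resolves attribute aliases and the "id" shorthand before fetching the value. A hedged sketch (the alias is invented for illustration):

    class Person < ActiveRecord::Base
      alias_attribute :nickname, :name
    end

    person = Person.new(name: "Dave")
    person.read_attribute(:nickname)  # => "Dave" (alias resolved to "name")
    person[:nickname]                 # => "Dave" ([] is built on read_attribute)
    person.read_attribute(:id)        # "id" is remapped to the model's primary key column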
diff --git a/activerecord/lib/active_record/attribute_methods/serialization.rb b/activerecord/lib/active_record/attribute_methods/serialization.rb
index 65978aea2a..6e0e90f39c 100644
--- a/activerecord/lib/active_record/attribute_methods/serialization.rb
+++ b/activerecord/lib/active_record/attribute_methods/serialization.rb
@@ -1,8 +1,20 @@
+# frozen_string_literal: true
+
module ActiveRecord
module AttributeMethods
module Serialization
extend ActiveSupport::Concern
+ class ColumnNotSerializableError < StandardError
+ def initialize(name, type)
+ super <<~EOS
+            Column `#{name}` of type #{type.class} does not support the `serialize` feature.
+ Usually it means that you are trying to use `serialize`
+ on a column that already implements serialization natively.
+ EOS
+ end
+ end
+
module ClassMethods
# If you have an attribute that needs to be saved to the database as an
# object, and retrieved as the same object, then specify the name of that
@@ -26,7 +38,7 @@ module ActiveRecord
# ==== Parameters
#
# * +attr_name+ - The field name that should be serialized.
- # * +class_name_or_coder+ - Optional, a coder object, which responds to `.load` / `.dump`
+ # * +class_name_or_coder+ - Optional, a coder object, which responds to +.load+ and +.dump+
# or a class name that the object type should be equal to.
#
# ==== Example
@@ -50,17 +62,28 @@ module ActiveRecord
# to ensure special objects (e.g. Active Record models) are dumped correctly
# using the #as_json hook.
coder = if class_name_or_coder == ::JSON
- Coders::JSON
- elsif [:load, :dump].all? { |x| class_name_or_coder.respond_to?(x) }
- class_name_or_coder
- else
- Coders::YAMLColumn.new(class_name_or_coder)
- end
+ Coders::JSON
+ elsif [:load, :dump].all? { |x| class_name_or_coder.respond_to?(x) }
+ class_name_or_coder
+ else
+ Coders::YAMLColumn.new(attr_name, class_name_or_coder)
+ end
decorate_attribute_type(attr_name, :serialize) do |type|
+ if type_incompatible_with_serialize?(type, class_name_or_coder)
+ raise ColumnNotSerializableError.new(attr_name, type)
+ end
+
Type::Serialized.new(type, coder)
end
end
+
+ private
+
+ def type_incompatible_with_serialize?(type, class_name)
+ type.is_a?(ActiveRecord::Type::Json) && class_name == ::JSON ||
+ type.respond_to?(:type_cast_array, true) && class_name == ::Array
+ end
end
end
end
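
The new ColumnNotSerializableError stops serialize from being stacked on a column whose type already serializes natively (for example a database-native json column). A hedged sketch (User and its columns are illustrative):

    class User < ActiveRecord::Base
      serialize :preferences        # YAML-coded text column
      serialize :tags, Array        # value must deserialize to an Array
      # serialize :settings, JSON   # would raise ColumnNotSerializableError
      #                             # if settings is already a json-typed column
    end

    user = User.create!(preferences: { "theme" => "dark" }, tags: %w[admin beta])
    user.reload.preferences  # => { "theme" => "dark" }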
diff --git a/activerecord/lib/active_record/attribute_methods/time_zone_conversion.rb b/activerecord/lib/active_record/attribute_methods/time_zone_conversion.rb
index e160460286..294a3dc32c 100644
--- a/activerecord/lib/active_record/attribute_methods/time_zone_conversion.rb
+++ b/activerecord/lib/active_record/attribute_methods/time_zone_conversion.rb
@@ -1,4 +1,4 @@
-require 'active_support/core_ext/string/strip'
+# frozen_string_literal: true
module ActiveRecord
module AttributeMethods
@@ -26,90 +26,65 @@ module ActiveRecord
private
- def convert_time_to_time_zone(value)
- return if value.nil?
+ def convert_time_to_time_zone(value)
+ return if value.nil?
- if value.acts_like?(:time)
- value.in_time_zone
- elsif value.is_a?(::Float)
- value
- else
- map_avoiding_infinite_recursion(value) { |v| convert_time_to_time_zone(v) }
+ if value.acts_like?(:time)
+ value.in_time_zone
+ elsif value.is_a?(::Float)
+ value
+ else
+ map_avoiding_infinite_recursion(value) { |v| convert_time_to_time_zone(v) }
+ end
end
- end
- def set_time_zone_without_conversion(value)
- ::Time.zone.local_to_utc(value).in_time_zone
- end
+ def set_time_zone_without_conversion(value)
+ ::Time.zone.local_to_utc(value).try(:in_time_zone) if value
+ end
- def map_avoiding_infinite_recursion(value)
- map(value) do |v|
- if value.equal?(v)
- nil
- else
- yield(v)
+ def map_avoiding_infinite_recursion(value)
+ map(value) do |v|
+ if value.equal?(v)
+ nil
+ else
+ yield(v)
+ end
end
end
- end
end
extend ActiveSupport::Concern
included do
- mattr_accessor :time_zone_aware_attributes, instance_writer: false
- self.time_zone_aware_attributes = false
-
- class_attribute :skip_time_zone_conversion_for_attributes, instance_writer: false
- self.skip_time_zone_conversion_for_attributes = []
+ mattr_accessor :time_zone_aware_attributes, instance_writer: false, default: false
- class_attribute :time_zone_aware_types, instance_writer: false
- self.time_zone_aware_types = [:datetime, :not_explicitly_configured]
+ class_attribute :skip_time_zone_conversion_for_attributes, instance_writer: false, default: []
+ class_attribute :time_zone_aware_types, instance_writer: false, default: [ :datetime, :time ]
end
- module ClassMethods
+ module ClassMethods # :nodoc:
private
- def inherited(subclass)
- # We need to apply this decorator here, rather than on module inclusion. The closure
- # created by the matcher would otherwise evaluate for `ActiveRecord::Base`, not the
- # sub class being decorated. As such, changes to `time_zone_aware_attributes`, or
- # `skip_time_zone_conversion_for_attributes` would not be picked up.
- subclass.class_eval do
- matcher = ->(name, type) { create_time_zone_conversion_attribute?(name, type) }
- decorate_matching_attribute_types(matcher, :_time_zone_conversion) do |type|
- TimeZoneConverter.new(type)
+ def inherited(subclass)
+ super
+ # We need to apply this decorator here, rather than on module inclusion. The closure
+ # created by the matcher would otherwise evaluate for `ActiveRecord::Base`, not the
+ # sub class being decorated. As such, changes to `time_zone_aware_attributes`, or
+ # `skip_time_zone_conversion_for_attributes` would not be picked up.
+ subclass.class_eval do
+ matcher = ->(name, type) { create_time_zone_conversion_attribute?(name, type) }
+ decorate_matching_attribute_types(matcher, "_time_zone_conversion") do |type|
+ TimeZoneConverter.new(type)
+ end
end
end
- super
- end
-
- def create_time_zone_conversion_attribute?(name, cast_type)
- enabled_for_column = time_zone_aware_attributes &&
- !self.skip_time_zone_conversion_for_attributes.include?(name.to_sym)
- result = enabled_for_column &&
- time_zone_aware_types.include?(cast_type.type)
-
- if enabled_for_column &&
- !result &&
- cast_type.type == :time &&
- time_zone_aware_types.include?(:not_explicitly_configured)
- ActiveSupport::Deprecation.warn(<<-MESSAGE.strip_heredoc)
- Time columns will become time zone aware in Rails 5.1. This
- still causes `String`s to be parsed as if they were in `Time.zone`,
- and `Time`s to be converted to `Time.zone`.
- To keep the old behavior, you must add the following to your initializer:
+ def create_time_zone_conversion_attribute?(name, cast_type)
+ enabled_for_column = time_zone_aware_attributes &&
+ !skip_time_zone_conversion_for_attributes.include?(name.to_sym)
- config.active_record.time_zone_aware_types = [:datetime]
-
- To silence this deprecation warning, add the following:
-
- config.active_record.time_zone_aware_types = [:datetime, :time]
- MESSAGE
+ enabled_for_column && time_zone_aware_types.include?(cast_type.type)
end
-
- result
- end
end
end
end
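Illustrative sketch (not part of the patch): how the class-level settings shown above are typically set. The config line mirrors the removed deprecation message; the Event model and its column are made up.

    # config/application.rb -- time columns are now time-zone aware by default,
    # matching the new :time entry in time_zone_aware_types above.
    config.active_record.time_zone_aware_types = [:datetime, :time]

    # app/models/event.rb -- opt a single column out of conversion.
    class Event < ActiveRecord::Base
      self.skip_time_zone_conversion_for_attributes = [:doors_open_at]
    end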
diff --git a/activerecord/lib/active_record/attribute_methods/write.rb b/activerecord/lib/active_record/attribute_methods/write.rb
index 5599b590ca..455e67e19b 100644
--- a/activerecord/lib/active_record/attribute_methods/write.rb
+++ b/activerecord/lib/active_record/attribute_methods/write.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
module ActiveRecord
module AttributeMethods
module Write
@@ -7,53 +9,59 @@ module ActiveRecord
attribute_method_suffix "="
end
- module ClassMethods
- protected
+ module ClassMethods # :nodoc:
+ private
- def define_method_attribute=(name)
- safe_name = name.unpack('h*'.freeze).first
- ActiveRecord::AttributeMethods::AttrNames.set_name_cache safe_name, name
+ def define_method_attribute=(name)
+ sync_with_transaction_state = "sync_with_transaction_state" if name == primary_key
- generated_attribute_methods.module_eval <<-STR, __FILE__, __LINE__ + 1
- def __temp__#{safe_name}=(value)
- name = ::ActiveRecord::AttributeMethods::AttrNames::ATTR_#{safe_name}
- write_attribute(name, value)
+ ActiveModel::AttributeMethods::AttrNames.define_attribute_accessor_method(
+ generated_attribute_methods, name, writer: true,
+ ) do |temp_method_name, attr_name_expr|
+ generated_attribute_methods.module_eval <<-RUBY, __FILE__, __LINE__ + 1
+ def #{temp_method_name}(value)
+ name = #{attr_name_expr}
+ #{sync_with_transaction_state}
+ _write_attribute(name, value)
+ end
+ RUBY
end
- alias_method #{(name + '=').inspect}, :__temp__#{safe_name}=
- undef_method :__temp__#{safe_name}=
- STR
- end
+ end
end
# Updates the attribute identified by <tt>attr_name</tt> with the
- # specified +value+. Empty strings for fixnum and float columns are
+ # specified +value+. Empty strings for Integer and Float columns are
# turned into +nil+.
def write_attribute(attr_name, value)
- write_attribute_with_type_cast(attr_name, value, true)
- end
+ name = attr_name.to_s
+ if self.class.attribute_alias?(name)
+ name = self.class.attribute_alias(name)
+ end
- def raw_write_attribute(attr_name, value) # :nodoc:
- write_attribute_with_type_cast(attr_name, value, false)
+ primary_key = self.class.primary_key
+ name = primary_key if name == "id" && primary_key
+ sync_with_transaction_state if name == primary_key
+ _write_attribute(name, value)
end
- private
- # Handle *= for method_missing.
- def attribute=(attribute_name, value)
- write_attribute(attribute_name, value)
+ # This method exists to avoid the expensive primary_key check internally, without
+ # breaking compatibility with the write_attribute API
+ def _write_attribute(attr_name, value) # :nodoc:
+ @attributes.write_from_user(attr_name.to_s, value)
+ value
end
- def write_attribute_with_type_cast(attr_name, value, should_type_cast)
- attr_name = attr_name.to_s
- attr_name = self.class.primary_key if attr_name == 'id' && self.class.primary_key
-
- if should_type_cast
- @attributes.write_from_user(attr_name, value)
- else
- @attributes.write_cast_value(attr_name, value)
+ private
+ def write_attribute_without_type_cast(attr_name, value)
+ name = attr_name.to_s
+ @attributes.write_cast_value(name, value)
+ value
end
- value
- end
+ # Handle *= for method_missing.
+ def attribute=(attribute_name, value)
+ _write_attribute(attribute_name, value)
+ end
end
end
end
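Illustrative sketch (not part of the patch): the writer path after this change, assuming a hypothetical Post model with a title column and alias_attribute :headline, :title. write_attribute resolves the alias and the primary key, then delegates to the cheaper _write_attribute; the generated writers go through _write_attribute directly.

    post = Post.new
    post.write_attribute(:headline, "Hello")  # alias resolved, then _write_attribute
    post.title = "Updated"                    # generated writer calls _write_attribute
    post[:title]                              # => "Updated"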
diff --git a/activerecord/lib/active_record/attribute_mutation_tracker.rb b/activerecord/lib/active_record/attribute_mutation_tracker.rb
deleted file mode 100644
index 0133b4d0be..0000000000
--- a/activerecord/lib/active_record/attribute_mutation_tracker.rb
+++ /dev/null
@@ -1,70 +0,0 @@
-module ActiveRecord
- class AttributeMutationTracker # :nodoc:
- def initialize(attributes)
- @attributes = attributes
- end
-
- def changed_values
- attr_names.each_with_object({}.with_indifferent_access) do |attr_name, result|
- if changed?(attr_name)
- result[attr_name] = attributes[attr_name].original_value
- end
- end
- end
-
- def changes
- attr_names.each_with_object({}.with_indifferent_access) do |attr_name, result|
- if changed?(attr_name)
- result[attr_name] = [attributes[attr_name].original_value, attributes.fetch_value(attr_name)]
- end
- end
- end
-
- def changed?(attr_name)
- attr_name = attr_name.to_s
- attributes[attr_name].changed?
- end
-
- def changed_in_place?(attr_name)
- attributes[attr_name].changed_in_place?
- end
-
- def forget_change(attr_name)
- attr_name = attr_name.to_s
- attributes[attr_name] = attributes[attr_name].forgetting_assignment
- end
-
- protected
-
- attr_reader :attributes
-
- private
-
- def attr_names
- attributes.keys
- end
- end
-
- class NullMutationTracker # :nodoc:
- include Singleton
-
- def changed_values
- {}
- end
-
- def changes
- {}
- end
-
- def changed?(*)
- false
- end
-
- def changed_in_place?(*)
- false
- end
-
- def forget_change(*)
- end
- end
-end
diff --git a/activerecord/lib/active_record/attribute_set.rb b/activerecord/lib/active_record/attribute_set.rb
deleted file mode 100644
index be581ac2a9..0000000000
--- a/activerecord/lib/active_record/attribute_set.rb
+++ /dev/null
@@ -1,108 +0,0 @@
-require 'active_record/attribute_set/builder'
-
-module ActiveRecord
- class AttributeSet # :nodoc:
- def initialize(attributes)
- @attributes = attributes
- end
-
- def [](name)
- attributes[name] || Attribute.null(name)
- end
-
- def []=(name, value)
- attributes[name] = value
- end
-
- def values_before_type_cast
- attributes.transform_values(&:value_before_type_cast)
- end
-
- def to_hash
- initialized_attributes.transform_values(&:value)
- end
- alias_method :to_h, :to_hash
-
- def key?(name)
- attributes.key?(name) && self[name].initialized?
- end
-
- def keys
- attributes.each_key.select { |name| self[name].initialized? }
- end
-
- if defined?(JRUBY_VERSION)
- # This form is significantly faster on JRuby, and this is one of our biggest hotspots.
- # https://github.com/jruby/jruby/pull/2562
- def fetch_value(name, &block)
- self[name].value(&block)
- end
- else
- def fetch_value(name)
- self[name].value { |n| yield n if block_given? }
- end
- end
-
- def write_from_database(name, value)
- attributes[name] = self[name].with_value_from_database(value)
- end
-
- def write_from_user(name, value)
- attributes[name] = self[name].with_value_from_user(value)
- end
-
- def write_cast_value(name, value)
- attributes[name] = self[name].with_cast_value(value)
- end
-
- def freeze
- @attributes.freeze
- super
- end
-
- def deep_dup
- dup.tap do |copy|
- copy.instance_variable_set(:@attributes, attributes.deep_dup)
- end
- end
-
- def initialize_dup(_)
- @attributes = attributes.dup
- super
- end
-
- def initialize_clone(_)
- @attributes = attributes.clone
- super
- end
-
- def reset(key)
- if key?(key)
- write_from_database(key, nil)
- end
- end
-
- def accessed
- attributes.select { |_, attr| attr.has_been_read? }.keys
- end
-
- def map(&block)
- new_attributes = attributes.transform_values(&block)
- AttributeSet.new(new_attributes)
- end
-
- def ==(other)
- attributes == other.attributes
- end
-
- protected
-
- attr_reader :attributes
-
- private
-
- def initialized_attributes
- attributes.select { |_, attr| attr.initialized? }
- end
- end
-end
diff --git a/activerecord/lib/active_record/attribute_set/builder.rb b/activerecord/lib/active_record/attribute_set/builder.rb
deleted file mode 100644
index 3bd7c7997b..0000000000
--- a/activerecord/lib/active_record/attribute_set/builder.rb
+++ /dev/null
@@ -1,108 +0,0 @@
-require 'active_record/attribute'
-
-module ActiveRecord
- class AttributeSet # :nodoc:
- class Builder # :nodoc:
- attr_reader :types, :always_initialized
-
- def initialize(types, always_initialized = nil)
- @types = types
- @always_initialized = always_initialized
- end
-
- def build_from_database(values = {}, additional_types = {})
- if always_initialized && !values.key?(always_initialized)
- values[always_initialized] = nil
- end
-
- attributes = LazyAttributeHash.new(types, values, additional_types)
- AttributeSet.new(attributes)
- end
- end
- end
-
- class LazyAttributeHash # :nodoc:
- delegate :transform_values, :each_key, to: :materialize
-
- def initialize(types, values, additional_types)
- @types = types
- @values = values
- @additional_types = additional_types
- @materialized = false
- @delegate_hash = {}
- end
-
- def key?(key)
- delegate_hash.key?(key) || values.key?(key) || types.key?(key)
- end
-
- def [](key)
- delegate_hash[key] || assign_default_value(key)
- end
-
- def []=(key, value)
- if frozen?
- raise RuntimeError, "Can't modify frozen hash"
- end
- delegate_hash[key] = value
- end
-
- def deep_dup
- dup.tap do |copy|
- copy.instance_variable_set(:@delegate_hash, delegate_hash.transform_values(&:dup))
- end
- end
-
- def initialize_dup(_)
- @delegate_hash = Hash[delegate_hash]
- super
- end
-
- def select
- keys = types.keys | values.keys | delegate_hash.keys
- keys.each_with_object({}) do |key, hash|
- attribute = self[key]
- if yield(key, attribute)
- hash[key] = attribute
- end
- end
- end
-
- def ==(other)
- if other.is_a?(LazyAttributeHash)
- materialize == other.materialize
- else
- materialize == other
- end
- end
-
- protected
-
- attr_reader :types, :values, :additional_types, :delegate_hash
-
- def materialize
- unless @materialized
- values.each_key { |key| self[key] }
- types.each_key { |key| self[key] }
- unless frozen?
- @materialized = true
- end
- end
- delegate_hash
- end
-
- private
-
- def assign_default_value(name)
- type = additional_types.fetch(name, types[name])
- value_present = true
- value = values.fetch(name) { value_present = false }
-
- if value_present
- delegate_hash[name] = Attribute.from_database(name, value, type)
- elsif types.key?(name)
- delegate_hash[name] = Attribute.uninitialized(name, type)
- end
- end
- end
-end
diff --git a/activerecord/lib/active_record/attributes.rb b/activerecord/lib/active_record/attributes.rb
index e0ceafc617..7cf421c184 100644
--- a/activerecord/lib/active_record/attributes.rb
+++ b/activerecord/lib/active_record/attributes.rb
@@ -1,4 +1,6 @@
-require 'active_record/attribute/user_provided_default'
+# frozen_string_literal: true
+
+require "active_model/attribute/user_provided_default"
module ActiveRecord
# See ActiveRecord::Attributes::ClassMethods for documentation
@@ -6,8 +8,7 @@ module ActiveRecord
extend ActiveSupport::Concern
included do
- class_attribute :attributes_to_define_after_schema_loads, instance_accessor: false # :internal:
- self.attributes_to_define_after_schema_loads = {}
+ class_attribute :attributes_to_define_after_schema_loads, instance_accessor: false, default: {} # :internal:
end
module ClassMethods
@@ -34,12 +35,15 @@ module ActiveRecord
# is not passed, the previous default value (if any) will be used.
# Otherwise, the default will be +nil+.
#
- # +array+ (PG only) specifies that the type should be an array (see the
+ # +array+ (PostgreSQL only) specifies that the type should be an array (see the
# examples below).
#
- # +range+ (PG only) specifies that the type should be a range (see the
+ # +range+ (PostgreSQL only) specifies that the type should be a range (see the
# examples below).
#
+ # When using a symbol for +cast_type+, extra options are forwarded to the
+ # constructor of the type object.
+ #
# ==== Examples
#
# The type detected by Active Record can be overridden.
@@ -56,7 +60,7 @@ module ActiveRecord
# store_listing = StoreListing.new(price_in_cents: '10.1')
#
# # before
- # store_listing.price_in_cents # => BigDecimal.new(10.1)
+ # store_listing.price_in_cents # => BigDecimal(10.1)
#
# class StoreListing < ActiveRecord::Base
# attribute :price_in_cents, :integer
@@ -67,12 +71,14 @@ module ActiveRecord
#
# A default can also be provided.
#
+ # # db/schema.rb
# create_table :store_listings, force: true do |t|
# t.string :my_string, default: "original default"
# end
#
# StoreListing.new.my_string # => "original default"
#
+ # # app/models/store_listing.rb
# class StoreListing < ActiveRecord::Base
# attribute :my_string, :string, default: "new default"
# end
@@ -89,6 +95,7 @@ module ActiveRecord
#
# \Attributes do not need to be backed by a database column.
#
+ # # app/models/my_model.rb
# class MyModel < ActiveRecord::Base
# attribute :my_string, :string
# attribute :my_int_array, :integer, array: true
@@ -108,12 +115,22 @@ module ActiveRecord
# my_float_range: 1.0..3.5
# }
#
+ # Passing options to the type constructor
+ #
+ # # app/models/my_model.rb
+ # class MyModel < ActiveRecord::Base
+ # attribute :small_int, :integer, limit: 2
+ # end
+ #
+ # MyModel.create(small_int: 65537)
+ # # => Error: 65537 is out of range for the limit of two bytes
+ #
# ==== Creating Custom Types
#
# Users may also define their own custom types, as long as they respond
# to the methods defined on the value type. The method +deserialize+ or
# +cast+ will be called on your type object, with raw input from the
- # database or from your controllers. See ActiveRecord::Type::Value for the
+ # database or from your controllers. See ActiveModel::Type::Value for the
# expected API. It is recommended that your type objects inherit from an
# existing type, or from ActiveRecord::Type::Value
#
@@ -131,7 +148,7 @@ module ActiveRecord
# # config/initializers/types.rb
# ActiveRecord::Type.register(:money, MoneyType)
#
- # # /app/models/store_listing.rb
+ # # app/models/store_listing.rb
# class StoreListing < ActiveRecord::Base
# attribute :price_in_cents, :money
# end
@@ -140,7 +157,7 @@ module ActiveRecord
# store_listing.price_in_cents # => 1000
#
# For more details on creating custom types, see the documentation for
- # ActiveRecord::Type::Value. For more details on registering your types
+ # ActiveModel::Type::Value. For more details on registering your types
# to be referenced by a symbol, see ActiveRecord::Type.register. You can
# also pass a type object directly, in place of a symbol.
#
@@ -167,8 +184,10 @@ module ActiveRecord
# end
# end
#
+ # # config/initializers/types.rb
# ActiveRecord::Type.register(:money, MoneyType)
#
+ # # app/models/product.rb
# class Product < ActiveRecord::Base
# currency_converter = ConversionRatesFromTheInternet.new
# attribute :price_in_bitcoins, :money, currency_converter: currency_converter
@@ -185,8 +204,8 @@ module ActiveRecord
# The type of an attribute is given the opportunity to change how dirty
# tracking is performed. The methods +changed?+ and +changed_in_place?+
# will be called from ActiveModel::Dirty. See the documentation for those
- # methods in ActiveRecord::Type::Value for more details.
- def attribute(name, cast_type, **options)
+ # methods in ActiveModel::Type::Value for more details.
+ def attribute(name, cast_type = Type::Value.new, **options)
name = name.to_s
reload_schema_from_cache
@@ -237,24 +256,24 @@ module ActiveRecord
private
- NO_DEFAULT_PROVIDED = Object.new # :nodoc:
- private_constant :NO_DEFAULT_PROVIDED
+ NO_DEFAULT_PROVIDED = Object.new # :nodoc:
+ private_constant :NO_DEFAULT_PROVIDED
- def define_default_attribute(name, value, type, from_user:)
- if value == NO_DEFAULT_PROVIDED
- default_attribute = _default_attributes[name].with_type(type)
- elsif from_user
- default_attribute = Attribute::UserProvidedDefault.new(
- name,
- value,
- type,
- _default_attributes[name],
- )
- else
- default_attribute = Attribute.from_database(name, value, type)
+ def define_default_attribute(name, value, type, from_user:)
+ if value == NO_DEFAULT_PROVIDED
+ default_attribute = _default_attributes[name].with_type(type)
+ elsif from_user
+ default_attribute = ActiveModel::Attribute::UserProvidedDefault.new(
+ name,
+ value,
+ type,
+ _default_attributes.fetch(name.to_s) { nil },
+ )
+ else
+ default_attribute = ActiveModel::Attribute.from_database(name, value, type)
+ end
+ _default_attributes[name] = default_attribute
end
- _default_attributes[name] = default_attribute
- end
end
end
end
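Illustrative sketch (not part of the patch): with the new default for cast_type (Type::Value), attribute can now be called with just a name, in which case values pass through uncast. StoreListing follows the examples above; the metadata attribute is hypothetical.

    class StoreListing < ActiveRecord::Base
      attribute :metadata                        # no cast type: values pass through unchanged
      attribute :price_in_cents, :integer, default: 0
    end

    listing = StoreListing.new(metadata: { "foo" => "bar" }, price_in_cents: "10")
    listing.metadata        # => { "foo" => "bar" } (uncast)
    listing.price_in_cents  # => 10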
diff --git a/activerecord/lib/active_record/autosave_association.rb b/activerecord/lib/active_record/autosave_association.rb
index 06c7482bf9..fe94662543 100644
--- a/activerecord/lib/active_record/autosave_association.rb
+++ b/activerecord/lib/active_record/autosave_association.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
module ActiveRecord
# = Active Record Autosave Association
#
@@ -140,24 +142,23 @@ module ActiveRecord
included do
Associations::Builder::Association.extensions << AssociationBuilderExtension
- mattr_accessor :index_nested_attribute_errors, instance_writer: false
- self.index_nested_attribute_errors = false
+ mattr_accessor :index_nested_attribute_errors, instance_writer: false, default: false
end
module ClassMethods # :nodoc:
private
def define_non_cyclic_method(name, &block)
- return if method_defined?(name)
+ return if instance_methods(false).include?(name)
define_method(name) do |*args|
result = true; @_already_called ||= {}
# Loop prevention for validation of associations
unless @_already_called[name]
begin
- @_already_called[name]=true
+ @_already_called[name] = true
result = instance_eval(&block)
ensure
- @_already_called[name]=false
+ @_already_called[name] = false
end
end
@@ -181,6 +182,7 @@ module ActiveRecord
if reflection.collection?
before_save :before_save_collection_association
+ after_save :after_save_collection_association
define_non_cyclic_method(save_method) { save_collection_association(reflection) }
# Doesn't use after_save as that would save associations added in after_create/after_update twice
@@ -215,13 +217,7 @@ module ActiveRecord
method = :validate_single_association
end
- define_non_cyclic_method(validation_method) do
- send(method, reflection)
- # TODO: remove the following line as soon as the return value of
- # callbacks is ignored, that is, returning `false` does not
- # display a deprecation warning or halts the callback chain.
- true
- end
+ define_non_cyclic_method(validation_method) { send(method, reflection) }
validate validation_method
after_validation :_ensure_no_duplicate_errors
end
@@ -267,7 +263,7 @@ module ActiveRecord
# Returns whether or not this record has been changed in any way (including whether
# any of its nested autosave associations are likewise changed)
def changed_for_autosave?
- new_record? || changed? || marked_for_destruction? || nested_records_changed_for_autosave?
+ new_record? || has_changes_to_save? || marked_for_destruction? || nested_records_changed_for_autosave?
end
private
@@ -325,30 +321,24 @@ module ActiveRecord
# Returns whether or not the association is valid and applies any errors to
# the parent, <tt>self</tt>, if it wasn't. Skips any <tt>:autosave</tt>
# enabled records if they're marked_for_destruction? or destroyed.
- def association_valid?(reflection, record, index=nil)
+ def association_valid?(reflection, record, index = nil)
return true if record.destroyed? || (reflection.options[:autosave] && record.marked_for_destruction?)
- validation_context = self.validation_context unless [:create, :update].include?(self.validation_context)
- unless valid = record.valid?(validation_context)
+ context = validation_context unless [:create, :update].include?(validation_context)
+
+ unless valid = record.valid?(context)
if reflection.options[:autosave]
indexed_attribute = !index.nil? && (reflection.options[:index_errors] || ActiveRecord::Base.index_nested_attribute_errors)
record.errors.each do |attribute, message|
- if indexed_attribute
- attribute = "#{reflection.name}[#{index}].#{attribute}"
- else
- attribute = "#{reflection.name}.#{attribute}"
- end
+ attribute = normalize_reflection_attribute(indexed_attribute, reflection, index, attribute)
errors[attribute] << message
errors[attribute].uniq!
end
record.errors.details.each_key do |attribute|
- if indexed_attribute
- reflection_attribute = "#{reflection.name}[#{index}].#{attribute}"
- else
- reflection_attribute = "#{reflection.name}.#{attribute}"
- end
+ reflection_attribute =
+ normalize_reflection_attribute(indexed_attribute, reflection, index, attribute).to_sym
record.errors.details[attribute].each do |error|
errors.details[reflection_attribute] << error
@@ -362,11 +352,22 @@ module ActiveRecord
valid
end
+ def normalize_reflection_attribute(indexed_attribute, reflection, index, attribute)
+ if indexed_attribute
+ "#{reflection.name}[#{index}].#{attribute}"
+ else
+ "#{reflection.name}.#{attribute}"
+ end
+ end
+
# Is used as a before_save callback to check while saving a collection
# association whether or not the parent was a new record before saving.
def before_save_collection_association
@new_record_before_save = new_record?
- true
+ end
+
+ def after_save_collection_association
+ @new_record_before_save = false
end
# Saves any new associated records, or all loaded autosave associations if
@@ -381,6 +382,9 @@ module ActiveRecord
if association = association_instance_get(reflection.name)
autosave = reflection.options[:autosave]
+ # reconstruct the scope now that we know the owner's id
+ association.reset_scope
+
if records = associated_records_to_validate_or_save(association, @new_record_before_save, autosave)
if autosave
records_to_destroy = records.select(&:marked_for_destruction?)
@@ -396,19 +400,21 @@ module ActiveRecord
if autosave != false && (@new_record_before_save || record.new_record?)
if autosave
saved = association.insert_record(record, false)
- else
- association.insert_record(record) unless reflection.nested?
+ elsif !reflection.nested?
+ association_saved = association.insert_record(record)
+
+ if reflection.validate?
+ errors.add(reflection.name) unless association_saved
+ saved = association_saved
+ end
end
elsif autosave
- saved = record.save(:validate => false)
+ saved = record.save(validate: false)
end
raise ActiveRecord::Rollback unless saved
end
end
-
- # reconstruct the scope now that we know the owner's id
- association.reset_scope if association.respond_to?(:reset_scope)
end
end
@@ -435,9 +441,12 @@ module ActiveRecord
if (autosave && record.changed_for_autosave?) || new_record? || record_changed?(reflection, record, key)
unless reflection.through_reflection
record[reflection.foreign_key] = key
+ if inverse_reflection = reflection.inverse_of
+ record.association(inverse_reflection.name).loaded!
+ end
end
- saved = record.save(:validate => !autosave)
+ saved = record.save(validate: !autosave)
raise ActiveRecord::Rollback if !saved && autosave
saved
end
@@ -448,8 +457,14 @@ module ActiveRecord
# If the record is new or it has changed, returns true.
def record_changed?(reflection, record, key)
record.new_record? ||
- (record.has_attribute?(reflection.foreign_key) && record[reflection.foreign_key] != key) ||
- record.attribute_changed?(reflection.foreign_key)
+ association_foreign_key_changed?(reflection, record, key) ||
+ record.will_save_change_to_attribute?(reflection.foreign_key)
+ end
+
+ def association_foreign_key_changed?(reflection, record, key)
+ return false if reflection.through_reflection?
+
+ record.has_attribute?(reflection.foreign_key) && record[reflection.foreign_key] != key
end
# Saves the associated record if it's new or <tt>:autosave</tt> is enabled.
@@ -457,7 +472,9 @@ module ActiveRecord
# In addition, it will destroy the association if it was marked for destruction.
def save_belongs_to_association(reflection)
association = association_instance_get(reflection.name)
- record = association && association.load_target
+ return unless association && association.loaded? && !association.stale_target?
+
+ record = association.load_target
if record && !record.destroyed?
autosave = reflection.options[:autosave]
@@ -465,7 +482,7 @@ module ActiveRecord
self[reflection.foreign_key] = nil
record.destroy
elsif autosave != false
- saved = record.save(:validate => !autosave) if record.new_record? || (autosave && record.changed_for_autosave?)
+ saved = record.save(validate: !autosave) if record.new_record? || (autosave && record.changed_for_autosave?)
if association.updated?
association_id = record.send(reflection.options[:primary_key] || :id)
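Illustrative sketch (not part of the patch): how the indexed error keys built by normalize_reflection_attribute above look from the outside, assuming hypothetical Author/Book models where Book validates the presence of title.

    class Author < ActiveRecord::Base
      has_many :books, autosave: true, index_errors: true
    end

    author = Author.new(books: [Book.new(title: nil)])
    author.valid?                 # runs the association validation callback
    author.errors.messages.keys   # => includes :"books[0].title"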
diff --git a/activerecord/lib/active_record/base.rb b/activerecord/lib/active_record/base.rb
index 6a1a27ce41..db097cb930 100644
--- a/activerecord/lib/active_record/base.rb
+++ b/activerecord/lib/active_record/base.rb
@@ -1,25 +1,28 @@
-require 'yaml'
-require 'active_support/benchmarkable'
-require 'active_support/dependencies'
-require 'active_support/descendants_tracker'
-require 'active_support/time'
-require 'active_support/core_ext/module/attribute_accessors'
-require 'active_support/core_ext/array/extract_options'
-require 'active_support/core_ext/hash/deep_merge'
-require 'active_support/core_ext/hash/slice'
-require 'active_support/core_ext/hash/transform_values'
-require 'active_support/core_ext/string/behavior'
-require 'active_support/core_ext/kernel/singleton_class'
-require 'active_support/core_ext/module/introspection'
-require 'active_support/core_ext/object/duplicable'
-require 'active_support/core_ext/class/subclasses'
-require 'active_record/attribute_decorators'
-require 'active_record/errors'
-require 'active_record/log_subscriber'
-require 'active_record/explain_subscriber'
-require 'active_record/relation/delegation'
-require 'active_record/attributes'
-require 'active_record/type_caster'
+# frozen_string_literal: true
+
+require "yaml"
+require "active_support/benchmarkable"
+require "active_support/dependencies"
+require "active_support/descendants_tracker"
+require "active_support/time"
+require "active_support/core_ext/module/attribute_accessors"
+require "active_support/core_ext/array/extract_options"
+require "active_support/core_ext/hash/deep_merge"
+require "active_support/core_ext/hash/slice"
+require "active_support/core_ext/string/behavior"
+require "active_support/core_ext/kernel/singleton_class"
+require "active_support/core_ext/module/introspection"
+require "active_support/core_ext/object/duplicable"
+require "active_support/core_ext/class/subclasses"
+require "active_record/attribute_decorators"
+require "active_record/define_callbacks"
+require "active_record/errors"
+require "active_record/log_subscriber"
+require "active_record/explain_subscriber"
+require "active_record/relation/delegation"
+require "active_record/attributes"
+require "active_record/type_caster"
+require "active_record/database_configurations"
module ActiveRecord #:nodoc:
# = Active Record
@@ -286,6 +289,7 @@ module ActiveRecord #:nodoc:
extend Enum
extend Delegation::DelegateCache
extend CollectionCacheKey
+ extend Aggregations::ClassMethods
include Core
include Persistence
@@ -303,6 +307,7 @@ module ActiveRecord #:nodoc:
include AttributeDecorators
include Locking::Optimistic
include Locking::Pessimistic
+ include DefineCallbacks
include AttributeMethods
include Callbacks
include Timestamp
@@ -310,10 +315,9 @@ module ActiveRecord #:nodoc:
include ActiveModel::SecurePassword
include AutosaveAssociation
include NestedAttributes
- include Aggregations
include Transactions
- include NoTouching
include TouchLater
+ include NoTouching
include Reflection
include Serialization
include Store
diff --git a/activerecord/lib/active_record/callbacks.rb b/activerecord/lib/active_record/callbacks.rb
index 1f1b11eb68..ef5444dfc3 100644
--- a/activerecord/lib/active_record/callbacks.rb
+++ b/activerecord/lib/active_record/callbacks.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
module ActiveRecord
# = Active Record \Callbacks
#
@@ -53,9 +55,9 @@ module ActiveRecord
# end
#
# class Firm < ActiveRecord::Base
- # # Destroys the associated clients and people when the firm is destroyed
- # before_destroy { |record| Person.destroy_all "firm_id = #{record.id}" }
- # before_destroy { |record| Client.destroy_all "client_of = #{record.id}" }
+ # # Disables access to the system for associated clients and people when the firm is destroyed

+ # before_destroy { |record| Person.where(firm_id: record.id).update_all(access: 'disabled') }
+ # before_destroy { |record| Client.where(client_of: record.id).update_all(access: 'disabled') }
# end
#
# == Inheritable callback queues
@@ -73,21 +75,7 @@ module ActiveRecord
# end
#
# Now, when <tt>Topic#destroy</tt> is run only +destroy_author+ is called. When <tt>Reply#destroy</tt> is
- # run, both +destroy_author+ and +destroy_readers+ are called. Contrast this to the following situation
- # where the +before_destroy+ method is overridden:
- #
- # class Topic < ActiveRecord::Base
- # def before_destroy() destroy_author end
- # end
- #
- # class Reply < Topic
- # def before_destroy() destroy_readers end
- # end
- #
- # In that case, <tt>Reply#destroy</tt> would only run +destroy_readers+ and _not_ +destroy_author+.
- # So, use the callback macros when you want to ensure that a certain callback is called for the entire
- # hierarchy, and use the regular overwritable methods when you want to leave it up to each descendant
- # to decide whether they want to call +super+ and trigger the inherited callbacks.
+ # run, both +destroy_author+ and +destroy_readers+ are called.
#
# *IMPORTANT:* In order for inheritance to work for the callback queues, you must specify the
# callbacks before specifying the associations. Otherwise, you might trigger the loading of a
@@ -96,9 +84,9 @@ module ActiveRecord
# == Types of callbacks
#
# There are three types of callbacks accepted by the callback macros: Method references (symbol), callback objects,
- # inline methods (using a proc), and inline eval methods (using a string). Method references and callback objects
+ # and inline methods (using a proc). Method references and callback objects
# are the recommended approaches, inline methods using a proc are sometimes appropriate (such as for
- # creating mix-ins), and inline eval methods are deprecated.
+ # creating mix-ins).
#
# The method reference callbacks work by specifying a protected or private method available in the object, like this:
#
@@ -107,7 +95,7 @@ module ActiveRecord
#
# private
# def delete_parents
- # self.class.delete_all "parent_id = #{id}"
+ # self.class.delete_by(parent_id: id)
# end
# end
#
@@ -140,7 +128,7 @@ module ActiveRecord
# end
# end
#
- # So you specify the object you want messaged on a given callback. When that callback is triggered, the object has
+ # So you specify the object you want to be messaged on a given callback. When that callback is triggered, the object has
# a method by the name of the callback messaged. You can make these callbacks more flexible by passing in other
# initialization data such as the name of the attribute to work with:
#
@@ -225,6 +213,55 @@ module ActiveRecord
#
# This way, the +before_destroy+ gets executed before the <tt>dependent: :destroy</tt> is called, and the data is still available.
#
+ # Also, there are cases when you want several callbacks of the same type to
+ # be executed in order.
+ #
+ # For example:
+ #
+ # class Topic < ActiveRecord::Base
+ # has_many :children
+ #
+ # after_save :log_children
+ # after_save :do_something_else
+ #
+ # private
+ #
+ # def log_children
+ # # Child processing
+ # end
+ #
+ # def do_something_else
+ # # Something else
+ # end
+ # end
+ #
+ # In this case the +log_children+ gets executed before +do_something_else+.
+ # The same applies to all non-transactional callbacks.
+ #
+ # In case there are multiple transactional callbacks as seen below, the order
+ # is reversed.
+ #
+ # For example:
+ #
+ # class Topic < ActiveRecord::Base
+ # has_many :children
+ #
+ # after_commit :log_children
+ # after_commit :do_something_else
+ #
+ # private
+ #
+ # def log_children
+ # # Child processing
+ # end
+ #
+ # def do_something_else
+ # # Something else
+ # end
+ # end
+ #
+ # In this case the +do_something_else+ gets executed before +log_children+.
+ #
# == \Transactions
#
# The entire callback chain of a {#save}[rdoc-ref:Persistence#save], {#save!}[rdoc-ref:Persistence#save!],
@@ -265,17 +302,6 @@ module ActiveRecord
:before_destroy, :around_destroy, :after_destroy, :after_commit, :after_rollback
]
- module ClassMethods # :nodoc:
- include ActiveModel::Callbacks
- end
-
- included do
- include ActiveModel::Validations::Callbacks
-
- define_model_callbacks :initialize, :find, :touch, :only => :after
- define_model_callbacks :save, :create, :update, :destroy
- end
-
def destroy #:nodoc:
@_destroy_callback_already_called ||= false
return if @_destroy_callback_already_called
@@ -292,17 +318,21 @@ module ActiveRecord
_run_touch_callbacks { super }
end
+ def increment!(attribute, by = 1, touch: nil) # :nodoc:
+ touch ? _run_touch_callbacks { super } : super
+ end
+
private
- def create_or_update(*) #:nodoc:
+ def create_or_update(**)
_run_save_callbacks { super }
end
- def _create_record #:nodoc:
+ def _create_record
_run_create_callbacks { super }
end
- def _update_record(*) #:nodoc:
+ def _update_record
_run_update_callbacks { super }
end
end
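Illustrative sketch (not part of the patch): the effect of the new touch keyword on increment! above. The Post model, its views column, and the after_touch callback are made up.

    class Post < ActiveRecord::Base
      after_touch { Rails.logger.info("post touched") }
    end

    post = Post.create!(views: 0)
    post.increment!(:views)               # no touch callbacks run
    post.increment!(:views, touch: true)  # wrapped in _run_touch_callbacks, so after_touch fires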
diff --git a/activerecord/lib/active_record/coders/json.rb b/activerecord/lib/active_record/coders/json.rb
index 75d3bfe625..a69b38487e 100644
--- a/activerecord/lib/active_record/coders/json.rb
+++ b/activerecord/lib/active_record/coders/json.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
module ActiveRecord
module Coders # :nodoc:
class JSON # :nodoc:
@@ -6,7 +8,7 @@ module ActiveRecord
end
def self.load(json)
- ActiveSupport::JSON.decode(json) unless json.nil?
+ ActiveSupport::JSON.decode(json) unless json.blank?
end
end
end
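Illustrative sketch (not part of the patch): the behavioral change from nil? to blank? in Coders::JSON.load above.

    ActiveRecord::Coders::JSON.load(nil)          # => nil (as before)
    ActiveRecord::Coders::JSON.load("")           # => nil (previously tried to decode the empty string)
    ActiveRecord::Coders::JSON.load('{"a": 1}')   # => { "a" => 1 }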
diff --git a/activerecord/lib/active_record/coders/yaml_column.rb b/activerecord/lib/active_record/coders/yaml_column.rb
index 2456b8ad8c..11559141c7 100644
--- a/activerecord/lib/active_record/coders/yaml_column.rb
+++ b/activerecord/lib/active_record/coders/yaml_column.rb
@@ -1,12 +1,14 @@
-require 'yaml'
+# frozen_string_literal: true
+
+require "yaml"
module ActiveRecord
module Coders # :nodoc:
class YAMLColumn # :nodoc:
-
attr_accessor :object_class
- def initialize(object_class = Object)
+ def initialize(attr_name, object_class = Object)
+ @attr_name = attr_name
@object_class = object_class
check_arity_of_constructor
end
@@ -14,37 +16,35 @@ module ActiveRecord
def dump(obj)
return if obj.nil?
- assert_valid_value(obj)
+ assert_valid_value(obj, action: "dump")
YAML.dump obj
end
def load(yaml)
return object_class.new if object_class != Object && yaml.nil?
- return yaml unless yaml.is_a?(String) && yaml =~ /^---/
+ return yaml unless yaml.is_a?(String) && /^---/.match?(yaml)
obj = YAML.load(yaml)
- assert_valid_value(obj)
+ assert_valid_value(obj, action: "load")
obj ||= object_class.new if object_class != Object
obj
end
- def assert_valid_value(obj)
+ def assert_valid_value(obj, action:)
unless obj.nil? || obj.is_a?(object_class)
raise SerializationTypeMismatch,
- "Attribute was supposed to be a #{object_class}, but was a #{obj.class}. -- #{obj.inspect}"
+ "can't #{action} `#{@attr_name}`: was supposed to be a #{object_class}, but was a #{obj.class}. -- #{obj.inspect}"
end
end
private
- def check_arity_of_constructor
- begin
+ def check_arity_of_constructor
load(nil)
rescue ArgumentError
raise ArgumentError, "Cannot serialize #{object_class}. Classes passed to `serialize` must have a 0 argument constructor."
end
- end
end
end
end
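Illustrative sketch (not part of the patch): the attribute name now threaded through the coder shows up in the mismatch error. The preferences column is hypothetical.

    coder = ActiveRecord::Coders::YAMLColumn.new(:preferences, Hash)
    coder.load("--- []\n")
    # raises ActiveRecord::SerializationTypeMismatch:
    #   can't load `preferences`: was supposed to be a Hash, but was a Array. -- []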
diff --git a/activerecord/lib/active_record/collection_cache_key.rb b/activerecord/lib/active_record/collection_cache_key.rb
index 5dcc98424a..4b6db8a96c 100644
--- a/activerecord/lib/active_record/collection_cache_key.rb
+++ b/activerecord/lib/active_record/collection_cache_key.rb
@@ -1,24 +1,37 @@
+# frozen_string_literal: true
+
module ActiveRecord
module CollectionCacheKey
-
def collection_cache_key(collection = all, timestamp_column = :updated_at) # :nodoc:
- query_signature = Digest::MD5.hexdigest(collection.to_sql)
+ query_signature = ActiveSupport::Digest.hexdigest(collection.to_sql)
key = "#{collection.model_name.cache_key}/query-#{query_signature}"
- if collection.loaded?
- size = collection.size
+ if collection.loaded? || collection.distinct_value
+ size = collection.records.size
if size > 0
- timestamp = collection.max_by(&timestamp_column).public_send(timestamp_column)
+ timestamp = collection.max_by(&timestamp_column)._read_attribute(timestamp_column)
end
else
- column_type = type_for_attribute(timestamp_column.to_s)
- column = "#{connection.quote_table_name(collection.table_name)}.#{connection.quote_column_name(timestamp_column)}"
+ if collection.eager_loading?
+ collection = collection.send(:apply_join_dependency)
+ end
+ column_type = type_for_attribute(timestamp_column)
+ column = connection.visitor.compile(collection.arel_attribute(timestamp_column))
+ select_values = "COUNT(*) AS #{connection.quote_column_name("size")}, MAX(%s) AS timestamp"
+
+ if collection.has_limit_or_offset?
+ query = collection.select("#{column} AS collection_cache_key_timestamp")
+ subquery_alias = "subquery_for_cache_key"
+ subquery_column = "#{subquery_alias}.collection_cache_key_timestamp"
+ subquery = query.arel.as(subquery_alias)
+ arel = Arel::SelectManager.new(subquery).project(select_values % subquery_column)
+ else
+ query = collection.unscope(:order)
+ query.select_values = [select_values % column]
+ arel = query.arel
+ end
- query = collection
- .unscope(:select)
- .select("COUNT(*) AS size", "MAX(#{column}) AS timestamp")
- .unscope(:order)
- result = connection.select_one(query)
+ result = connection.select_one(arel, nil)
if result.blank?
size = 0
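Illustrative sketch (not part of the patch): what the key built above roughly looks like for a relation, assuming a hypothetical Product model. The digest segment comes from ActiveSupport::Digest.hexdigest(collection.to_sql); the size and timestamp segments come from the query assembled above.

    Product.where(available: true).cache_key
    # => "products/query-3fa1c2...-5-20180101123045000000"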
diff --git a/activerecord/lib/active_record/connection_adapters/abstract/connection_pool.rb b/activerecord/lib/active_record/connection_adapters/abstract/connection_pool.rb
index e389d818fd..0ded1a5318 100644
--- a/activerecord/lib/active_record/connection_adapters/abstract/connection_pool.rb
+++ b/activerecord/lib/active_record/connection_adapters/abstract/connection_pool.rb
@@ -1,6 +1,8 @@
-require 'thread'
-require 'concurrent/map'
-require 'monitor'
+# frozen_string_literal: true
+
+require "thread"
+require "concurrent/map"
+require "monitor"
module ActiveRecord
# Raised when a connection could not be obtained within the connection
@@ -61,30 +63,25 @@ module ActiveRecord
# There are several connection-pooling-related options that you can add to
# your database connection configuration:
#
- # * +pool+: number indicating size of connection pool (default 5)
- # * +checkout_timeout+: number of seconds to block and wait for a connection
- # before giving up and raising a timeout error (default 5 seconds).
- # * +reaping_frequency+: frequency in seconds to periodically run the
- # Reaper, which attempts to find and recover connections from dead
- # threads, which can occur if a programmer forgets to close a
- # connection at the end of a thread or a thread dies unexpectedly.
- # Regardless of this setting, the Reaper will be invoked before every
- # blocking wait. (Default nil, which means don't schedule the Reaper).
+ # * +pool+: maximum number of connections the pool may manage (default 5).
+ # * +idle_timeout+: number of seconds that a connection will be kept
+ # unused in the pool before it is automatically disconnected (default
+ # 300 seconds). Set this to zero to keep connections forever.
+ # * +checkout_timeout+: number of seconds to wait for a connection to
+ # become available before giving up and raising a timeout error (default
+ # 5 seconds).
#
#--
# Synchronization policy:
# * all public methods can be called outside +synchronize+
- # * access to these i-vars needs to be in +synchronize+:
+ # * access to these instance variables needs to be in +synchronize+:
# * @connections
# * @now_connecting
# * private methods that require being called in a +synchronize+ blocks
# are now explicitly documented
class ConnectionPool
- # Threadsafe, fair, FIFO queue. Meant to be used by ConnectionPool
- # with which it shares a Monitor. But could be a generic Queue.
- #
- # The Queue in stdlib's 'thread' could replace this class except
- # stdlib's doesn't support waiting with a timeout.
+ # Threadsafe, fair, LIFO queue. Meant to be used by ConnectionPool
+ # with which it shares a Monitor.
class Queue
def initialize(lock = Monitor.new)
@lock = lock
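Illustrative sketch (not part of the patch): the pool options documented above, passed as a connection spec. The values are arbitrary; idle_timeout is the newly documented key, and reaping_frequency is discussed further down in this file.

    ActiveRecord::Base.establish_connection(
      adapter:           "postgresql",
      database:          "myapp_production",
      pool:              10,   # max connections the pool may manage
      checkout_timeout:  5,    # seconds to wait for a free connection
      idle_timeout:      300,  # disconnect connections idle at least this long (0 keeps them forever)
      reaping_frequency: 60    # how often the Reaper calls reap/flush
    )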
@@ -116,7 +113,7 @@ module ActiveRecord
end
end
- # If +element+ is in the queue, remove and return it, or nil.
+ # If +element+ is in the queue, remove and return it, or +nil+.
def delete(element)
synchronize do
@queue.delete(element)
@@ -135,7 +132,7 @@ module ActiveRecord
# If +timeout+ is not given, remove and return the head the
# queue if the number of available elements is strictly
# greater than the number of threads currently waiting (that
- # is, don't jump ahead in line). Otherwise, return nil.
+ # is, don't jump ahead in line). Otherwise, return +nil+.
#
# If +timeout+ is given, block if there is no element
# available, waiting up to +timeout+ seconds for an element to
@@ -150,61 +147,63 @@ module ActiveRecord
private
- def internal_poll(timeout)
- no_wait_poll || (timeout && wait_poll(timeout))
- end
+ def internal_poll(timeout)
+ no_wait_poll || (timeout && wait_poll(timeout))
+ end
- def synchronize(&block)
- @lock.synchronize(&block)
- end
+ def synchronize(&block)
+ @lock.synchronize(&block)
+ end
- # Test if the queue currently contains any elements.
- def any?
- !@queue.empty?
- end
+ # Test if the queue currently contains any elements.
+ def any?
+ !@queue.empty?
+ end
- # A thread can remove an element from the queue without
- # waiting if and only if the number of currently available
- # connections is strictly greater than the number of waiting
- # threads.
- def can_remove_no_wait?
- @queue.size > @num_waiting
- end
+ # A thread can remove an element from the queue without
+ # waiting if and only if the number of currently available
+ # connections is strictly greater than the number of waiting
+ # threads.
+ def can_remove_no_wait?
+ @queue.size > @num_waiting
+ end
- # Removes and returns the head of the queue if possible, or nil.
- def remove
- @queue.shift
- end
+ # Removes and returns the head of the queue if possible, or +nil+.
+ def remove
+ @queue.pop
+ end
- # Remove and return the head the queue if the number of
- # available elements is strictly greater than the number of
- # threads currently waiting. Otherwise, return nil.
- def no_wait_poll
- remove if can_remove_no_wait?
- end
+ # Remove and return the head the queue if the number of
+ # available elements is strictly greater than the number of
+ # threads currently waiting. Otherwise, return +nil+.
+ def no_wait_poll
+ remove if can_remove_no_wait?
+ end
- # Waits on the queue up to +timeout+ seconds, then removes and
- # returns the head of the queue.
- def wait_poll(timeout)
- @num_waiting += 1
+ # Waits on the queue up to +timeout+ seconds, then removes and
+ # returns the head of the queue.
+ def wait_poll(timeout)
+ @num_waiting += 1
- t0 = Time.now
- elapsed = 0
- loop do
- @cond.wait(timeout - elapsed)
+ t0 = Concurrent.monotonic_time
+ elapsed = 0
+ loop do
+ ActiveSupport::Dependencies.interlock.permit_concurrent_loads do
+ @cond.wait(timeout - elapsed)
+ end
- return remove if any?
+ return remove if any?
- elapsed = Time.now - t0
- if elapsed >= timeout
- msg = 'could not obtain a connection from the pool within %0.3f seconds (waited %0.3f seconds); all pooled connections were in use' %
- [timeout, elapsed]
- raise ConnectionTimeoutError, msg
+ elapsed = Concurrent.monotonic_time - t0
+ if elapsed >= timeout
+ msg = "could not obtain a connection from the pool within %0.3f seconds (waited %0.3f seconds); all pooled connections were in use" %
+ [timeout, elapsed]
+ raise ConnectionTimeoutError, msg
+ end
end
+ ensure
+ @num_waiting -= 1
end
- ensure
- @num_waiting -= 1
- end
end
# Adds the ability to turn a basic fair FIFO queue into one
@@ -268,25 +267,25 @@ module ActiveRecord
# Connections must be leased while holding the main pool mutex. This is
# an internal subclass that also +.leases+ returned connections while
# still in queue's critical section (queue synchronizes with the same
- # +@lock+ as the main pool) so that a returned connection is already
+ # <tt>@lock</tt> as the main pool) so that a returned connection is already
# leased and there is no need to re-enter synchronized block.
class ConnectionLeasingQueue < Queue # :nodoc:
include BiasableQueue
private
- def internal_poll(timeout)
- conn = super
- conn.lease if conn
- conn
- end
+ def internal_poll(timeout)
+ conn = super
+ conn.lease if conn
+ conn
+ end
end
- # Every +frequency+ seconds, the reaper will call +reap+ on +pool+.
- # A reaper instantiated with a nil frequency will never reap the
- # connection pool.
+ # Every +frequency+ seconds, the reaper will call +reap+ and +flush+ on
+ # +pool+. A reaper instantiated with a zero frequency will never reap
+ # the connection pool.
#
- # Configure the frequency by setting "reaping_frequency" in your
- # database yaml file.
+ # Configure the frequency by setting +reaping_frequency+ in your database
+ # yaml file (default 60 seconds).
class Reaper
attr_reader :pool, :frequency
@@ -296,17 +295,19 @@ module ActiveRecord
end
def run
- return unless frequency
+ return unless frequency && frequency > 0
Thread.new(frequency, pool) { |t, p|
- while true
+ loop do
sleep t
p.reap
+ p.flush
end
}
end
end
include MonitorMixin
+ include QueryCache::ConnectionPoolConfiguration
attr_accessor :automatic_reconnect, :checkout_timeout, :schema_cache
attr_reader :spec, :connections, :size, :reaper
@@ -323,23 +324,25 @@ module ActiveRecord
@spec = spec
@checkout_timeout = (spec.config[:checkout_timeout] && spec.config[:checkout_timeout].to_f) || 5
- @reaper = Reaper.new(self, (spec.config[:reaping_frequency] && spec.config[:reaping_frequency].to_f))
- @reaper.run
+ if @idle_timeout = spec.config.fetch(:idle_timeout, 300)
+ @idle_timeout = @idle_timeout.to_f
+ @idle_timeout = nil if @idle_timeout <= 0
+ end
# default max pool size to 5
@size = (spec.config[:pool] && spec.config[:pool].to_i) || 5
- # The cache of threads mapped to reserved connections, the sole purpose
- # of the cache is to speed-up +connection+ method, it is not the authoritative
- # registry of which thread owns which connection, that is tracked by
- # +connection.owner+ attr on each +connection+ instance.
+ # This variable tracks the cache of threads mapped to reserved connections, with the
+ # sole purpose of speeding up the +connection+ method. It is not the authoritative
+ # registry of which thread owns which connection. Connection ownership is tracked by
+ # the +connection.owner+ attr on each +connection+ instance.
# The invariant works like this: if there is mapping of <tt>thread => conn</tt>,
- # then that +thread+ does indeed own that +conn+, however an absence of a such
- # mapping does not mean that the +thread+ doesn't own the said connection, in
+ # then that +thread+ does indeed own that +conn+. However, an absence of a such
+ # mapping does not mean that the +thread+ doesn't own the said connection. In
# that case +conn.owner+ attr should be consulted.
- # Access and modification of +@thread_cached_conns+ does not require
+ # Access and modification of <tt>@thread_cached_conns</tt> does not require
# synchronization.
- @thread_cached_conns = Concurrent::Map.new(:initial_capacity => @size)
+ @thread_cached_conns = Concurrent::Map.new(initial_capacity: @size)
@connections = []
@automatic_reconnect = true
@@ -349,10 +352,25 @@ module ActiveRecord
# currently in the process of independently establishing connections to the DB.
@now_connecting = 0
- # A boolean toggle that allows/disallows new connections.
- @new_cons_enabled = true
+ @threads_blocking_new_connections = 0
@available = ConnectionLeasingQueue.new self
+
+ @lock_thread = false
+
+ # +reaping_frequency+ is configurable mostly for historical reasons, but it could
+ # also be useful if someone wants a very low +idle_timeout+.
+ reaping_frequency = spec.config.fetch(:reaping_frequency, 60)
+ @reaper = Reaper.new(self, reaping_frequency && reaping_frequency.to_f)
+ @reaper.run
+ end
+
+ def lock_thread=(lock_thread)
+ if lock_thread
+ @lock_thread = Thread.current
+ else
+ @lock_thread = nil
+ end
end
# Retrieve the connection associated with the current thread, or call
@@ -361,13 +379,13 @@ module ActiveRecord
# #connection can be called any number of times; the connection is
# held in a cache keyed by a thread.
def connection
- @thread_cached_conns[connection_cache_key(Thread.current)] ||= checkout
+ @thread_cached_conns[connection_cache_key(@lock_thread || Thread.current)] ||= checkout
end
- # Is there an open connection that is being used for the current thread?
+ # Returns true if there is an open connection being used for the current thread.
#
# This method only works for connections that have been obtained through
- # #connection or #with_connection methods, connections obtained through
+ # #connection or #with_connection methods. Connections obtained through
# #checkout will not be detected by #active_connection?
def active_connection?
@thread_cached_conns[connection_cache_key(Thread.current)]
@@ -415,7 +433,10 @@ module ActiveRecord
with_exclusively_acquired_all_connections(raise_on_acquisition_timeout) do
synchronize do
@connections.each do |conn|
- checkin conn
+ if conn.in_use?
+ conn.steal!
+ checkin conn
+ end
conn.disconnect!
end
@connections = []
@@ -426,14 +447,29 @@ module ActiveRecord
# Disconnects all connections in the pool, and clears the pool.
#
- # The pool first tries to gain ownership of all connections, if unable to
+ # The pool first tries to gain ownership of all connections. If unable to
# do so within a timeout interval (default duration is
- # <tt>spec.config[:checkout_timeout] * 2</tt> seconds), the pool is forcefully
+ # <tt>spec.config[:checkout_timeout] * 2</tt> seconds), then the pool is forcefully
# disconnected without any regard for other connection owning threads.
def disconnect!
disconnect(false)
end
+ # Discards all connections in the pool (even if they're currently
+ # leased!), along with the pool itself. Any further interaction with the
+ # pool (except #spec and #schema_cache) is undefined.
+ #
+ # See AbstractAdapter#discard!
+ def discard! # :nodoc:
+ synchronize do
+ return if @connections.nil? # already discarded
+ @connections.each do |conn|
+ conn.discard!
+ end
+ @connections = @available = @thread_cached_conns = nil
+ end
+ end
+
# Clears the cache which maps classes and re-connects connections that
# require reloading.
#
@@ -442,41 +478,27 @@ module ActiveRecord
# connections in the pool within a timeout interval (default duration is
# <tt>spec.config[:checkout_timeout] * 2</tt> seconds).
def clear_reloadable_connections(raise_on_acquisition_timeout = true)
- num_new_conns_required = 0
-
with_exclusively_acquired_all_connections(raise_on_acquisition_timeout) do
synchronize do
@connections.each do |conn|
- checkin conn
+ if conn.in_use?
+ conn.steal!
+ checkin conn
+ end
conn.disconnect! if conn.requires_reloading?
end
@connections.delete_if(&:requires_reloading?)
-
@available.clear
-
- if @connections.size < @size
- # because of the pruning done by this method, we might be running
- # low on connections, while threads stuck in queue are helpless
- # (not being able to establish new connections for themselves),
- # see also more detailed explanation in +remove+
- num_new_conns_required = num_waiting_in_queue - @connections.size
- end
-
- @connections.each do |conn|
- @available.add conn
- end
end
end
-
- bulk_make_new_connections(num_new_conns_required) if num_new_conns_required > 0
end
# Clears the cache which maps classes and re-connects connections that
# require reloading.
#
- # The pool first tries to gain ownership of all connections, if unable to
+ # The pool first tries to gain ownership of all connections. If unable to
# do so within a timeout interval (default duration is
- # <tt>spec.config[:checkout_timeout] * 2</tt> seconds), the pool forcefully
+ # <tt>spec.config[:checkout_timeout] * 2</tt> seconds), then the pool forcefully
# clears the cache and reloads connections without any regard for other
# connection owning threads.
def clear_reloadable_connections!
@@ -507,14 +529,16 @@ module ActiveRecord
# +conn+: an AbstractAdapter object, which was obtained by earlier by
# calling #checkout on this pool.
def checkin(conn)
- synchronize do
- remove_connection_from_thread_cache conn
+ conn.lock.synchronize do
+ synchronize do
+ remove_connection_from_thread_cache conn
- conn._run_checkin_callbacks do
- conn.expire
- end
+ conn._run_checkin_callbacks do
+ conn.expire
+ end
- @available.add conn
+ @available.add conn
+ end
end
end
@@ -530,20 +554,20 @@ module ActiveRecord
@available.delete conn
# @available.any_waiting? => true means that prior to removing this
- # conn, the pool was at its max size (@connections.size == @size)
- # this would mean that any threads stuck waiting in the queue wouldn't
+ # conn, the pool was at its max size (@connections.size == @size).
+ # This would mean that any threads stuck waiting in the queue wouldn't
# know they could checkout_new_connection, so let's do it for them.
# Because condition-wait loop is encapsulated in the Queue class
# (that in turn is oblivious to ConnectionPool implementation), threads
- # that are "stuck" there are helpless, they have no way of creating
+ # that are "stuck" there are helpless. They have no way of creating
# new connections and are completely reliant on us feeding available
# connections into the Queue.
needs_new_connection = @available.any_waiting?
end
# This is intentionally done outside of the synchronized section as we
- # would like not to hold the main mutex while checking out new connections,
- # thus there is some chance that needs_new_connection information is now
+ # would like not to hold the main mutex while checking out new connections.
+ # Thus there is some chance that needs_new_connection information is now
# stale, we can live with that (bulk_make_new_connections will make
# sure not to exceed the pool's @size limit).
bulk_make_new_connections(1) if needs_new_connection
@@ -556,230 +580,295 @@ module ActiveRecord
stale_connections = synchronize do
@connections.select do |conn|
conn.in_use? && !conn.owner.alive?
+ end.each do |conn|
+ conn.steal!
end
end
stale_connections.each do |conn|
- synchronize do
- if conn.active?
- conn.reset!
- checkin conn
- else
- remove conn
- end
+ if conn.active?
+ conn.reset!
+ checkin conn
+ else
+ remove conn
end
end
end
+ # Disconnect all connections that have been idle for at least
+ # +minimum_idle+ seconds. Connections currently checked out, or that were
+ # checked in less than +minimum_idle+ seconds ago, are unaffected.
+ def flush(minimum_idle = @idle_timeout)
+ return if minimum_idle.nil?
+
+ idle_connections = synchronize do
+ @connections.select do |conn|
+ !conn.in_use? && conn.seconds_idle >= minimum_idle
+ end.each do |conn|
+ conn.lease
+
+ @available.delete conn
+ @connections.delete conn
+ end
+ end
+
+ idle_connections.each do |conn|
+ conn.disconnect!
+ end
+ end
+
+ # Disconnect all currently idle connections. Connections currently checked
+ # out are unaffected.
+ def flush!
+ reap
+ flush(-1)
+ end
+
def num_waiting_in_queue # :nodoc:
@available.num_waiting
end
- private
- #--
- # this is unfortunately not concurrent
- def bulk_make_new_connections(num_new_conns_needed)
- num_new_conns_needed.times do
- # try_to_checkout_new_connection will not exceed pool's @size limit
- if new_conn = try_to_checkout_new_connection
- # make the new_conn available to the starving threads stuck @available Queue
- checkin(new_conn)
- end
- end
- end
-
- #--
- # From the discussion on GitHub:
- # https://github.com/rails/rails/pull/14938#commitcomment-6601951
- # This hook-in method allows for easier monkey-patching fixes needed by
- # JRuby users that use Fibers.
- def connection_cache_key(thread)
- thread
- end
-
- # Take control of all existing connections so a "group" action such as
- # reload/disconnect can be performed safely. It is no longer enough to
- # wrap it in +synchronize+ because some pool's actions are allowed
- # to be performed outside of the main +synchronize+ block.
- def with_exclusively_acquired_all_connections(raise_on_acquisition_timeout = true)
- with_new_connections_blocked do
- attempt_to_checkout_all_existing_connections(raise_on_acquisition_timeout)
- yield
+ # Returns the connection pool's usage statistics.
+ # Example:
+ #
+ # ActiveRecord::Base.connection_pool.stat # => { size: 15, connections: 1, busy: 1, dead: 0, idle: 0, waiting: 0, checkout_timeout: 5 }
+ def stat
+ synchronize do
+ {
+ size: size,
+ connections: @connections.size,
+ busy: @connections.count { |c| c.in_use? && c.owner.alive? },
+ dead: @connections.count { |c| c.in_use? && !c.owner.alive? },
+ idle: @connections.count { |c| !c.in_use? },
+ waiting: num_waiting_in_queue,
+ checkout_timeout: checkout_timeout
+ }
end
end
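
A small sketch of how the stat hash above could feed a health check; the saturation condition and the logger call are illustrative choices, not part of the patch:

    stats = ActiveRecord::Base.connection_pool.stat
    if stats[:waiting] > 0 && stats[:connections] == stats[:size]
      Rails.logger.warn("connection pool saturated: #{stats.inspect}")
    end
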
- def attempt_to_checkout_all_existing_connections(raise_on_acquisition_timeout = true)
- collected_conns = synchronize do
- # account for our own connections
- @connections.select {|conn| conn.owner == Thread.current}
+ private
+ #--
+ # this is unfortunately not concurrent
+ def bulk_make_new_connections(num_new_conns_needed)
+ num_new_conns_needed.times do
+ # try_to_checkout_new_connection will not exceed pool's @size limit
+ if new_conn = try_to_checkout_new_connection
+ # make the new_conn available to the starving threads stuck in the @available Queue
+ checkin(new_conn)
+ end
+ end
end
- newly_checked_out = []
- timeout_time = Time.now + (@checkout_timeout * 2)
+ #--
+ # From the discussion on GitHub:
+ # https://github.com/rails/rails/pull/14938#commitcomment-6601951
+ # This hook-in method allows for easier monkey-patching fixes needed by
+ # JRuby users that use Fibers.
+ def connection_cache_key(thread)
+ thread
+ end
- @available.with_a_bias_for(Thread.current) do
- while true
- synchronize do
- return if collected_conns.size == @connections.size && @now_connecting == 0
- remaining_timeout = timeout_time - Time.now
- remaining_timeout = 0 if remaining_timeout < 0
- conn = checkout_for_exclusive_access(remaining_timeout)
- collected_conns << conn
- newly_checked_out << conn
- end
+ # Take control of all existing connections so a "group" action such as
+ # reload/disconnect can be performed safely. It is no longer enough to
+ # wrap it in +synchronize+ because some pool's actions are allowed
+ # to be performed outside of the main +synchronize+ block.
+ def with_exclusively_acquired_all_connections(raise_on_acquisition_timeout = true)
+ with_new_connections_blocked do
+ attempt_to_checkout_all_existing_connections(raise_on_acquisition_timeout)
+ yield
end
end
- rescue ExclusiveConnectionTimeoutError
- # <tt>raise_on_acquisition_timeout == false</tt> means we are directed to ignore any
- # timeouts and are expected to just give up: we've obtained as many connections
- # as possible, note that in a case like that we don't return any of the
- # +newly_checked_out+ connections.
- if raise_on_acquisition_timeout
+ def attempt_to_checkout_all_existing_connections(raise_on_acquisition_timeout = true)
+ collected_conns = synchronize do
+ # account for our own connections
+ @connections.select { |conn| conn.owner == Thread.current }
+ end
+
+ newly_checked_out = []
+ timeout_time = Concurrent.monotonic_time + (@checkout_timeout * 2)
+
+ @available.with_a_bias_for(Thread.current) do
+ loop do
+ synchronize do
+ return if collected_conns.size == @connections.size && @now_connecting == 0
+ remaining_timeout = timeout_time - Concurrent.monotonic_time
+ remaining_timeout = 0 if remaining_timeout < 0
+ conn = checkout_for_exclusive_access(remaining_timeout)
+ collected_conns << conn
+ newly_checked_out << conn
+ end
+ end
+ end
+ rescue ExclusiveConnectionTimeoutError
+ # <tt>raise_on_acquisition_timeout == false</tt> means we are directed to ignore any
+ # timeouts and are expected to just give up: we've obtained as many connections
+ # as possible. Note that in a case like that we don't return any of the
+ # +newly_checked_out+ connections.
+
+ if raise_on_acquisition_timeout
+ release_newly_checked_out = true
+ raise
+ end
+ rescue Exception # if something else went wrong
+ # this can't be a "naked" rescue, because we should return conns
+ # even for non-StandardErrors
release_newly_checked_out = true
raise
+ ensure
+ if release_newly_checked_out && newly_checked_out
+ # releasing only those conns that were checked out in this method; conns
+ # checked out outside this method (before it was called) are not for us to release
+ newly_checked_out.each { |conn| checkin(conn) }
+ end
end
- rescue Exception # if something else went wrong
- # this can't be a "naked" rescue, because we have should return conns
- # even for non-StandardErrors
- release_newly_checked_out = true
- raise
- ensure
- if release_newly_checked_out && newly_checked_out
- # releasing only those conns that were checked out in this method, conns
- # checked outside this method (before it was called) are not for us to release
- newly_checked_out.each {|conn| checkin(conn)}
- end
- end
- #--
- # Must be called in a synchronize block.
- def checkout_for_exclusive_access(checkout_timeout)
- checkout(checkout_timeout)
- rescue ConnectionTimeoutError
- # this block can't be easily moved into attempt_to_checkout_all_existing_connections's
- # rescue block, because doing so would put it outside of synchronize section, without
- # being in a critical section thread_report might become inaccurate
- msg = "could not obtain ownership of all database connections in #{checkout_timeout} seconds"
-
- thread_report = []
- @connections.each do |conn|
- unless conn.owner == Thread.current
- thread_report << "#{conn} is owned by #{conn.owner}"
+ #--
+ # Must be called in a synchronize block.
+ def checkout_for_exclusive_access(checkout_timeout)
+ checkout(checkout_timeout)
+ rescue ConnectionTimeoutError
+ # this block can't be easily moved into attempt_to_checkout_all_existing_connections's
+ # rescue block, because doing so would put it outside of the synchronize section;
+ # without being in a critical section, thread_report might become inaccurate
+ msg = +"could not obtain ownership of all database connections in #{checkout_timeout} seconds"
+
+ thread_report = []
+ @connections.each do |conn|
+ unless conn.owner == Thread.current
+ thread_report << "#{conn} is owned by #{conn.owner}"
+ end
end
+
+ msg << " (#{thread_report.join(', ')})" if thread_report.any?
+
+ raise ExclusiveConnectionTimeoutError, msg
end
- msg << " (#{thread_report.join(', ')})" if thread_report.any?
+ def with_new_connections_blocked
+ synchronize do
+ @threads_blocking_new_connections += 1
+ end
- raise ExclusiveConnectionTimeoutError, msg
- end
+ yield
+ ensure
+ num_new_conns_required = 0
- def with_new_connections_blocked
- previous_value = nil
- synchronize do
- previous_value, @new_cons_enabled = @new_cons_enabled, false
- end
- yield
- ensure
- synchronize { @new_cons_enabled = previous_value }
- end
+ synchronize do
+ @threads_blocking_new_connections -= 1
- # Acquire a connection by one of 1) immediately removing one
- # from the queue of available connections, 2) creating a new
- # connection if the pool is not at capacity, 3) waiting on the
- # queue for a connection to become available.
- #
- # Raises:
- # - ActiveRecord::ConnectionTimeoutError if a connection could not be acquired
- #
- #--
- # Implementation detail: the connection returned by +acquire_connection+
- # will already be "+connection.lease+ -ed" to the current thread.
- def acquire_connection(checkout_timeout)
- # NOTE: we rely on +@available.poll+ and +try_to_checkout_new_connection+ to
- # +conn.lease+ the returned connection (and to do this in a +synchronized+
- # section), this is not the cleanest implementation, as ideally we would
- # <tt>synchronize { conn.lease }</tt> in this method, but by leaving it to +@available.poll+
- # and +try_to_checkout_new_connection+ we can piggyback on +synchronize+ sections
- # of the said methods and avoid an additional +synchronize+ overhead.
- if conn = @available.poll || try_to_checkout_new_connection
- conn
- else
- reap
- @available.poll(checkout_timeout)
+ if @threads_blocking_new_connections.zero?
+ @available.clear
+
+ num_new_conns_required = num_waiting_in_queue
+
+ @connections.each do |conn|
+ next if conn.in_use?
+
+ @available.add conn
+ num_new_conns_required -= 1
+ end
+ end
+ end
+
+ bulk_make_new_connections(num_new_conns_required) if num_new_conns_required > 0
end
- end
- #--
- # if owner_thread param is omitted, this must be called in synchronize block
- def remove_connection_from_thread_cache(conn, owner_thread = conn.owner)
- @thread_cached_conns.delete_pair(connection_cache_key(owner_thread), conn)
- end
- alias_method :release, :remove_connection_from_thread_cache
+ # Acquire a connection by one of 1) immediately removing one
+ # from the queue of available connections, 2) creating a new
+ # connection if the pool is not at capacity, 3) waiting on the
+ # queue for a connection to become available.
+ #
+ # Raises:
+ # - ActiveRecord::ConnectionTimeoutError if a connection could not be acquired
+ #
+ #--
+ # Implementation detail: the connection returned by +acquire_connection+
+ # will already be "+connection.lease+ -ed" to the current thread.
+ def acquire_connection(checkout_timeout)
+ # NOTE: we rely on <tt>@available.poll</tt> and +try_to_checkout_new_connection+ to
+ # +conn.lease+ the returned connection (and to do this in a +synchronized+
+ # section). This is not the cleanest implementation, as ideally we would
+ # <tt>synchronize { conn.lease }</tt> in this method, but by leaving it to <tt>@available.poll</tt>
+ # and +try_to_checkout_new_connection+ we can piggyback on +synchronize+ sections
+ # of the said methods and avoid an additional +synchronize+ overhead.
+ if conn = @available.poll || try_to_checkout_new_connection
+ conn
+ else
+ reap
+ @available.poll(checkout_timeout)
+ end
+ end
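
The acquisition strategy above is what ConnectionPool#checkout ultimately drives; a hedged sketch of manual checkout with an explicit timeout (with_connection is usually preferable):

    pool = ActiveRecord::Base.connection_pool
    conn = pool.checkout(2.0)   # wait up to ~2 seconds, else ConnectionTimeoutError
    begin
      conn.execute("SELECT 1")
    ensure
      pool.checkin(conn)
    end
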
- def new_connection
- Base.send(spec.adapter_method, spec.config).tap do |conn|
- conn.schema_cache = schema_cache.dup if schema_cache
+ #--
+ # if owner_thread param is omitted, this must be called in synchronize block
+ def remove_connection_from_thread_cache(conn, owner_thread = conn.owner)
+ @thread_cached_conns.delete_pair(connection_cache_key(owner_thread), conn)
end
- end
+ alias_method :release, :remove_connection_from_thread_cache
- # If the pool is not at a +@size+ limit, establish new connection. Connecting
- # to the DB is done outside main synchronized section.
- #--
- # Implementation constraint: a newly established connection returned by this
- # method must be in the +.leased+ state.
- def try_to_checkout_new_connection
- # first in synchronized section check if establishing new conns is allowed
- # and increment @now_connecting, to prevent overstepping this pool's @size
- # constraint
- do_checkout = synchronize do
- if @new_cons_enabled && (@connections.size + @now_connecting) < @size
- @now_connecting += 1
+ def new_connection
+ Base.send(spec.adapter_method, spec.config).tap do |conn|
+ conn.schema_cache = schema_cache.dup if schema_cache
end
end
- if do_checkout
- begin
- # if successfully incremented @now_connecting establish new connection
- # outside of synchronized section
- conn = checkout_new_connection
- ensure
- synchronize do
- if conn
- adopt_connection(conn)
- # returned conn needs to be already leased
- conn.lease
+
+ # If the pool is not at its <tt>@size</tt> limit, establish a new connection. Connecting
+ # to the DB is done outside the main synchronized section.
+ #--
+ # Implementation constraint: a newly established connection returned by this
+ # method must be in the +.leased+ state.
+ def try_to_checkout_new_connection
+ # first, in a synchronized section, check whether establishing new conns is allowed
+ # and increment @now_connecting to prevent overstepping this pool's @size
+ # constraint
+ do_checkout = synchronize do
+ if @threads_blocking_new_connections.zero? && (@connections.size + @now_connecting) < @size
+ @now_connecting += 1
+ end
+ end
+ if do_checkout
+ begin
+ # if @now_connecting was successfully incremented, establish the new connection
+ # outside of the synchronized section
+ conn = checkout_new_connection
+ ensure
+ synchronize do
+ if conn
+ adopt_connection(conn)
+ # returned conn needs to be already leased
+ conn.lease
+ end
+ @now_connecting -= 1
end
- @now_connecting -= 1
end
end
end
- end
- def adopt_connection(conn)
- conn.pool = self
- @connections << conn
- end
+ def adopt_connection(conn)
+ conn.pool = self
+ @connections << conn
+ end
- def checkout_new_connection
- raise ConnectionNotEstablished unless @automatic_reconnect
- new_connection
- end
+ def checkout_new_connection
+ raise ConnectionNotEstablished unless @automatic_reconnect
+ new_connection
+ end
- def checkout_and_verify(c)
- c._run_checkout_callbacks do
- c.verify!
+ def checkout_and_verify(c)
+ c._run_checkout_callbacks do
+ c.verify!
+ end
+ c
+ rescue
+ remove c
+ c.disconnect!
+ raise
end
- c
- rescue
- remove c
- c.disconnect!
- raise
- end
end
# ConnectionHandler is a collection of ConnectionPool objects. It is used
- # for keeping separate connection pools for Active Record models that connect
- # to different databases.
+ # for keeping separate connection pools that connect to different databases.
#
# For example, suppose that you have 5 models, with the following hierarchy:
#
@@ -790,7 +879,7 @@ module ActiveRecord
# end
#
# class Book < ActiveRecord::Base
- # establish_connection "library_db"
+ # establish_connection :library_db
# end
#
# class ScaryBook < Book
@@ -821,28 +910,67 @@ module ActiveRecord
# ConnectionHandler accessible via ActiveRecord::Base.connection_handler.
# All Active Record models use this handler to determine the connection pool that they
# should use.
+ #
+ # The ConnectionHandler class is not coupled with Active Record models, as it has no knowledge
+ # about the model. The model needs to pass a specification name to the handler
+ # in order to look up the correct connection pool.
class ConnectionHandler
- def initialize
- # These caches are keyed by klass.name, NOT klass. Keying them by klass
- # alone would lead to memory leaks in development mode as all previous
- # instances of the class would stay in memory.
- @owner_to_pool = Concurrent::Map.new(:initial_capacity => 2) do |h,k|
- h[k] = Concurrent::Map.new(:initial_capacity => 2)
+ def self.create_owner_to_pool # :nodoc:
+ Concurrent::Map.new(initial_capacity: 2) do |h, k|
+ # Discard the parent's connection pools immediately; we have no need
+ # of them
+ discard_unowned_pools(h)
+
+ h[k] = Concurrent::Map.new(initial_capacity: 2)
+ end
+ end
+
+ def self.unowned_pool_finalizer(pid_map) # :nodoc:
+ lambda do |_|
+ discard_unowned_pools(pid_map)
end
- @class_to_pool = Concurrent::Map.new(:initial_capacity => 2) do |h,k|
- h[k] = Concurrent::Map.new
+ end
+
+ def self.discard_unowned_pools(pid_map) # :nodoc:
+ pid_map.each do |pid, pools|
+ pools.values.compact.each(&:discard!) unless pid == Process.pid
end
end
+ def initialize
+ # These caches are keyed by spec.name (ConnectionSpecification#name).
+ @owner_to_pool = ConnectionHandler.create_owner_to_pool
+
+ # Backup finalizer: if the forked child never needed a pool, the above
+ # early discard has not occurred
+ ObjectSpace.define_finalizer self, ConnectionHandler.unowned_pool_finalizer(@owner_to_pool)
+ end
+
def connection_pool_list
owner_to_pool.values.compact
end
alias :connection_pools :connection_pool_list
- def establish_connection(owner, spec)
- @class_to_pool.clear
- raise RuntimeError, "Anonymous class is not allowed." unless owner.name
- owner_to_pool[owner.name] = ConnectionAdapters::ConnectionPool.new(spec)
+ def establish_connection(config)
+ resolver = ConnectionSpecification::Resolver.new(Base.configurations)
+ spec = resolver.spec(config)
+
+ remove_connection(spec.name)
+
+ message_bus = ActiveSupport::Notifications.instrumenter
+ payload = {
+ connection_id: object_id
+ }
+ if spec
+ payload[:spec_name] = spec.name
+ payload[:config] = spec.config
+ end
+
+ message_bus.instrument("!connection.active_record", payload) do
+ owner_to_pool[spec.name] = ConnectionAdapters::ConnectionPool.new(spec)
+ end
+
+ owner_to_pool[spec.name]
end
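
A sketch of the new spec-name-based lookup, assuming a library_db entry exists in the configurations:

    handler = ActiveRecord::Base.connection_handler
    pool    = handler.establish_connection(:library_db)
    handler.retrieve_connection_pool(pool.spec.name)   # => the same pool, keyed by spec name
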
# Returns true if there are any active connections among the connection
@@ -869,87 +997,81 @@ module ActiveRecord
connection_pool_list.each(&:disconnect!)
end
+ # Disconnects all currently idle connections.
+ #
+ # See ConnectionPool#flush! for details.
+ def flush_idle_connections!
+ connection_pool_list.each(&:flush!)
+ end
+
# Locate the connection of the nearest super class. This can be an
# active or defined connection: if it is the latter, it will be
# opened and set as the active connection for the class it was defined
# for (not necessarily the current class).
- def retrieve_connection(klass) #:nodoc:
- pool = retrieve_connection_pool(klass)
- raise ConnectionNotEstablished, "No connection pool for #{klass}" unless pool
- conn = pool.connection
- raise ConnectionNotEstablished, "No connection for #{klass} in connection pool" unless conn
- conn
+ def retrieve_connection(spec_name) #:nodoc:
+ pool = retrieve_connection_pool(spec_name)
+
+ unless pool
+ # multiple database application
+ if ActiveRecord::Base.connection_handler != ActiveRecord::Base.default_connection_handler
+ raise ConnectionNotEstablished, "No connection pool with '#{spec_name}' found for the '#{ActiveRecord::Base.current_role}' role."
+ else
+ raise ConnectionNotEstablished, "No connection pool with '#{spec_name}' found."
+ end
+ end
+
+ pool.connection
end
# Returns true if a connection that's accessible to this class has
# already been opened.
- def connected?(klass)
- conn = retrieve_connection_pool(klass)
- conn && conn.connected?
+ def connected?(spec_name)
+ pool = retrieve_connection_pool(spec_name)
+ pool && pool.connected?
end
# Remove the connection for this class. This will close the active
# connection and the defined connection (if they exist). The result
- # can be used as an argument for establish_connection, for easily
+ # can be used as an argument for #establish_connection, for easily
# re-establishing the connection.
- def remove_connection(owner)
- if pool = owner_to_pool.delete(owner.name)
- @class_to_pool.clear
+ def remove_connection(spec_name)
+ if pool = owner_to_pool.delete(spec_name)
pool.automatic_reconnect = false
pool.disconnect!
pool.spec.config
end
end
- # Retrieving the connection pool happens a lot so we cache it in @class_to_pool.
+ # Retrieving the connection pool happens a lot, so we cache it in @owner_to_pool.
# This makes retrieving the connection pool O(1) once the process is warm.
# When a connection is established or removed, we invalidate the cache.
- #
- # Ideally we would use #fetch here, as class_to_pool[klass] may sometimes be nil.
- # However, benchmarking (https://gist.github.com/jonleighton/3552829) showed that
- # #fetch is significantly slower than #[]. So in the nil case, no caching will
- # take place, but that's ok since the nil case is not the common one that we wish
- # to optimise for.
- def retrieve_connection_pool(klass)
- class_to_pool[klass.name] ||= begin
- until pool = pool_for(klass)
- klass = klass.superclass
- break unless klass <= Base
- end
-
- class_to_pool[klass.name] = pool
- end
- end
-
- private
-
- def owner_to_pool
- @owner_to_pool[Process.pid]
- end
-
- def class_to_pool
- @class_to_pool[Process.pid]
- end
-
- def pool_for(owner)
- owner_to_pool.fetch(owner.name) {
- if ancestor_pool = pool_from_any_process_for(owner)
+ def retrieve_connection_pool(spec_name)
+ owner_to_pool.fetch(spec_name) do
+ # Check if a connection was previously established in an ancestor process that
+ # may have subsequently forked.
+ if ancestor_pool = pool_from_any_process_for(spec_name)
# A connection was established in an ancestor process that must have
# subsequently forked. We can't reuse the connection, but we can copy
# the specification and establish a new connection with it.
- establish_connection(owner, ancestor_pool.spec).tap do |pool|
+ establish_connection(ancestor_pool.spec.to_hash).tap do |pool|
pool.schema_cache = ancestor_pool.schema_cache if ancestor_pool.schema_cache
end
else
- owner_to_pool[owner.name] = nil
+ owner_to_pool[spec_name] = nil
end
- }
+ end
end
- def pool_from_any_process_for(owner)
- owner_to_pool = @owner_to_pool.values.find { |v| v[owner.name] }
- owner_to_pool && owner_to_pool[owner.name]
- end
+ private
+
+ def owner_to_pool
+ @owner_to_pool[Process.pid]
+ end
+
+ def pool_from_any_process_for(spec_name)
+ owner_to_pool = @owner_to_pool.values.reverse.find { |v| v[spec_name] }
+ owner_to_pool && owner_to_pool[spec_name]
+ end
end
end
end
diff --git a/activerecord/lib/active_record/connection_adapters/abstract/database_limits.rb b/activerecord/lib/active_record/connection_adapters/abstract/database_limits.rb
index 6711049588..1305216be2 100644
--- a/activerecord/lib/active_record/connection_adapters/abstract/database_limits.rb
+++ b/activerecord/lib/active_record/connection_adapters/abstract/database_limits.rb
@@ -1,7 +1,10 @@
+# frozen_string_literal: true
+
+require "active_support/deprecation"
+
module ActiveRecord
module ConnectionAdapters # :nodoc:
module DatabaseLimits
-
# Returns the maximum length of a table alias.
def table_alias_length
255
@@ -11,11 +14,13 @@ module ActiveRecord
def column_name_length
64
end
+ deprecate :column_name_length
# Returns the maximum length of a table name.
def table_name_length
64
end
+ deprecate :table_name_length
# Returns the maximum allowed length for an index name. This
# limit is enforced by \Rails and is less than or equal to
@@ -35,19 +40,22 @@ module ActiveRecord
def columns_per_table
1024
end
+ deprecate :columns_per_table
# Returns the maximum number of indexes per table.
def indexes_per_table
16
end
+ deprecate :indexes_per_table
# Returns the maximum number of columns in a multicolumn index.
def columns_per_multicolumn_index
16
end
+ deprecate :columns_per_multicolumn_index
# Returns the maximum number of elements in an IN (x,y,z) clause.
- # nil means no limit.
+ # +nil+ means no limit.
def in_clause_length
nil
end
@@ -56,12 +64,18 @@ module ActiveRecord
def sql_query_length
1048575
end
+ deprecate :sql_query_length
# Returns maximum number of joins in a single query.
def joins_per_query
256
end
+ deprecate :joins_per_query
+ private
+ def bind_params_length
+ 65535
+ end
end
end
end
diff --git a/activerecord/lib/active_record/connection_adapters/abstract/database_statements.rb b/activerecord/lib/active_record/connection_adapters/abstract/database_statements.rb
index 824040775d..6aacbe5f88 100644
--- a/activerecord/lib/active_record/connection_adapters/abstract/database_statements.rb
+++ b/activerecord/lib/active_record/connection_adapters/abstract/database_statements.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
module ActiveRecord
module ConnectionAdapters # :nodoc:
module DatabaseStatements
@@ -7,34 +9,61 @@ module ActiveRecord
end
# Converts an arel AST to SQL
- def to_sql(arel, binds = [])
- if arel.respond_to?(:ast)
- collected = visitor.accept(arel.ast, collector)
- collected.compile(binds.dup, self)
+ def to_sql(arel_or_sql_string, binds = [])
+ sql, _ = to_sql_and_binds(arel_or_sql_string, binds)
+ sql
+ end
+
+ def to_sql_and_binds(arel_or_sql_string, binds = []) # :nodoc:
+ if arel_or_sql_string.respond_to?(:ast)
+ unless binds.empty?
+ raise "Passing bind parameters with an arel AST is forbidden. " \
+ "The values must be stored on the AST directly"
+ end
+
+ if prepared_statements
+ sql, binds = visitor.compile(arel_or_sql_string.ast, collector)
+
+ if binds.length > bind_params_length
+ unprepared_statement do
+ sql, binds = to_sql_and_binds(arel_or_sql_string)
+ visitor.preparable = false
+ end
+ end
+ else
+ sql = visitor.compile(arel_or_sql_string.ast, collector)
+ end
+ [sql.freeze, binds]
else
- arel
+ visitor.preparable = false if prepared_statements
+ [arel_or_sql_string.dup.freeze, binds]
end
end
+ private :to_sql_and_binds
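
A hedged illustration of to_sql with an Arel AST; Post is a placeholder model, and the exact SQL and bind placeholder syntax depend on the adapter and on prepared_statements:

    relation = Post.where(title: "Hello")
    ActiveRecord::Base.connection.to_sql(relation.arel)
    # e.g. SELECT "posts".* FROM "posts" WHERE "posts"."title" = $1
    # (the bind value stays on the AST; to_sql_and_binds also returns the binds)
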
# This is used in the StatementCache object. It returns an object that
# can be used to query the database repeatedly.
- def cacheable_query(arel) # :nodoc:
+ def cacheable_query(klass, arel) # :nodoc:
if prepared_statements
- ActiveRecord::StatementCache.query visitor, arel.ast
+ sql, binds = visitor.compile(arel.ast, collector)
+ query = klass.query(sql)
else
- ActiveRecord::StatementCache.partial_query visitor, arel.ast, collector
+ collector = klass.partial_query_collector
+ parts, binds = visitor.compile(arel.ast, collector)
+ query = klass.partial_query(parts)
end
+ [query, binds]
end
# Returns an ActiveRecord::Result instance.
def select_all(arel, name = nil, binds = [], preparable: nil)
- arel, binds = binds_from_relation arel, binds
- sql = to_sql(arel, binds)
- if !prepared_statements || (arel.is_a?(String) && preparable.nil?)
- preparable = false
- else
- preparable = visitor.preparable
+ arel = arel_from_relation(arel)
+ sql, binds = to_sql_and_binds(arel, binds)
+
+ if preparable.nil?
+ preparable = prepared_statements ? visitor.preparable : false
end
+
if prepared_statements && preparable
select_prepared(sql, name, binds)
else
@@ -50,24 +79,37 @@ module ActiveRecord
# Returns a single value from a record
def select_value(arel, name = nil, binds = [])
- arel, binds = binds_from_relation arel, binds
- if result = select_rows(to_sql(arel, binds), name, binds).first
- result.first
- end
+ single_value_from_rows(select_rows(arel, name, binds))
end
# Returns an array of the values of the first column in a select:
# select_values("SELECT id FROM companies LIMIT 3") => [1,2,3]
def select_values(arel, name = nil, binds = [])
- arel, binds = binds_from_relation arel, binds
- select_rows(to_sql(arel, binds), name, binds).map(&:first)
+ select_rows(arel, name, binds).map(&:first)
end
# Returns an array of arrays containing the field values.
# Order is the same as that returned by +columns+.
- def select_rows(sql, name = nil, binds = [])
+ def select_rows(arel, name = nil, binds = [])
+ select_all(arel, name, binds).rows
+ end
+
+ def query_value(sql, name = nil) # :nodoc:
+ single_value_from_rows(query(sql, name))
+ end
+
+ def query_values(sql, name = nil) # :nodoc:
+ query(sql, name).map(&:first)
+ end
+
+ def query(sql, name = nil) # :nodoc:
+ exec_query(sql, name).rows
+ end
+
+ # Determines whether the SQL statement is a write query.
+ def write_query?(sql)
+ raise NotImplementedError
end
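
For orientation, the select_* helpers above in use; table names and results are illustrative:

    conn = ActiveRecord::Base.connection
    conn.select_value("SELECT COUNT(*) FROM posts")           # => 42
    conn.select_values("SELECT id FROM posts LIMIT 3")        # => [1, 2, 3]
    conn.select_rows("SELECT id, title FROM posts LIMIT 1")   # => [[1, "Hello"]]
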
- undef_method :select_rows
# Executes the SQL statement in the context of this connection and returns
# the raw result from the connection adapter.
@@ -75,76 +117,80 @@ module ActiveRecord
# method may be manually memory managed. Consider using the exec_query
# wrapper instead.
def execute(sql, name = nil)
+ raise NotImplementedError
end
- undef_method :execute
# Executes +sql+ statement in the context of this connection using
# +binds+ as the bind substitutes. +name+ is logged along with
# the executed +sql+ statement.
- def exec_query(sql, name = 'SQL', binds = [], prepare: false)
+ def exec_query(sql, name = "SQL", binds = [], prepare: false)
+ raise NotImplementedError
end
# Executes insert +sql+ statement in the context of this connection using
# +binds+ as the bind substitutes. +name+ is logged along with
# the executed +sql+ statement.
- def exec_insert(sql, name, binds, pk = nil, sequence_name = nil)
+ def exec_insert(sql, name = nil, binds = [], pk = nil, sequence_name = nil)
+ sql, binds = sql_for_insert(sql, pk, sequence_name, binds)
exec_query(sql, name, binds)
end
# Executes delete +sql+ statement in the context of this connection using
# +binds+ as the bind substitutes. +name+ is logged along with
# the executed +sql+ statement.
- def exec_delete(sql, name, binds)
+ def exec_delete(sql, name = nil, binds = [])
exec_query(sql, name, binds)
end
- # Executes the truncate statement.
- def truncate(table_name, name = nil)
- raise NotImplementedError
- end
-
# Executes update +sql+ statement in the context of this connection using
# +binds+ as the bind substitutes. +name+ is logged along with
# the executed +sql+ statement.
- def exec_update(sql, name, binds)
+ def exec_update(sql, name = nil, binds = [])
exec_query(sql, name, binds)
end
# Executes an INSERT query and returns the new record's ID
#
- # +id_value+ will be returned unless the value is nil, in
+ # +id_value+ will be returned unless the value is +nil+, in
# which case the database will attempt to calculate the last inserted
# id and return that value.
#
# If the next id was calculated in advance (as in Oracle), it should be
# passed in as +id_value+.
def insert(arel, name = nil, pk = nil, id_value = nil, sequence_name = nil, binds = [])
- sql, binds, pk, sequence_name = sql_for_insert(to_sql(arel, binds), pk, id_value, sequence_name, binds)
+ sql, binds = to_sql_and_binds(arel, binds)
value = exec_insert(sql, name, binds, pk, sequence_name)
id_value || last_inserted_id(value)
end
alias create insert
- alias insert_sql insert
- deprecate insert_sql: :insert
# Executes the update statement and returns the number of rows affected.
def update(arel, name = nil, binds = [])
- exec_update(to_sql(arel, binds), name, binds)
+ sql, binds = to_sql_and_binds(arel, binds)
+ exec_update(sql, name, binds)
end
- alias update_sql update
- deprecate update_sql: :update
# Executes the delete statement and returns the number of rows affected.
def delete(arel, name = nil, binds = [])
- exec_delete(to_sql(arel, binds), name, binds)
+ sql, binds = to_sql_and_binds(arel, binds)
+ exec_delete(sql, name, binds)
+ end
+
+ # Executes the truncate statement.
+ def truncate(table_name, name = nil)
+ execute(build_truncate_statements(table_name), name)
end
- alias delete_sql delete
- deprecate delete_sql: :delete
- # Returns +true+ when the connection adapter supports prepared statement
- # caching, otherwise returns +false+
- def supports_statement_cache?
- false
+ def truncate_tables(*table_names) # :nodoc:
+ return if table_names.empty?
+
+ with_multi_statements do
+ disable_referential_integrity do
+ Array(build_truncate_statements(*table_names)).each do |sql|
+ execute_batch(sql, "Truncate Tables")
+ end
+ end
+ end
end
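
A short sketch of the truncate helpers; the table names are placeholders:

    conn = ActiveRecord::Base.connection
    conn.truncate("posts")                  # issues a single TRUNCATE TABLE statement
    conn.truncate_tables("posts", "tags")   # batches the statements where the adapter allows it
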
# Runs the given block in a database transaction, and returns the result
@@ -158,7 +204,7 @@ module ActiveRecord
#
# In order to get around this problem, #transaction will emulate the effect
# of nested transactions, by using savepoints:
- # http://dev.mysql.com/doc/refman/5.7/en/savepoint.html
+ # https://dev.mysql.com/doc/refman/5.7/en/savepoint.html
# Savepoints are supported by MySQL and PostgreSQL. SQLite3 version >= '3.6.8'
# supports savepoints.
#
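
The savepoint emulation described above in a hedged example; Post is a placeholder model:

    ActiveRecord::Base.transaction do
      Post.create!(title: "outer")
      ActiveRecord::Base.transaction(requires_new: true) do
        Post.create!(title: "inner")
        raise ActiveRecord::Rollback   # rolls back only to the savepoint
      end
    end
    # "outer" is committed; "inner" is discarded with the savepoint
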
@@ -210,7 +256,7 @@ module ActiveRecord
# You should consult the documentation for your database to understand the
# semantics of these different levels:
#
- # * http://www.postgresql.org/docs/current/static/transaction-iso.html
+ # * https://www.postgresql.org/docs/current/static/transaction-iso.html
# * https://dev.mysql.com/doc/refman/5.7/en/set-transaction.html
#
# An ActiveRecord::TransactionIsolationError will be raised if:
@@ -220,9 +266,7 @@ module ActiveRecord
# * You are creating a nested (savepoint) transaction
#
# The mysql2 and postgresql adapters support setting the transaction
- # isolation level. However, support is disabled for MySQL versions below 5,
- # because they are affected by a bug[http://bugs.mysql.com/bug.php?id=39170]
- # which means the isolation level gets persisted outside the transaction.
+ # isolation level.
def transaction(requires_new: nil, isolation: nil, joinable: true)
if !requires_new && current_transaction.joinable?
if isolation
@@ -238,14 +282,16 @@ module ActiveRecord
attr_reader :transaction_manager #:nodoc:
- delegate :within_new_transaction, :open_transactions, :current_transaction, :begin_transaction, :commit_transaction, :rollback_transaction, to: :transaction_manager
+ delegate :within_new_transaction, :open_transactions, :current_transaction, :begin_transaction,
+ :commit_transaction, :rollback_transaction, :materialize_transactions,
+ :disable_lazy_transactions!, :enable_lazy_transactions!, to: :transaction_manager
def transaction_open?
current_transaction.open?
end
def reset_transaction #:nodoc:
- @transaction_manager = TransactionManager.new(self)
+ @transaction_manager = ConnectionAdapters::TransactionManager.new(self)
end
# Register a record with the current transaction so that its after_commit and after_rollback callbacks
@@ -292,9 +338,6 @@ module ActiveRecord
exec_rollback_to_savepoint(name)
end
- def exec_rollback_to_savepoint(name = nil) #:nodoc:
- end
-
def default_sequence_name(table, column)
nil
end
@@ -306,70 +349,134 @@ module ActiveRecord
# Inserts the given fixture into the table. Overridden in adapters that require
# something beyond a simple insert (eg. Oracle).
+ # Most adapters should implement `insert_fixtures_set`, which leverages bulk SQL insert.
+ # We keep this method to provide a fallback
+ # for databases like SQLite that do not support bulk inserts.
def insert_fixture(fixture, table_name)
- fixture = fixture.stringify_keys
+ execute(build_fixture_sql(Array.wrap(fixture), table_name), "Fixture Insert")
+ end
- columns = schema_cache.columns_hash(table_name)
- binds = fixture.map do |name, value|
- if column = columns[name]
- type = lookup_cast_type_from_column(column)
- Relation::QueryAttribute.new(name, value, type)
- else
- raise Fixture::FixtureError, %(table "#{table_name}" has no column named #{name.inspect}.)
- end
- end
- key_list = fixture.keys.map { |name| quote_column_name(name) }
- value_list = prepare_binds_for_database(binds).map do |value|
- begin
- quote(value)
- rescue TypeError
- quote(YAML.dump(value))
+ def insert_fixtures_set(fixture_set, tables_to_delete = [])
+ fixture_inserts = build_fixture_statements(fixture_set)
+ table_deletes = tables_to_delete.map { |table| "DELETE FROM #{quote_table_name(table)}" }
+ total_sql = Array(combine_multi_statements(table_deletes + fixture_inserts))
+
+ with_multi_statements do
+ disable_referential_integrity do
+ transaction(requires_new: true) do
+ total_sql.each do |sql|
+ execute_batch(sql, "Fixtures Load")
+ end
+ end
end
end
-
- execute "INSERT INTO #{quote_table_name(table_name)} (#{key_list.join(', ')}) VALUES (#{value_list.join(', ')})", 'Fixture Insert'
end
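
The shape insert_fixtures_set expects, sketched with placeholder tables and columns:

    fixture_set = {
      "posts"    => [{ "id" => 1, "title" => "Hello" }],
      "comments" => [{ "id" => 1, "post_id" => 1, "body" => "First!" }]
    }
    ActiveRecord::Base.connection.insert_fixtures_set(fixture_set, %w[posts comments])
    # deletes from both tables, then bulk-inserts the fixtures in one transaction
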
- def empty_insert_statement_value
+ def empty_insert_statement_value(primary_key = nil)
"DEFAULT VALUES"
end
# Sanitizes the given LIMIT parameter in order to prevent SQL injection.
#
# The +limit+ may be anything that can evaluate to a string via #to_s. It
- # should look like an integer, or a comma-delimited list of integers, or
- # an Arel SQL literal.
+ # should look like an integer, or an Arel SQL literal.
#
# Returns Integer and Arel::Nodes::SqlLiteral limits as is.
- # Returns the sanitized limit parameter, either as an integer, or as a
- # string which contains a comma-delimited list of integers.
def sanitize_limit(limit)
if limit.is_a?(Integer) || limit.is_a?(Arel::Nodes::SqlLiteral)
limit
- elsif limit.to_s.include?(',')
- Arel.sql limit.to_s.split(',').map{ |i| Integer(i) }.join(',')
else
Integer(limit)
end
end
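
What sanitize_limit accepts after this change (comma-delimited lists are no longer allowed); conn stands for any adapter instance and the outputs are illustrative:

    conn.sanitize_limit(10)                        # => 10
    conn.sanitize_limit("10")                      # => 10
    conn.sanitize_limit(Arel.sql("10 OFFSET 5"))   # passed through as an SQL literal
    conn.sanitize_limit("10; DROP TABLE posts")    # raises ArgumentError
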
- # The default strategy for an UPDATE with joins is to use a subquery. This doesn't work
- # on MySQL (even when aliasing the tables), but MySQL allows using JOIN directly in
- # an UPDATE statement, so in the MySQL adapters we redefine this to do that.
- def join_to_update(update, select, key) # :nodoc:
- subselect = subquery_for(key, select)
-
- update.where key.in(subselect)
+ # Fixture values are quoted by Arel; however, hashes and arrays are not
+ # directly quotable. In those cases we want to convert
+ # the column value to YAML.
+ def with_yaml_fallback(value) # :nodoc:
+ if value.is_a?(Hash) || value.is_a?(Array)
+ YAML.dump(value)
+ else
+ value
+ end
end
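
How with_yaml_fallback behaves for the two cases it distinguishes; conn stands for any adapter instance:

    conn.with_yaml_fallback([1, 2, 3])   # => "---\n- 1\n- 2\n- 3\n"  (dumped to YAML)
    conn.with_yaml_fallback("plain")     # => "plain"                 (returned unchanged)
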
- alias join_to_delete join_to_update
- protected
+ private
+ def execute_batch(sql, name = nil)
+ execute(sql, name)
+ end
+
+ DEFAULT_INSERT_VALUE = Arel.sql("DEFAULT").freeze
+ private_constant :DEFAULT_INSERT_VALUE
+
+ def default_insert_value(column)
+ DEFAULT_INSERT_VALUE
+ end
+
+ def build_fixture_sql(fixtures, table_name)
+ columns = schema_cache.columns_hash(table_name)
+
+ values_list = fixtures.map do |fixture|
+ fixture = fixture.stringify_keys
+
+ unknown_columns = fixture.keys - columns.keys
+ if unknown_columns.any?
+ raise Fixture::FixtureError, %(table "#{table_name}" has no columns named #{unknown_columns.map(&:inspect).join(', ')}.)
+ end
+
+ columns.map do |name, column|
+ if fixture.key?(name)
+ type = lookup_cast_type_from_column(column)
+ bind = Relation::QueryAttribute.new(name, fixture[name], type)
+ with_yaml_fallback(bind.value_for_database)
+ else
+ default_insert_value(column)
+ end
+ end
+ end
- # Returns a subquery for the given key using the join information.
- def subquery_for(key, select)
- subselect = select.clone
- subselect.projections = [key]
- subselect
+ table = Arel::Table.new(table_name)
+ manager = Arel::InsertManager.new
+ manager.into(table)
+
+ if values_list.size == 1
+ values = values_list.shift
+ new_values = []
+ columns.each_key.with_index { |column, i|
+ unless values[i].equal?(DEFAULT_INSERT_VALUE)
+ new_values << values[i]
+ manager.columns << table[column]
+ end
+ }
+ values_list << new_values
+ else
+ columns.each_key { |column| manager.columns << table[column] }
+ end
+
+ manager.values = manager.create_values_list(values_list)
+ manager.to_sql
+ end
+
+ def build_fixture_statements(fixture_set)
+ fixture_set.map do |table_name, fixtures|
+ next if fixtures.empty?
+ build_fixture_sql(fixtures, table_name)
+ end.compact
+ end
+
+ def build_truncate_statements(*table_names)
+ truncate_tables = table_names.map do |table_name|
+ "TRUNCATE TABLE #{quote_table_name(table_name)}"
+ end
+ combine_multi_statements(truncate_tables)
+ end
+
+ def with_multi_statements
+ yield
+ end
+
+ def combine_multi_statements(total_sql)
+ total_sql.join(";\n")
end
# Returns an ActiveRecord::Result instance.
@@ -381,20 +488,25 @@ module ActiveRecord
exec_query(sql, name, binds, prepare: true)
end
- def sql_for_insert(sql, pk, id_value, sequence_name, binds)
- [sql, binds, pk, sequence_name]
+ def sql_for_insert(sql, pk, sequence_name, binds)
+ [sql, binds]
end
def last_inserted_id(result)
- row = result.rows.first
+ single_value_from_rows(result.rows)
+ end
+
+ def single_value_from_rows(rows)
+ row = rows.first
row && row.first
end
- def binds_from_relation(relation, binds)
- if relation.is_a?(Relation) && binds.empty?
- relation, binds = relation.arel, relation.bound_attributes
+ def arel_from_relation(relation)
+ if relation.is_a?(Relation)
+ relation.arel
+ else
+ relation
end
- [relation, binds]
end
end
end
diff --git a/activerecord/lib/active_record/connection_adapters/abstract/query_cache.rb b/activerecord/lib/active_record/connection_adapters/abstract/query_cache.rb
index 0bdfd4f900..93b1c4e632 100644
--- a/activerecord/lib/active_record/connection_adapters/abstract/query_cache.rb
+++ b/activerecord/lib/active_record/connection_adapters/abstract/query_cache.rb
@@ -1,16 +1,23 @@
+# frozen_string_literal: true
+
+require "concurrent/map"
+
module ActiveRecord
module ConnectionAdapters # :nodoc:
module QueryCache
class << self
def included(base) #:nodoc:
dirties_query_cache base, :insert, :update, :delete, :rollback_to_savepoint, :rollback_db_transaction
+
+ base.set_callback :checkout, :after, :configure_query_cache!
+ base.set_callback :checkin, :after, :disable_query_cache!
end
def dirties_query_cache(base, *method_names)
method_names.each do |method_name|
base.class_eval <<-end_code, __FILE__, __LINE__ + 1
def #{method_name}(*)
- clear_query_cache if @query_cache_enabled
+ ActiveRecord::Base.clear_query_caches_for_current_thread if @query_cache_enabled
super
end
end_code
@@ -18,11 +25,32 @@ module ActiveRecord
end
end
+ module ConnectionPoolConfiguration
+ def initialize(*)
+ super
+ @query_cache_enabled = Concurrent::Map.new { false }
+ end
+
+ def enable_query_cache!
+ @query_cache_enabled[connection_cache_key(Thread.current)] = true
+ connection.enable_query_cache! if active_connection?
+ end
+
+ def disable_query_cache!
+ @query_cache_enabled.delete connection_cache_key(Thread.current)
+ connection.disable_query_cache! if active_connection?
+ end
+
+ def query_cache_enabled
+ @query_cache_enabled[connection_cache_key(Thread.current)]
+ end
+ end
+
attr_reader :query_cache, :query_cache_enabled
def initialize(*)
super
- @query_cache = Hash.new { |h,sql| h[sql] = {} }
+ @query_cache = Hash.new { |h, sql| h[sql] = {} }
@query_cache_enabled = false
end
@@ -41,6 +69,7 @@ module ActiveRecord
def disable_query_cache!
@query_cache_enabled = false
+ clear_query_cache
end
# Disable the query cache within the block.
@@ -58,14 +87,21 @@ module ActiveRecord
# the same SQL query and repeatedly return the same result each time, silently
# undermining the randomness you were expecting.
def clear_query_cache
- @query_cache.clear
+ @lock.synchronize do
+ @query_cache.clear
+ end
end
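
A sketch of the query cache in use; Post is a placeholder model:

    ActiveRecord::Base.connection.cache do
      Post.find(1)   # hits the database
      Post.find(1)   # served from the query cache, logged under the CACHE name
    end
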
def select_all(arel, name = nil, binds = [], preparable: nil)
if @query_cache_enabled && !locked?(arel)
- arel, binds = binds_from_relation arel, binds
- sql = to_sql(arel, binds)
- cache_sql(sql, binds) { super(sql, name, binds, preparable: preparable) }
+ arel = arel_from_relation(arel)
+ sql, binds = to_sql_and_binds(arel, binds)
+
+ if preparable.nil?
+ preparable = prepared_statements ? visitor.preparable : false
+ end
+
+ cache_sql(sql, name, binds) { super(sql, name, binds, preparable: preparable) }
else
super
end
@@ -73,23 +109,45 @@ module ActiveRecord
private
- def cache_sql(sql, binds)
- result =
- if @query_cache[sql].key?(binds)
- ActiveSupport::Notifications.instrument("sql.active_record",
- :sql => sql, :binds => binds, :name => "CACHE", :connection_id => object_id)
- @query_cache[sql][binds]
- else
- @query_cache[sql][binds] = yield
+ def cache_sql(sql, name, binds)
+ @lock.synchronize do
+ result =
+ if @query_cache[sql].key?(binds)
+ ActiveSupport::Notifications.instrument(
+ "sql.active_record",
+ cache_notification_info(sql, name, binds)
+ )
+ @query_cache[sql][binds]
+ else
+ @query_cache[sql][binds] = yield
+ end
+ result.dup
end
- result.dup
- end
+ end
- # If arel is locked this is a SELECT ... FOR UPDATE or somesuch. Such
- # queries should not be cached.
- def locked?(arel)
- arel.respond_to?(:locked) && arel.locked
- end
+ # Database adapters can override this method to
+ # provide custom cache information.
+ def cache_notification_info(sql, name, binds)
+ {
+ sql: sql,
+ binds: binds,
+ type_casted_binds: -> { type_casted_binds(binds) },
+ name: name,
+ connection_id: object_id,
+ cached: true
+ }
+ end
+
+ # If arel is locked this is a SELECT ... FOR UPDATE or somesuch. Such
+ # queries should not be cached.
+ def locked?(arel)
+ arel = arel.arel if arel.is_a?(Relation)
+ arel.respond_to?(:locked) && arel.locked
+ end
+
+ def configure_query_cache!
+ enable_query_cache! if pool.query_cache_enabled
+ end
end
end
end
diff --git a/activerecord/lib/active_record/connection_adapters/abstract/quoting.rb b/activerecord/lib/active_record/connection_adapters/abstract/quoting.rb
index 2eeefb13d7..2877530917 100644
--- a/activerecord/lib/active_record/connection_adapters/abstract/quoting.rb
+++ b/activerecord/lib/active_record/connection_adapters/abstract/quoting.rb
@@ -1,22 +1,18 @@
-require 'active_support/core_ext/big_decimal/conversions'
+# frozen_string_literal: true
+
+require "active_support/core_ext/big_decimal/conversions"
+require "active_support/multibyte/chars"
module ActiveRecord
module ConnectionAdapters # :nodoc:
module Quoting
# Quotes the column value to help prevent
- # {SQL injection attacks}[http://en.wikipedia.org/wiki/SQL_injection].
- def quote(value, column = nil)
- # records are quoted as their primary key
- return value.quoted_id if value.respond_to?(:quoted_id)
+ # {SQL injection attacks}[https://en.wikipedia.org/wiki/SQL_injection].
+ def quote(value)
+ value = id_value_for_database(value) if value.is_a?(Base)
- if column
- ActiveSupport::Deprecation.warn(<<-MSG.squish)
- Passing a column to `quote` has been deprecated. It is only used
- for type casting, which should be handled elsewhere. See
- https://github.com/rails/arel/commit/6160bfbda1d1781c3b08a33ec4955f170e95be11
- for more information.
- MSG
- value = type_cast_from_column(column, value)
+ if value.respond_to?(:value_for_database)
+ value = value.value_for_database
end
_quote(value)
@@ -26,9 +22,7 @@ module ActiveRecord
# SQLite does not understand dates, so this method will convert a Date
# to a String.
def type_cast(value, column = nil)
- if value.respond_to?(:quoted_id) && value.respond_to?(:id)
- return value.id
- end
+ value = id_value_for_database(value) if value.is_a?(Base)
if column
value = type_cast_from_column(column, value)
@@ -63,21 +57,10 @@ module ActiveRecord
lookup_cast_type(column.sql_type)
end
- def fetch_type_metadata(sql_type)
- cast_type = lookup_cast_type(sql_type)
- SqlTypeMetadata.new(
- sql_type: sql_type,
- type: cast_type.type,
- limit: cast_type.limit,
- precision: cast_type.precision,
- scale: cast_type.scale,
- )
- end
-
# Quotes a string, escaping any ' (single quote) and \ (backslash)
# characters.
def quote_string(s)
- s.gsub('\\'.freeze, '\&\&'.freeze).gsub("'".freeze, "''".freeze) # ' (for ruby-mode)
+ s.gsub('\\', '\&\&').gsub("'", "''") # ' (for ruby-mode)
end
# Quotes the column name. Defaults to no quoting.
@@ -112,19 +95,19 @@ module ActiveRecord
end
def quoted_true
- "'t'"
+ "TRUE"
end
def unquoted_true
- 't'
+ true
end
def quoted_false
- "'f'"
+ "FALSE"
end
def unquoted_false
- 'f'
+ false
end
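
With the new boolean literals above, the abstract adapter's quote behaves roughly as follows; concrete adapters may override quoted_true/quoted_false:

    conn.quote("O'Reilly")   # => "'O''Reilly'"
    conn.quote(nil)          # => "NULL"
    conn.quote(true)         # => "TRUE"   (was "'t'" before this change)
    conn.quote(false)        # => "FALSE"  (was "'f'" before this change)
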
# Quote date/time values for use in SQL input. Includes microseconds
@@ -146,47 +129,70 @@ module ActiveRecord
end
end
- def prepare_binds_for_database(binds) # :nodoc:
- binds.map(&:value_for_database)
+ def quoted_time(value) # :nodoc:
+ value = value.change(year: 2000, month: 1, day: 1)
+ quoted_date(value).sub(/\A\d\d\d\d-\d\d-\d\d /, "")
+ end
+
+ def quoted_binary(value) # :nodoc:
+ "'#{quote_string(value.to_s)}'"
+ end
+
+ def sanitize_as_sql_comment(value) # :nodoc:
+ value.to_s.gsub(%r{ (/ (?: | \g<1>) \*) \+? \s* | \s* (\* (?: | \g<2>) /) }x, "")
end
private
+ def type_casted_binds(binds)
+ if binds.first.is_a?(Array)
+ binds.map { |column, value| type_cast(value, column) }
+ else
+ binds.map { |attr| type_cast(attr.value_for_database) }
+ end
+ end
- def types_which_need_no_typecasting
- [nil, Numeric, String]
- end
-
- def _quote(value)
- case value
- when String, ActiveSupport::Multibyte::Chars, Type::Binary::Data
- "'#{quote_string(value.to_s)}'"
- when true then quoted_true
- when false then quoted_false
- when nil then "NULL"
- # BigDecimals need to be put in a non-normalized form and quoted.
- when BigDecimal then value.to_s('F')
- when Numeric, ActiveSupport::Duration then value.to_s
- when Date, Time then "'#{quoted_date(value)}'"
- when Symbol then "'#{quote_string(value.to_s)}'"
- when Class then "'#{value}'"
- else raise TypeError, "can't quote #{value.class.name}"
+ def lookup_cast_type(sql_type)
+ type_map.lookup(sql_type)
end
- end
- def _type_cast(value)
- case value
- when Symbol, ActiveSupport::Multibyte::Chars, Type::Binary::Data
- value.to_s
- when true then unquoted_true
- when false then unquoted_false
- # BigDecimals need to be put in a non-normalized form and quoted.
- when BigDecimal then value.to_s('F')
- when Date, Time then quoted_date(value)
- when *types_which_need_no_typecasting
- value
- else raise TypeError
+ def id_value_for_database(value)
+ if primary_key = value.class.primary_key
+ value.instance_variable_get(:@attributes)[primary_key].value_for_database
+ end
+ end
+
+ def _quote(value)
+ case value
+ when String, Symbol, ActiveSupport::Multibyte::Chars
+ "'#{quote_string(value.to_s)}'"
+ when true then quoted_true
+ when false then quoted_false
+ when nil then "NULL"
+ # BigDecimals need to be put in a non-normalized form and quoted.
+ when BigDecimal then value.to_s("F")
+ when Numeric, ActiveSupport::Duration then value.to_s
+ when Type::Binary::Data then quoted_binary(value)
+ when Type::Time::Value then "'#{quoted_time(value)}'"
+ when Date, Time then "'#{quoted_date(value)}'"
+ when Class then "'#{value}'"
+ else raise TypeError, "can't quote #{value.class.name}"
+ end
+ end
+
+ def _type_cast(value)
+ case value
+ when Symbol, ActiveSupport::Multibyte::Chars, Type::Binary::Data
+ value.to_s
+ when true then unquoted_true
+ when false then unquoted_false
+ # BigDecimals need to be put in a non-normalized form and quoted.
+ when BigDecimal then value.to_s("F")
+ when nil, Numeric, String then value
+ when Type::Time::Value then quoted_time(value)
+ when Date, Time then quoted_date(value)
+ else raise TypeError
+ end
end
- end
end
end
end
diff --git a/activerecord/lib/active_record/connection_adapters/abstract/savepoints.rb b/activerecord/lib/active_record/connection_adapters/abstract/savepoints.rb
index c0662f8473..52a796b926 100644
--- a/activerecord/lib/active_record/connection_adapters/abstract/savepoints.rb
+++ b/activerecord/lib/active_record/connection_adapters/abstract/savepoints.rb
@@ -1,8 +1,10 @@
+# frozen_string_literal: true
+
module ActiveRecord
module ConnectionAdapters
- module Savepoints #:nodoc:
- def supports_savepoints?
- true
+ module Savepoints
+ def current_savepoint_name
+ current_transaction.savepoint_name
end
def create_savepoint(name = current_savepoint_name)
diff --git a/activerecord/lib/active_record/connection_adapters/abstract/schema_creation.rb b/activerecord/lib/active_record/connection_adapters/abstract/schema_creation.rb
index 0ba4d94e3c..7d20825a75 100644
--- a/activerecord/lib/active_record/connection_adapters/abstract/schema_creation.rb
+++ b/activerecord/lib/active_record/connection_adapters/abstract/schema_creation.rb
@@ -1,4 +1,4 @@
-require 'active_support/core_ext/string/strip'
+# frozen_string_literal: true
module ActiveRecord
module ConnectionAdapters
@@ -15,32 +15,33 @@ module ActiveRecord
end
delegate :quote_column_name, :quote_table_name, :quote_default_expression, :type_to_sql,
- :options_include_default?, :supports_indexes_in_create?, :supports_foreign_keys?, :foreign_key_options, to: :@conn
- private :quote_column_name, :quote_table_name, :quote_default_expression, :type_to_sql,
- :options_include_default?, :supports_indexes_in_create?, :supports_foreign_keys?, :foreign_key_options
+ :options_include_default?, :supports_indexes_in_create?, :supports_foreign_keys?, :foreign_key_options,
+ to: :@conn, private: true
private
def visit_AlterTable(o)
- sql = "ALTER TABLE #{quote_table_name(o.name)} "
- sql << o.adds.map { |col| accept col }.join(' ')
- sql << o.foreign_key_adds.map { |fk| visit_AddForeignKey fk }.join(' ')
- sql << o.foreign_key_drops.map { |fk| visit_DropForeignKey fk }.join(' ')
+ sql = +"ALTER TABLE #{quote_table_name(o.name)} "
+ sql << o.adds.map { |col| accept col }.join(" ")
+ sql << o.foreign_key_adds.map { |fk| visit_AddForeignKey fk }.join(" ")
+ sql << o.foreign_key_drops.map { |fk| visit_DropForeignKey fk }.join(" ")
end
def visit_ColumnDefinition(o)
- o.sql_type ||= type_to_sql(o.type, o.limit, o.precision, o.scale)
- column_sql = "#{quote_column_name(o.name)} #{o.sql_type}"
+ o.sql_type = type_to_sql(o.type, o.options)
+ column_sql = +"#{quote_column_name(o.name)} #{o.sql_type}"
add_column_options!(column_sql, column_options(o)) unless o.type == :primary_key
column_sql
end
def visit_AddColumnDefinition(o)
- "ADD #{accept(o.column)}"
+ +"ADD #{accept(o.column)}"
end
def visit_TableDefinition(o)
- create_sql = "CREATE#{' TEMPORARY' if o.temporary} TABLE #{quote_table_name(o.name)} "
+ create_sql = +"CREATE#{table_modifier_in_create(o)} TABLE "
+ create_sql << "IF NOT EXISTS " if o.if_not_exists
+ create_sql << "#{quote_table_name(o.name)} "
statements = o.columns.map { |c| accept c }
statements << accept(o.primary_keys) if o.primary_keys
@@ -53,18 +54,18 @@ module ActiveRecord
statements.concat(o.foreign_keys.map { |to_table, options| foreign_key_in_create(o.name, to_table, options) })
end
- create_sql << "(#{statements.join(', ')}) " if statements.present?
- create_sql << "#{o.options}"
- create_sql << " AS #{@conn.to_sql(o.as)}" if o.as
+ create_sql << "(#{statements.join(', ')})" if statements.present?
+ add_table_options!(create_sql, table_options(o))
+ create_sql << " AS #{to_sql(o.as)}" if o.as
create_sql
end
def visit_PrimaryKeyDefinition(o)
- "PRIMARY KEY (#{o.name.join(', ')})"
+ "PRIMARY KEY (#{o.name.map { |name| quote_column_name(name) }.join(', ')})"
end
def visit_ForeignKeyDefinition(o)
- sql = <<-SQL.strip_heredoc
+ sql = +<<~SQL
CONSTRAINT #{quote_column_name(o.name)}
FOREIGN KEY (#{quote_column_name(o.column)})
REFERENCES #{quote_table_name(o.to_table)} (#{quote_column_name(o.primary_key)})
@@ -82,17 +83,22 @@ module ActiveRecord
"DROP CONSTRAINT #{quote_column_name(name)}"
end
+ def table_options(o)
+ table_options = {}
+ table_options[:comment] = o.comment
+ table_options[:options] = o.options
+ table_options
+ end
+
+ def add_table_options!(create_sql, options)
+ if options_sql = options[:options]
+ create_sql << " #{options_sql}"
+ end
+ create_sql
+ end
+
def column_options(o)
- column_options = {}
- column_options[:null] = o.null unless o.null.nil?
- column_options[:default] = o.default unless o.default.nil?
- column_options[:column] = o
- column_options[:first] = o.first
- column_options[:after] = o.after
- column_options[:auto_increment] = o.auto_increment
- column_options[:primary_key] = o.primary_key
- column_options[:collation] = o.collation
- column_options
+ o.options.merge(column: o)
end
def add_column_options!(sql, options)
@@ -110,7 +116,20 @@ module ActiveRecord
sql
end
+ def to_sql(sql)
+ sql = sql.to_sql if sql.respond_to?(:to_sql)
+ sql
+ end
+
+ # Returns any SQL string to go between CREATE and TABLE. May be nil.
+ def table_modifier_in_create(o)
+ " TEMPORARY" if o.temporary
+ end
+
def foreign_key_in_create(from_table, to_table, options)
+ prefix = ActiveRecord::Base.table_name_prefix
+ suffix = ActiveRecord::Base.table_name_suffix
+ to_table = "#{prefix}#{to_table}#{suffix}"
options = foreign_key_options(from_table, to_table, options)
accept ForeignKeyDefinition.new(from_table, to_table, options)
end
@@ -121,7 +140,7 @@ module ActiveRecord
when :cascade then "ON #{action} CASCADE"
when :restrict then "ON #{action} RESTRICT"
else
- raise ArgumentError, <<-MSG.strip_heredoc
+ raise ArgumentError, <<~MSG
'#{dependency}' is not supported for :on_update or :on_delete.
Supported values are: :nullify, :cascade, :restrict
MSG
@@ -129,5 +148,6 @@ module ActiveRecord
end
end
end
+ SchemaCreation = AbstractAdapter::SchemaCreation # :nodoc:
end
end
diff --git a/activerecord/lib/active_record/connection_adapters/abstract/schema_definitions.rb b/activerecord/lib/active_record/connection_adapters/abstract/schema_definitions.rb
index 4f97c7c065..4861872129 100644
--- a/activerecord/lib/active_record/connection_adapters/abstract/schema_definitions.rb
+++ b/activerecord/lib/active_record/connection_adapters/abstract/schema_definitions.rb
@@ -1,32 +1,77 @@
+# frozen_string_literal: true
+
module ActiveRecord
module ConnectionAdapters #:nodoc:
# Abstract representation of an index definition on a table. Instances of
# this type are typically created and returned by methods in database
- # adapters. e.g. ActiveRecord::ConnectionAdapters::AbstractMysqlAdapter#indexes
- class IndexDefinition < Struct.new(:table, :name, :unique, :columns, :lengths, :orders, :where, :type, :using) #:nodoc:
+ # adapters. e.g. ActiveRecord::ConnectionAdapters::MySQL::SchemaStatements#indexes
+ class IndexDefinition # :nodoc:
+ attr_reader :table, :name, :unique, :columns, :lengths, :orders, :opclasses, :where, :type, :using, :comment
+
+ def initialize(
+ table, name,
+ unique = false,
+ columns = [],
+ lengths: {},
+ orders: {},
+ opclasses: {},
+ where: nil,
+ type: nil,
+ using: nil,
+ comment: nil
+ )
+ @table = table
+ @name = name
+ @unique = unique
+ @columns = columns
+ @lengths = concise_options(lengths)
+ @orders = concise_options(orders)
+ @opclasses = concise_options(opclasses)
+ @where = where
+ @type = type
+ @using = using
+ @comment = comment
+ end
+
+ private
+ def concise_options(options)
+ if columns.size == options.size && options.values.uniq.size == 1
+ options.values.first
+ else
+ options
+ end
+ end
end
# Abstract representation of a column definition. Instances of this type
# are typically created by methods in TableDefinition, and added to the
# +columns+ attribute of said TableDefinition object, in order to be used
# for generating a number of table creation or table changing SQL statements.
- class ColumnDefinition < Struct.new(:name, :type, :limit, :precision, :scale, :default, :null, :first, :after, :auto_increment, :primary_key, :collation, :sql_type) #:nodoc:
-
+ ColumnDefinition = Struct.new(:name, :type, :options, :sql_type) do # :nodoc:
def primary_key?
- primary_key || type.to_sym == :primary_key
+ options[:primary_key]
end
- end
- class AddColumnDefinition < Struct.new(:column) # :nodoc:
- end
+ [:limit, :precision, :scale, :default, :null, :collation, :comment].each do |option_name|
+ module_eval <<-CODE, __FILE__, __LINE__ + 1
+ def #{option_name}
+ options[:#{option_name}]
+ end
- class ChangeColumnDefinition < Struct.new(:column, :name) #:nodoc:
+ def #{option_name}=(value)
+ options[:#{option_name}] = value
+ end
+ CODE
+ end
end
- class PrimaryKeyDefinition < Struct.new(:name) # :nodoc:
- end
+ AddColumnDefinition = Struct.new(:column) # :nodoc:
- class ForeignKeyDefinition < Struct.new(:from_table, :to_table, :options) #:nodoc:
+ ChangeColumnDefinition = Struct.new(:column, :name) #:nodoc:
+
+ PrimaryKeyDefinition = Struct.new(:name) # :nodoc:
+
+ ForeignKeyDefinition = Struct.new(:from_table, :to_table, :options) do #:nodoc:
def name
options[:name]
end
@@ -51,18 +96,24 @@ module ActiveRecord
options[:primary_key] != default_primary_key
end
- def defined_for?(options_or_to_table = {})
- if options_or_to_table.is_a?(Hash)
- options_or_to_table.all? {|key, value| options[key].to_s == value.to_s }
- else
- to_table == options_or_to_table.to_s
- end
+ def validate?
+ options.fetch(:validate, true)
end
+ alias validated? validate?
- private
- def default_primary_key
- "id"
+ def export_name_on_schema_dump?
+ !ActiveRecord::SchemaDumper.fk_ignore_pattern.match?(name) if name
+ end
+
+ def defined_for?(to_table: nil, **options)
+ (to_table.nil? || to_table.to_s == self.to_table) &&
+ options.all? { |k, v| self.options[k].to_s == v.to_s }
end
+
+ private
+ def default_primary_key
+ "id"
+ end
end
class ReferenceDefinition # :nodoc:
@@ -71,7 +122,7 @@ module ActiveRecord
polymorphic: false,
index: true,
foreign_key: false,
- type: :integer,
+ type: :bigint,
**options
)
@name = name
@@ -100,89 +151,87 @@ module ActiveRecord
end
end
- protected
-
- attr_reader :name, :polymorphic, :index, :foreign_key, :type, :options
-
private
+ attr_reader :name, :polymorphic, :index, :foreign_key, :type, :options
- def as_options(value, default = {})
- if value.is_a?(Hash)
- value
- else
- default
+ def as_options(value)
+ value.is_a?(Hash) ? value : {}
end
- end
- def polymorphic_options
- as_options(polymorphic, options)
- end
+ def polymorphic_options
+ as_options(polymorphic).merge(options.slice(:null, :first, :after))
+ end
- def index_options
- as_options(index)
- end
+ def index_options
+ as_options(index)
+ end
- def foreign_key_options
- as_options(foreign_key).merge(column: column_name)
- end
+ def foreign_key_options
+ as_options(foreign_key).merge(column: column_name)
+ end
- def columns
- result = [[column_name, type, options]]
- if polymorphic
- result.unshift(["#{name}_type", :string, polymorphic_options])
+ def columns
+ result = [[column_name, type, options]]
+ if polymorphic
+ result.unshift(["#{name}_type", :string, polymorphic_options])
+ end
+ result
end
- result
- end
- def column_name
- "#{name}_id"
- end
+ def column_name
+ "#{name}_id"
+ end
- def column_names
- columns.map(&:first)
- end
+ def column_names
+ columns.map(&:first)
+ end
- def foreign_table_name
- foreign_key_options.fetch(:to_table) do
- Base.pluralize_table_names ? name.to_s.pluralize : name
+ def foreign_table_name
+ foreign_key_options.fetch(:to_table) do
+ Base.pluralize_table_names ? name.to_s.pluralize : name
+ end
end
- end
end
module ColumnMethods
+ extend ActiveSupport::Concern
+
# Appends a primary key definition to the table definition.
# Can be called multiple times, but this is probably not a good idea.
def primary_key(name, type = :primary_key, **options)
column(name, type, options.merge(primary_key: true))
end
+ ##
+ # :method: column
+ # :call-seq: column(name, type, **options)
+ #
# Appends a column or columns of a specified type.
#
# t.string(:goat)
# t.string(:goat, :sheep)
#
# See TableDefinition#column
- [
- :bigint,
- :binary,
- :boolean,
- :date,
- :datetime,
- :decimal,
- :float,
- :integer,
- :string,
- :text,
- :time,
- :timestamp,
- ].each do |column_type|
- module_eval <<-CODE, __FILE__, __LINE__ + 1
- def #{column_type}(*args, **options)
- args.each { |name| column(name, :#{column_type}, options) }
+
+ included do
+ define_column_methods :bigint, :binary, :boolean, :date, :datetime, :decimal,
+ :float, :integer, :json, :string, :text, :time, :timestamp, :virtual
+
+ alias :numeric :decimal
+ end
+
+ class_methods do
+ private def define_column_methods(*column_types) # :nodoc:
+ column_types.each do |column_type|
+ module_eval <<-RUBY, __FILE__, __LINE__ + 1
+ def #{column_type}(*names, **options)
+ raise ArgumentError, "Missing column name(s) for #{column_type}" if names.empty?
+ names.each { |name| column(name, :#{column_type}, options) }
+ end
+ RUBY
end
- CODE
+ end
end
- alias_method :numeric, :decimal
end
# Represents the schema of an SQL table in an abstract way. This class
@@ -206,18 +255,29 @@ module ActiveRecord
class TableDefinition
include ColumnMethods
- attr_accessor :indexes
- attr_reader :name, :temporary, :options, :as, :foreign_keys
+ attr_reader :name, :temporary, :if_not_exists, :options, :as, :comment, :indexes, :foreign_keys
- def initialize(name, temporary, options, as = nil)
+ def initialize(
+ conn,
+ name,
+ temporary: false,
+ if_not_exists: false,
+ options: nil,
+ as: nil,
+ comment: nil,
+ **
+ )
+ @conn = conn
@columns_hash = {}
- @indexes = {}
+ @indexes = []
@foreign_keys = []
@primary_keys = nil
@temporary = temporary
+ @if_not_exists = if_not_exists
@options = options
@as = as
@name = name
+ @comment = comment
end
def primary_keys(name = nil) # :nodoc:
@@ -297,16 +357,20 @@ module ActiveRecord
#
# create_table :taggings do |t|
# t.references :tag, index: { name: 'index_taggings_on_tag_id' }
- # t.references :tagger, polymorphic: true, index: true
- # t.references :taggable, polymorphic: { default: 'Photo' }
+ # t.references :tagger, polymorphic: true
+ # t.references :taggable, polymorphic: { default: 'Photo' }, index: false
# end
- def column(name, type, options = {})
+ def column(name, type, **options)
name = name.to_s
- type = type.to_sym
+ type = type.to_sym if type
options = options.dup
- if @columns_hash[name] && @columns_hash[name].primary_key?
- raise ArgumentError, "you can't redefine the primary key column '#{name}'. To define a custom primary key, pass { id: false } to create_table."
+ if @columns_hash[name]
+ if @columns_hash[name].primary_key?
+ raise ArgumentError, "you can't redefine the primary key column '#{name}'. To define a custom primary key, pass { id: false } to create_table."
+ else
+ raise ArgumentError, "you can't define an already defined column '#{name}'."
+ end
end
index_options = options.delete(:index)
@@ -326,22 +390,24 @@ module ActiveRecord
#
# index(:account_id, name: 'index_projects_on_account_id')
def index(column_name, options = {})
- indexes[column_name] = options
+ indexes << [column_name, options]
end
def foreign_key(table_name, options = {}) # :nodoc:
- foreign_keys.push([table_name, options])
+ foreign_keys << [table_name, options]
end
# Appends <tt>:datetime</tt> columns <tt>:created_at</tt> and
# <tt>:updated_at</tt> to the table. See {connection.add_timestamps}[rdoc-ref:SchemaStatements#add_timestamps]
#
# t.timestamps null: false
- def timestamps(*args)
- options = args.extract_options!
-
+ def timestamps(**options)
options[:null] = false if options[:null].nil?
+ if !options.key?(:precision) && @conn.supports_datetime_with_precision?
+ options[:precision] = 6
+ end
+
column(:created_at, :datetime, options)
column(:updated_at, :datetime, options)
end
@@ -353,37 +419,38 @@ module ActiveRecord
#
# See {connection.add_reference}[rdoc-ref:SchemaStatements#add_reference] for details of the options you can use.
def references(*args, **options)
- args.each do |col|
- ReferenceDefinition.new(col, **options).add_to(self)
+ args.each do |ref_name|
+ ReferenceDefinition.new(ref_name, options).add_to(self)
end
end
alias :belongs_to :references
- def new_column_definition(name, type, options) # :nodoc:
+ def new_column_definition(name, type, **options) # :nodoc:
+ if integer_like_primary_key?(type, options)
+ type = integer_like_primary_key_type(type, options)
+ end
type = aliased_types(type.to_s, type)
- column = create_column_definition name, type
-
- column.limit = options[:limit]
- column.precision = options[:precision]
- column.scale = options[:scale]
- column.default = options[:default]
- column.null = options[:null]
- column.first = options[:first]
- column.after = options[:after]
- column.auto_increment = options[:auto_increment]
- column.primary_key = type == :primary_key || options[:primary_key]
- column.collation = options[:collation]
- column
+ options[:primary_key] ||= type == :primary_key
+ options[:null] = false if options[:primary_key]
+ create_column_definition(name, type, options)
end
private
- def create_column_definition(name, type)
- ColumnDefinition.new name, type
- end
+ def create_column_definition(name, type, options)
+ ColumnDefinition.new(name, type, options)
+ end
- def aliased_types(name, fallback)
- 'timestamp' == name ? :datetime : fallback
- end
+ def aliased_types(name, fallback)
+ "timestamp" == name ? :datetime : fallback
+ end
+
+ def integer_like_primary_key?(type, options)
+ options[:primary_key] && [:integer, :bigint].include?(type) && !options.key?(:default)
+ end
+
+ def integer_like_primary_key_type(type, options)
+ type
+ end
end
class AlterTable # :nodoc:
@@ -444,7 +511,11 @@ module ActiveRecord
# t.date
# t.binary
# t.boolean
+ # t.foreign_key
+ # t.json
+ # t.virtual
# t.remove
+ # t.remove_foreign_key
# t.remove_references
# t.remove_belongs_to
# t.remove_index
@@ -466,13 +537,15 @@ module ActiveRecord
# t.column(:name, :string)
#
# See TableDefinition#column for details of the options you can use.
- def column(column_name, type, options = {})
+ def column(column_name, type, **options)
+ index_options = options.delete(:index)
@base.add_column(name, column_name, type, options)
+ index(column_name, index_options.is_a?(Hash) ? index_options : {}) if index_options
end
# Checks to see if a column exists.
#
- # t.string(:name) unless t.column_exists?(:name, :string)
+ # t.string(:name) unless t.column_exists?(:name, :string)
#
# See {connection.column_exists?}[rdoc-ref:SchemaStatements#column_exists?]
def column_exists?(column_name, type = nil, options = {})
@@ -493,9 +566,9 @@ module ActiveRecord
# Checks to see if an index exists.
#
- # unless t.index_exists?(:branch_id)
- # t.index(:branch_id)
- # end
+ # unless t.index_exists?(:branch_id)
+ # t.index(:branch_id)
+ # end
#
# See {connection.index_exists?}[rdoc-ref:SchemaStatements#index_exists?]
def index_exists?(column_name, options = {})
@@ -586,8 +659,7 @@ module ActiveRecord
# t.belongs_to(:supplier, foreign_key: true)
#
# See {connection.add_reference}[rdoc-ref:SchemaStatements#add_reference] for details of the options you can use.
- def references(*args)
- options = args.extract_options!
+ def references(*args, **options)
args.each do |ref_name|
@base.add_reference(name, ref_name, options)
end
@@ -600,29 +672,39 @@ module ActiveRecord
# t.remove_belongs_to(:supplier, polymorphic: true)
#
# See {connection.remove_reference}[rdoc-ref:SchemaStatements#remove_reference]
- def remove_references(*args)
- options = args.extract_options!
+ def remove_references(*args, **options)
args.each do |ref_name|
@base.remove_reference(name, ref_name, options)
end
end
alias :remove_belongs_to :remove_references
- # Adds a foreign key.
+ # Adds a foreign key to the table using a supplied table name.
#
- # t.foreign_key(:authors)
+ # t.foreign_key(:authors)
+ # t.foreign_key(:authors, column: :author_id, primary_key: "id")
#
# See {connection.add_foreign_key}[rdoc-ref:SchemaStatements#add_foreign_key]
- def foreign_key(*args) # :nodoc:
+ def foreign_key(*args)
@base.add_foreign_key(name, *args)
end
+ # Removes the given foreign key from the table.
+ #
+ # t.remove_foreign_key(:authors)
+ # t.remove_foreign_key(column: :author_id)
+ #
+ # See {connection.remove_foreign_key}[rdoc-ref:SchemaStatements#remove_foreign_key]
+ def remove_foreign_key(*args)
+ @base.remove_foreign_key(name, *args)
+ end
+
# Checks to see if a foreign key exists.
#
- # t.foreign_key(:authors) unless t.foreign_key_exists?(:authors)
+ # t.foreign_key(:authors) unless t.foreign_key_exists?(:authors)
#
# See {connection.foreign_key_exists?}[rdoc-ref:SchemaStatements#foreign_key_exists?]
- def foreign_key_exists?(*args) # :nodoc:
+ def foreign_key_exists?(*args)
@base.foreign_key_exists?(name, *args)
end
end
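In the schema_definitions.rb diff above, ColumnDefinition shrinks from a thirteen-member Struct to name/type/options/sql_type, with per-option readers and writers generated over the options hash via module_eval. A self-contained imitation of that pattern, using an invented Setting struct rather than anything from Active Record:

    # Each listed option gets a reader and a writer that delegate to the
    # options hash, mirroring the generated ColumnDefinition accessors.
    Setting = Struct.new(:name, :options) do
      [:limit, :default, :null].each do |option_name|
        module_eval <<-CODE, __FILE__, __LINE__ + 1
          def #{option_name}
            options[:#{option_name}]
          end

          def #{option_name}=(value)
            options[:#{option_name}] = value
          end
        CODE
      end
    end

    s = Setting.new("title", { limit: 255 })
    s.null = false
    p s.limit    # => 255
    p s.options  # => {:limit=>255, :null=>false}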
diff --git a/activerecord/lib/active_record/connection_adapters/abstract/schema_dumper.rb b/activerecord/lib/active_record/connection_adapters/abstract/schema_dumper.rb
index 4880d216d6..622e00fffb 100644
--- a/activerecord/lib/active_record/connection_adapters/abstract/schema_dumper.rb
+++ b/activerecord/lib/active_record/connection_adapters/abstract/schema_dumper.rb
@@ -1,103 +1,93 @@
+# frozen_string_literal: true
+
module ActiveRecord
module ConnectionAdapters # :nodoc:
- # The goal of this module is to move Adapter specific column
- # definitions to the Adapter instead of having it in the schema
- # dumper itself. This code represents the normal case.
- # We can then redefine how certain data types may be handled in the schema dumper on the
- # Adapter level by over-writing this code inside the database specific adapters
- module ColumnDumper
- def column_spec(column)
- spec = Hash[prepare_column_options(column).map { |k, v| [k, "#{k}: #{v}"] }]
- spec[:name] = column.name.inspect
- spec[:type] = schema_type(column).to_s
- spec
- end
-
- def column_spec_for_primary_key(column)
- return {} if default_primary_key?(column)
- spec = { id: schema_type(column).inspect }
- spec.merge!(prepare_column_options(column))
+ class SchemaDumper < SchemaDumper # :nodoc:
+ def self.create(connection, options)
+ new(connection, options)
end
- # This can be overridden on an Adapter level basis to support other
- # extended datatypes (Example: Adding an array option in the
- # PostgreSQL::ColumnDumper)
- def prepare_column_options(column)
- spec = {}
-
- if limit = schema_limit(column)
- spec[:limit] = limit
+ private
+ def column_spec(column)
+ [schema_type_with_virtual(column), prepare_column_options(column)]
end
- if precision = schema_precision(column)
- spec[:precision] = precision
+ def column_spec_for_primary_key(column)
+ return {} if default_primary_key?(column)
+ spec = { id: schema_type(column).inspect }
+ spec.merge!(prepare_column_options(column).except!(:null))
+ spec[:default] ||= "nil" if explicit_primary_key_default?(column)
+ spec
end
- if scale = schema_scale(column)
- spec[:scale] = scale
+ def prepare_column_options(column)
+ spec = {}
+ spec[:limit] = schema_limit(column)
+ spec[:precision] = schema_precision(column)
+ spec[:scale] = schema_scale(column)
+ spec[:default] = schema_default(column)
+ spec[:null] = "false" unless column.null
+ spec[:collation] = schema_collation(column)
+ spec[:comment] = column.comment.inspect if column.comment.present?
+ spec.compact!
+ spec
end
- default = schema_default(column) if column.has_default?
- spec[:default] = default unless default.nil?
-
- spec[:null] = 'false' unless column.null
-
- if collation = schema_collation(column)
- spec[:collation] = collation
+ def default_primary_key?(column)
+ schema_type(column) == :bigint
end
- spec
- end
-
- # Lists the valid migration options
- def migration_keys
- [:name, :limit, :precision, :scale, :default, :null, :collation]
- end
-
- private
+ def explicit_primary_key_default?(column)
+ false
+ end
- def default_primary_key?(column)
- schema_type(column) == :integer
- end
+ def schema_type_with_virtual(column)
+ if @connection.supports_virtual_columns? && column.virtual?
+ :virtual
+ else
+ schema_type(column)
+ end
+ end
- def schema_type(column)
- if column.bigint?
- :bigint
- else
- column.type
+ def schema_type(column)
+ if column.bigint?
+ :bigint
+ else
+ column.type
+ end
end
- end
- def schema_limit(column)
- limit = column.limit unless column.bigint?
- limit.inspect if limit && limit != native_database_types[column.type][:limit]
- end
+ def schema_limit(column)
+ limit = column.limit unless column.bigint?
+ limit.inspect if limit && limit != @connection.native_database_types[column.type][:limit]
+ end
- def schema_precision(column)
- column.precision.inspect if column.precision
- end
+ def schema_precision(column)
+ column.precision.inspect if column.precision
+ end
- def schema_scale(column)
- column.scale.inspect if column.scale
- end
+ def schema_scale(column)
+ column.scale.inspect if column.scale
+ end
- def schema_default(column)
- type = lookup_cast_type_from_column(column)
- default = type.deserialize(column.default)
- if default.nil?
- schema_expression(column)
- else
- type.type_cast_for_schema(default)
+ def schema_default(column)
+ return unless column.has_default?
+ type = @connection.lookup_cast_type_from_column(column)
+ default = type.deserialize(column.default)
+ if default.nil?
+ schema_expression(column)
+ else
+ type.type_cast_for_schema(default)
+ end
end
- end
- def schema_expression(column)
- "-> { #{column.default_function.inspect} }" if column.default_function
- end
+ def schema_expression(column)
+ "-> { #{column.default_function.inspect} }" if column.default_function
+ end
- def schema_collation(column)
- column.collation.inspect if column.collation
- end
+ def schema_collation(column)
+ column.collation.inspect if column.collation
+ end
end
end
end
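The rewritten dumper above assigns every option unconditionally and then drops the nil entries with Hash#compact! (Ruby 2.4+), instead of guarding each assignment with an if. A standalone illustration of that pattern, not tied to any adapter:

    # Build the spec hash unconditionally, then strip the options that were nil.
    # compact! returns nil when nothing was removed, hence the explicit `spec`.
    def column_spec_sketch(limit: nil, precision: nil, scale: nil, null: true)
      spec = {}
      spec[:limit]     = limit&.inspect
      spec[:precision] = precision&.inspect
      spec[:scale]     = scale&.inspect
      spec[:null]      = "false" unless null
      spec.compact!
      spec
    end

    p column_spec_sketch(limit: 40)                  # => {:limit=>"40"}
    p column_spec_sketch(precision: 6, null: false)  # => {:precision=>"6", :null=>"false"}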
diff --git a/activerecord/lib/active_record/connection_adapters/abstract/schema_statements.rb b/activerecord/lib/active_record/connection_adapters/abstract/schema_statements.rb
index 020d9bbdca..6981ea6ecd 100644
--- a/activerecord/lib/active_record/connection_adapters/abstract/schema_statements.rb
+++ b/activerecord/lib/active_record/connection_adapters/abstract/schema_statements.rb
@@ -1,6 +1,9 @@
-require 'active_record/migration/join_table'
-require 'active_support/core_ext/string/access'
-require 'digest'
+# frozen_string_literal: true
+
+require "active_record/migration/join_table"
+require "active_support/core_ext/string/access"
+require "active_support/deprecation"
+require "digest/sha2"
module ActiveRecord
module ConnectionAdapters # :nodoc:
@@ -18,14 +21,21 @@ module ActiveRecord
nil
end
+ # Returns the table comment that's stored in database metadata.
+ def table_comment(table_name)
+ nil
+ end
+
# Truncates a table alias according to the limits of the current adapter.
def table_alias_for(table_name)
- table_name[0...table_alias_length].tr('.', '_')
+ table_name[0...table_alias_length].tr(".", "_")
end
# Returns the relation names useable to back Active Record models.
# For most adapters this means all #tables and #views.
def data_sources
+ query_values(data_source_sql, "SCHEMA")
+ rescue NotImplementedError
tables | views
end
@@ -34,12 +44,14 @@ module ActiveRecord
# data_source_exists?(:ebooks)
#
def data_source_exists?(name)
+ query_values(data_source_sql(name), "SCHEMA").any? if name.present?
+ rescue NotImplementedError
data_sources.include?(name.to_s)
end
# Returns an array of table names defined in the database.
- def tables(name = nil)
- raise NotImplementedError, "#tables is not implemented"
+ def tables
+ query_values(data_source_sql(type: "BASE TABLE"), "SCHEMA")
end
# Checks to see if the table +table_name+ exists on the database.
@@ -47,12 +59,14 @@ module ActiveRecord
# table_exists?(:developers)
#
def table_exists?(table_name)
+ query_values(data_source_sql(table_name, type: "BASE TABLE"), "SCHEMA").any? if table_name.present?
+ rescue NotImplementedError
tables.include?(table_name.to_s)
end
# Returns an array of view names defined in the database.
def views
- raise NotImplementedError, "#views is not implemented"
+ query_values(data_source_sql(type: "VIEW"), "SCHEMA")
end
# Checks to see if the view +view_name+ exists on the database.
@@ -60,11 +74,15 @@ module ActiveRecord
# view_exists?(:ebooks)
#
def view_exists?(view_name)
+ query_values(data_source_sql(view_name, type: "VIEW"), "SCHEMA").any? if view_name.present?
+ rescue NotImplementedError
views.include?(view_name.to_s)
end
# Returns an array of indexes for the given table.
- # def indexes(table_name, name = nil) end
+ def indexes(table_name)
+ raise NotImplementedError, "#indexes is not implemented"
+ end
# Checks to see if an index exists on a table for a given index definition.
#
@@ -90,10 +108,12 @@ module ActiveRecord
indexes(table_name).any? { |i| checks.all? { |check| check[i] } }
end
- # Returns an array of Column objects for the table specified by +table_name+.
- # See the concrete implementation for details on the expected parameter values.
+ # Returns an array of +Column+ objects for the table specified by +table_name+.
def columns(table_name)
- raise NotImplementedError, "#columns is not implemented"
+ table_name = table_name.to_s
+ column_definitions(table_name).map do |field|
+ new_column_from_field(table_name, field)
+ end
end
# Checks to see if a column exists in a given table.
@@ -110,12 +130,12 @@ module ActiveRecord
# column_exists?(:suppliers, :name, :string, null: false)
# column_exists?(:suppliers, :tax, :decimal, precision: 8, scale: 2)
#
- def column_exists?(table_name, column_name, type = nil, options = {})
+ def column_exists?(table_name, column_name, type = nil, **options)
column_name = column_name.to_s
checks = []
checks << lambda { |c| c.name == column_name }
- checks << lambda { |c| c.type == type } if type
- (migration_keys - [:name]).each do |attr|
+ checks << lambda { |c| c.type == type.to_sym rescue nil } if type
+ column_options_keys.each do |attr|
checks << lambda { |c| c.send(attr) == options[attr] } if options.key?(attr)
end
@@ -124,14 +144,9 @@ module ActiveRecord
# Returns just a table's primary key
def primary_key(table_name)
- pks = primary_keys(table_name)
- warn <<-WARNING.strip_heredoc if pks.count > 1
- WARNING: Rails does not support composite primary key.
-
- #{table_name} has composite primary key. Composite primary key is ignored.
- WARNING
-
- pks.first if pks.one?
+ pk = primary_keys(table_name)
+ pk = pk.first unless pk.size > 1
+ pk
end
# Creates a new table with the name +table_name+. +table_name+ may either
@@ -174,7 +189,9 @@ module ActiveRecord
# A Symbol can be used to specify the type of the generated primary key column.
# [<tt>:primary_key</tt>]
# The name of the primary key, if one is to be added automatically.
- # Defaults to +id+. If <tt>:id</tt> is false this option is ignored.
+ # Defaults to +id+. If <tt>:id</tt> is false, then this option is ignored.
+ #
+ # If an array is passed, a composite primary key will be created.
#
# Note that Active Record models will automatically detect their
# primary key. This can be avoided by using
@@ -189,19 +206,22 @@ module ActiveRecord
# Set to true to drop the table before creating it.
# Set to +:cascade+ to drop dependent objects as well.
# Defaults to false.
+ # [<tt>:if_not_exists</tt>]
+ # Set to true to avoid raising an error when the table already exists.
+ # Defaults to false.
# [<tt>:as</tt>]
# SQL to use to generate the table. When this option is used, the block is
# ignored, as are the <tt>:id</tt> and <tt>:primary_key</tt> options.
#
# ====== Add a backend specific option to the generated SQL (MySQL)
#
- # create_table(:suppliers, options: 'ENGINE=InnoDB DEFAULT CHARSET=utf8')
+ # create_table(:suppliers, options: 'ENGINE=InnoDB DEFAULT CHARSET=utf8mb4')
#
# generates:
#
# CREATE TABLE suppliers (
- # id int auto_increment PRIMARY KEY
- # ) ENGINE=InnoDB DEFAULT CHARSET=utf8
+ # id bigint auto_increment PRIMARY KEY
+ # ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4
#
# ====== Rename the primary key column
#
@@ -212,7 +232,7 @@ module ActiveRecord
# generates:
#
# CREATE TABLE objects (
- # guid int auto_increment PRIMARY KEY,
+ # guid bigint auto_increment PRIMARY KEY,
# name varchar(80)
# )
#
@@ -229,18 +249,35 @@ module ActiveRecord
# label varchar
# )
#
+ # ====== Create a composite primary key
+ #
+ # create_table(:orders, primary_key: [:product_id, :client_id]) do |t|
+ # t.belongs_to :product
+ # t.belongs_to :client
+ # end
+ #
+ # generates:
+ #
+ # CREATE TABLE orders (
+ # product_id bigint NOT NULL,
+ # client_id bigint NOT NULL
+ # );
+ #
+ # ALTER TABLE ONLY "orders"
+ # ADD CONSTRAINT orders_pkey PRIMARY KEY (product_id, client_id);
+ #
# ====== Do not add a primary key column
#
# create_table(:categories_suppliers, id: false) do |t|
- # t.column :category_id, :integer
- # t.column :supplier_id, :integer
+ # t.column :category_id, :bigint
+ # t.column :supplier_id, :bigint
# end
#
# generates:
#
# CREATE TABLE categories_suppliers (
- # category_id int,
- # supplier_id int
+ # category_id bigint,
+ # supplier_id bigint
# )
#
# ====== Create a temporary table based on a query
@@ -254,8 +291,8 @@ module ActiveRecord
# SELECT * FROM orders INNER JOIN line_items ON order_id=orders.id
#
# See also TableDefinition#column for details on how to create columns.
- def create_table(table_name, options = {})
- td = create_table_definition table_name, options[:temporary], options[:options], options[:as]
+ def create_table(table_name, **options)
+ td = create_table_definition(table_name, options)
if options[:id] != false && !options[:as]
pk = options.fetch(:primary_key) do
@@ -271,18 +308,28 @@ module ActiveRecord
yield td if block_given?
- if options[:force] && data_source_exists?(table_name)
- drop_table(table_name, options)
+ if options[:force]
+ drop_table(table_name, options.merge(if_exists: true))
end
result = execute schema_creation.accept td
unless supports_indexes_in_create?
- td.indexes.each_pair do |column_name, index_options|
+ td.indexes.each do |column_name, index_options|
add_index(table_name, column_name, index_options)
end
end
+ if supports_comments? && !supports_comments_in_create?
+ if table_comment = options[:comment].presence
+ change_table_comment(table_name, table_comment)
+ end
+
+ td.columns.each do |column|
+ change_column_comment(table_name, column.name, column.comment) if column.comment.present?
+ end
+ end
+
result
end
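Taken together, the create_table changes above allow composite primary keys, an :if_not_exists guard, and table or column comments. A hedged usage sketch for a migration's change method; the table, columns, and comment text are invented for illustration:

    create_table :order_lines,
                 primary_key: [:order_id, :product_id],  # composite primary key
                 if_not_exists: true,                     # do not raise if the table already exists
                 comment: "One row per product per order" do |t|
      t.belongs_to :order
      t.belongs_to :product
      t.integer :quantity, null: false, default: 1, comment: "Units ordered"
      t.timestamps                                        # now gets precision: 6 by default
    end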
@@ -292,9 +339,9 @@ module ActiveRecord
# # Creates a table called 'assemblies_parts' with no id.
# create_join_table(:assemblies, :parts)
#
- # You can pass a +options+ hash can include the following keys:
+ # You can pass an +options+ hash which can include the following keys:
# [<tt>:table_name</tt>]
- # Sets the table name overriding the default
+ # Sets the table name, overriding the default.
# [<tt>:column_options</tt>]
# Any extra options you want appended to the columns definition.
# [<tt>:options</tt>]
@@ -320,21 +367,20 @@ module ActiveRecord
# generates:
#
# CREATE TABLE assemblies_parts (
- # assembly_id int NOT NULL,
- # part_id int NOT NULL,
+ # assembly_id bigint NOT NULL,
+ # part_id bigint NOT NULL,
# ) ENGINE=InnoDB DEFAULT CHARSET=utf8
#
- def create_join_table(table_1, table_2, options = {})
+ def create_join_table(table_1, table_2, column_options: {}, **options)
join_table_name = find_join_table_name(table_1, table_2, options)
- column_options = options.delete(:column_options) || {}
- column_options.reverse_merge!(null: false)
+ column_options.reverse_merge!(null: false, index: false)
- t1_column, t2_column = [table_1, table_2].map{ |t| t.to_s.singularize.foreign_key }
+ t1_ref, t2_ref = [table_1, table_2].map { |t| t.to_s.singularize }
create_table(join_table_name, options.merge!(id: false)) do |td|
- td.integer t1_column, column_options
- td.integer t2_column, column_options
+ td.references t1_ref, column_options
+ td.references t2_ref, column_options
yield td if block_given?
end
end
@@ -366,6 +412,8 @@ module ActiveRecord
#
# Defaults to false.
#
+ # Only supported on the MySQL and PostgreSQL adapters; ignored elsewhere.
+ #
# ====== Add a column
#
# change_table(:suppliers) do |t|
@@ -390,7 +438,7 @@ module ActiveRecord
# t.references :company
# end
#
- # Creates a <tt>company_id(integer)</tt> column.
+ # Creates a <tt>company_id(bigint)</tt> column.
#
# ====== Add a polymorphic foreign key column
#
@@ -398,7 +446,7 @@ module ActiveRecord
# t.belongs_to :company, polymorphic: true
# end
#
- # Creates <tt>company_type(varchar)</tt> and <tt>company_id(integer)</tt> columns.
+ # Creates <tt>company_type(varchar)</tt> and <tt>company_id(bigint)</tt> columns.
#
# ====== Remove a column
#
@@ -419,7 +467,7 @@ module ActiveRecord
# t.remove_index :company_id
# end
#
- # See also Table for details on all of the various column transformation.
+ # See also Table for details on all of the various column transformations.
def change_table(table_name, options = {})
if supports_bulk_alter? && options[:bulk]
recorder = ActiveRecord::Migration::CommandRecorder.new(self)
@@ -469,19 +517,24 @@ module ActiveRecord
#
# Available options are (none of these exists by default):
# * <tt>:limit</tt> -
- # Requests a maximum column length. This is number of characters for a <tt>:string</tt> column
+ # Requests a maximum column length. This is the number of characters for a <tt>:string</tt> column
# and number of bytes for <tt>:text</tt>, <tt>:binary</tt> and <tt>:integer</tt> columns.
+ # This option is ignored by some backends.
# * <tt>:default</tt> -
- # The column's default value. Use nil for NULL.
+ # The column's default value. Use +nil+ for +NULL+.
# * <tt>:null</tt> -
- # Allows or disallows +NULL+ values in the column. This option could
- # have been named <tt>:null_allowed</tt>.
+ # Allows or disallows +NULL+ values in the column.
# * <tt>:precision</tt> -
# Specifies the precision for the <tt>:decimal</tt> and <tt>:numeric</tt> columns.
# * <tt>:scale</tt> -
# Specifies the scale for the <tt>:decimal</tt> and <tt>:numeric</tt> columns.
+ # * <tt>:collation</tt> -
+ # Specifies the collation for a <tt>:string</tt> or <tt>:text</tt> column. If not specified, the
+ # column will have the same collation as the table.
+ # * <tt>:comment</tt> -
+ # Specifies the comment for the column. This option is ignored by some backends.
#
- # Note: The precision is the total number of significant digits
+ # Note: The precision is the total number of significant digits,
# and the scale is the number of digits that can be stored following
# the decimal point. For example, the number 123.45 has a precision of 5
# and a scale of 2. A decimal with a precision of 5 and a scale of 2 can
@@ -502,7 +555,7 @@ module ActiveRecord
# Default is (38,0).
# * DB2: <tt>:precision</tt> [1..63], <tt>:scale</tt> [0..62].
# Default unknown.
- # * SqlServer?: <tt>:precision</tt> [1..38], <tt>:scale</tt> [0..38].
+ # * SqlServer: <tt>:precision</tt> [1..38], <tt>:scale</tt> [0..38].
# Default (38,0).
#
# == Examples
@@ -524,10 +577,14 @@ module ActiveRecord
# add_column(:measurements, :huge_integer, :decimal, precision: 30)
# # ALTER TABLE "measurements" ADD "huge_integer" decimal(30)
#
+ # # Defines a column that stores an array of a type.
+ # add_column(:users, :skills, :text, array: true)
+ # # ALTER TABLE "users" ADD "skills" text[]
+ #
# # Defines a column with a database-specific type.
# add_column(:shapes, :triangle, 'polygon')
# # ALTER TABLE "shapes" ADD "triangle" polygon
- def add_column(table_name, column_name, type, options = {})
+ def add_column(table_name, column_name, type, **options)
at = create_alter_table table_name
at.add_column(column_name, type, options)
execute schema_creation.accept at
@@ -550,9 +607,10 @@ module ActiveRecord
#
# The +type+ and +options+ parameters will be ignored if present. It can be helpful
# to provide these in a migration's +change+ method so it can be reverted.
- # In that case, +type+ and +options+ will be used by add_column.
+ # In that case, +type+ and +options+ will be used by #add_column.
+ # Indexes on the column are automatically removed.
def remove_column(table_name, column_name, type = nil, options = {})
- execute "ALTER TABLE #{quote_table_name(table_name)} DROP #{quote_column_name(column_name)}"
+ execute "ALTER TABLE #{quote_table_name(table_name)} #{remove_column_for_alter(table_name, column_name, type, options)}"
end
# Changes the column's definition according to the new options.
@@ -667,7 +725,7 @@ module ActiveRecord
#
# CREATE INDEX by_branch_desc_party ON accounts(branch_id DESC, party_id ASC, surname)
#
- # Note: MySQL doesn't yet support index order (it accepts the syntax but ignores it).
+ # Note: MySQL only supports index order from 8.0.1 onwards (earlier versions accepted the syntax but ignored it).
#
# ====== Creating a partial index
#
@@ -690,6 +748,19 @@ module ActiveRecord
#
# Note: only supported by PostgreSQL and MySQL
#
+ # ====== Creating an index with a specific operator class
+ #
+ # add_index(:developers, :name, using: 'gist', opclass: :gist_trgm_ops)
+ # # CREATE INDEX developers_on_name ON developers USING gist (name gist_trgm_ops) -- PostgreSQL
+ #
+ # add_index(:developers, [:name, :city], using: 'gist', opclass: { city: :gist_trgm_ops })
+ # # CREATE INDEX developers_on_name_and_city ON developers USING gist (name, city gist_trgm_ops) -- PostgreSQL
+ #
+ # add_index(:developers, [:name, :city], using: 'gist', opclass: :gist_trgm_ops)
+ # # CREATE INDEX developers_on_name_and_city ON developers USING gist (name gist_trgm_ops, city gist_trgm_ops) -- PostgreSQL
+ #
+ # Note: only supported by PostgreSQL
+ #
# ====== Creating an index with a specific type
#
# add_index(:developers, :name, type: :fulltext)
@@ -736,7 +807,7 @@ module ActiveRecord
def rename_index(table_name, old_name, new_name)
validate_index_length!(table_name, new_name)
- # this is a naive implementation; some DBs may support this more efficiently (Postgres, for instance)
+ # this is a naive implementation; some DBs may support this more efficiently (PostgreSQL, for instance)
old_index_def = indexes(table_name).detect { |i| i.name == old_name }
return unless old_index_def
add_index(table_name, old_index_def.columns, name: new_name, unique: old_index_def.unique)
@@ -753,30 +824,27 @@ module ActiveRecord
raise ArgumentError, "You must specify the index name"
end
else
- index_name(table_name, :column => options)
+ index_name(table_name, index_name_options(options))
end
end
# Verifies the existence of an index with a given name.
- #
- # The default argument is returned if the underlying implementation does not define the indexes method,
- # as there's no way to determine the correct answer in that case.
- def index_name_exists?(table_name, index_name, default)
- return default unless respond_to?(:indexes)
+ def index_name_exists?(table_name, index_name)
index_name = index_name.to_s
indexes(table_name).detect { |i| i.name == index_name }
end
- # Adds a reference. The reference column is an integer by default,
+ # Adds a reference. The reference column is a bigint by default,
# the <tt>:type</tt> option can be used to specify a different type.
# Optionally adds a +_type+ column, if <tt>:polymorphic</tt> option is provided.
# #add_reference and #add_belongs_to are acceptable.
#
# The +options+ hash can include the following keys:
# [<tt>:type</tt>]
- # The reference column type. Defaults to +:integer+.
+ # The reference column type. Defaults to +:bigint+.
# [<tt>:index</tt>]
- # Add an appropriate index. Defaults to false.
+ # Add an appropriate index. Defaults to true.
+ # See #add_index for usage of this option.
# [<tt>:foreign_key</tt>]
# Add an appropriate foreign key constraint. Defaults to false.
# [<tt>:polymorphic</tt>]
@@ -784,17 +852,25 @@ module ActiveRecord
# [<tt>:null</tt>]
# Whether the column allows nulls. Defaults to true.
#
- # ====== Create a user_id integer column
+ # ====== Create a user_id bigint column without an index
#
- # add_reference(:products, :user)
+ # add_reference(:products, :user, index: false)
#
# ====== Create a user_id string column
#
# add_reference(:products, :user, type: :string)
#
- # ====== Create supplier_id, supplier_type columns and appropriate index
+ # ====== Create supplier_id, supplier_type columns
+ #
+ # add_reference(:products, :supplier, polymorphic: true)
#
- # add_reference(:products, :supplier, polymorphic: true, index: true)
+ # ====== Create a supplier_id column with a unique index
+ #
+ # add_reference(:products, :supplier, index: { unique: true })
+ #
+ # ====== Create a supplier_id column with a named index
+ #
+ # add_reference(:products, :supplier, index: { name: "my_supplier_index" })
#
# ====== Create a supplier_id column and appropriate foreign key
#
@@ -804,8 +880,8 @@ module ActiveRecord
#
# add_reference(:products, :supplier, foreign_key: {to_table: :firms})
#
- def add_reference(table_name, *args)
- ReferenceDefinition.new(*args).add_to(update_table_definition(table_name, self))
+ def add_reference(table_name, ref_name, **options)
+ ReferenceDefinition.new(ref_name, options).add_to(update_table_definition(table_name, self))
end
alias :add_belongs_to :add_reference
@@ -814,7 +890,7 @@ module ActiveRecord
#
# ====== Remove the reference
#
- # remove_reference(:products, :user, index: true)
+ # remove_reference(:products, :user, index: false)
#
# ====== Remove polymorphic reference
#
@@ -822,16 +898,22 @@ module ActiveRecord
#
# ====== Remove the reference with a foreign key
#
- # remove_reference(:products, :user, index: true, foreign_key: true)
+ # remove_reference(:products, :user, foreign_key: true)
#
- def remove_reference(table_name, ref_name, options = {})
- if options[:foreign_key]
+ def remove_reference(table_name, ref_name, foreign_key: false, polymorphic: false, **options)
+ if foreign_key
reference_name = Base.pluralize_table_names ? ref_name.to_s.pluralize : ref_name
- remove_foreign_key(table_name, reference_name)
+ if foreign_key.is_a?(Hash)
+ foreign_key_options = foreign_key
+ else
+ foreign_key_options = { to_table: reference_name }
+ end
+ foreign_key_options[:column] ||= "#{ref_name}_id"
+ remove_foreign_key(table_name, foreign_key_options)
end
remove_column(table_name, "#{ref_name}_id")
- remove_column(table_name, "#{ref_name}_type") if options[:polymorphic]
+ remove_column(table_name, "#{ref_name}_type") if polymorphic
end
alias :remove_belongs_to :remove_reference
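With the keyword rewrite above, remove_reference can target a foreign key that points at a differently named table by passing a hash for :foreign_key. A hedged sketch of the accepted calls; the table and reference names are illustrative:

    remove_reference :products, :user                                  # drops products.user_id
    remove_reference :products, :supplier, polymorphic: true           # also drops products.supplier_type
    remove_reference :products, :user, foreign_key: true               # removes the foreign key to users first
    remove_reference :products, :supplier, foreign_key: { to_table: :firms }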
@@ -883,6 +965,8 @@ module ActiveRecord
# Action that happens <tt>ON DELETE</tt>. Valid values are +:nullify+, +:cascade+ and +:restrict+
# [<tt>:on_update</tt>]
# Action that happens <tt>ON UPDATE</tt>. Valid values are +:nullify+, +:cascade+ and +:restrict+
+ # [<tt>:validate</tt>]
+ # (Postgres only) Specify whether or not the constraint should be validated. Defaults to +true+.
def add_foreign_key(from_table, to_table, options = {})
return unless supports_foreign_keys?
@@ -906,15 +990,22 @@ module ActiveRecord
#
# remove_foreign_key :accounts, column: :owner_id
#
+ # Removes the foreign key on +accounts.owner_id+.
+ #
+ # remove_foreign_key :accounts, to_table: :owners
+ #
# Removes the foreign key named +special_fk_name+ on the +accounts+ table.
#
# remove_foreign_key :accounts, name: :special_fk_name
#
- # The +options+ hash accepts the same keys as SchemaStatements#add_foreign_key.
- def remove_foreign_key(from_table, options_or_to_table = {})
+ # The +options+ hash accepts the same keys as SchemaStatements#add_foreign_key
+ # with an addition of
+ # [<tt>:to_table</tt>]
+ # The name of the table that contains the referenced primary key.
+ def remove_foreign_key(from_table, to_table = nil, **options)
return unless supports_foreign_keys?
- fk_name_to_delete = foreign_key_for!(from_table, options_or_to_table).name
+ fk_name_to_delete = foreign_key_for!(from_table, to_table: to_table, **options).name
at = create_alter_table from_table
at.drop_foreign_key fk_name_to_delete
@@ -924,33 +1015,21 @@ module ActiveRecord
# Checks to see if a foreign key exists on a table for a given foreign key definition.
#
- # # Check a foreign key exists
+ # # Checks to see if a foreign key exists.
# foreign_key_exists?(:accounts, :branches)
#
- # # Check a foreign key on a specified column exists
+ # # Checks to see if a foreign key on a specified column exists.
# foreign_key_exists?(:accounts, column: :owner_id)
#
- # # Check a foreign key with a custom name exists
+ # # Checks to see if a foreign key with a custom name exists.
# foreign_key_exists?(:accounts, name: "special_fk_name")
#
- def foreign_key_exists?(from_table, options_or_to_table = {})
- foreign_key_for(from_table, options_or_to_table).present?
- end
-
- def foreign_key_for(from_table, options_or_to_table = {}) # :nodoc:
- return unless supports_foreign_keys?
- foreign_keys(from_table).detect {|fk| fk.defined_for? options_or_to_table }
- end
-
- def foreign_key_for!(from_table, options_or_to_table = {}) # :nodoc:
- foreign_key_for(from_table, options_or_to_table) or \
- raise ArgumentError, "Table '#{from_table}' has no foreign key for #{options_or_to_table}"
+ def foreign_key_exists?(from_table, to_table = nil, **options)
+ foreign_key_for(from_table, to_table: to_table, **options).present?
end
def foreign_key_column_for(table_name) # :nodoc:
- prefix = Base.table_name_prefix
- suffix = Base.table_name_suffix
- name = table_name.to_s =~ /#{prefix}(.+)#{suffix}/ ? $1 : table_name.to_s
+ name = strip_table_name_prefix_and_suffix(table_name)
"#{name.singularize}_id"
end
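The reworked remove_foreign_key and foreign_key_exists? above take the target table as an explicit argument or :to_table option instead of an overloaded options_or_to_table parameter. A usage sketch drawn from the documented forms (table and key names are illustrative):

    remove_foreign_key :accounts, :branches               # by target table
    remove_foreign_key :accounts, column: :owner_id       # by referencing column
    remove_foreign_key :accounts, to_table: :owners       # explicit :to_table option
    remove_foreign_key :accounts, name: :special_fk_name  # by constraint name

    foreign_key_exists?(:accounts, :branches)
    foreign_key_exists?(:accounts, column: :owner_id)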
@@ -962,53 +1041,43 @@ module ActiveRecord
end
def dump_schema_information #:nodoc:
- sm_table = ActiveRecord::Migrator.schema_migrations_table_name
-
- sql = "INSERT INTO #{sm_table} (version) VALUES "
- sql << ActiveRecord::SchemaMigration.order('version').pluck(:version).map {|v| "('#{v}')" }.join(', ')
- sql << ";\n\n"
- end
-
- # Should not be called normally, but this operation is non-destructive.
- # The migrations module handles this automatically.
- def initialize_schema_migrations_table
- ActiveRecord::SchemaMigration.create_table
- end
-
- def initialize_internal_metadata_table
- ActiveRecord::InternalMetadata.create_table
+ versions = ActiveRecord::SchemaMigration.all_versions
+ insert_versions_sql(versions) if versions.any?
end
def internal_string_options_for_primary_key # :nodoc:
{ primary_key: true }
end
- def assume_migrated_upto_version(version, migrations_paths)
- migrations_paths = Array(migrations_paths)
+ def assume_migrated_upto_version(version, migrations_paths = nil)
+ unless migrations_paths.nil?
+ ActiveSupport::Deprecation.warn(<<~MSG.squish)
+ Passing migrations_paths to #assume_migrated_upto_version is deprecated and will be removed in Rails 6.1.
+ MSG
+ end
+
version = version.to_i
- sm_table = quote_table_name(ActiveRecord::Migrator.schema_migrations_table_name)
+ sm_table = quote_table_name(ActiveRecord::SchemaMigration.table_name)
- migrated = select_values("SELECT version FROM #{sm_table}").map(&:to_i)
- paths = migrations_paths.map {|p| "#{p}/[0-9]*_*.rb" }
- versions = Dir[*paths].map do |filename|
- filename.split('/').last.split('_').first.to_i
- end
+ migrated = migration_context.get_all_versions
+ versions = migration_context.migrations.map(&:version)
unless migrated.include?(version)
- execute "INSERT INTO #{sm_table} (version) VALUES ('#{version}')"
+ execute "INSERT INTO #{sm_table} (version) VALUES (#{quote(version)})"
end
- inserting = (versions - migrated).select {|v| v < version}
+ inserting = (versions - migrated).select { |v| v < version }
if inserting.any?
- if (duplicate = inserting.detect {|v| inserting.count(v) > 1})
+ if (duplicate = inserting.detect { |v| inserting.count(v) > 1 })
raise "Duplicate migration #{duplicate}. Please renumber your migrations to resolve the conflict."
end
- execute "INSERT INTO #{sm_table} (version) VALUES #{inserting.map {|v| "('#{v}')"}.join(', ') }"
+ execute insert_versions_sql(inserting)
end
end
- def type_to_sql(type, limit = nil, precision = nil, scale = nil) #:nodoc:
- if native = native_database_types[type.to_sym]
+ def type_to_sql(type, limit: nil, precision: nil, scale: nil, **) # :nodoc:
+ type = type.to_sym if type
+ if native = native_database_types[type]
column_type_sql = (native.is_a?(Hash) ? native[:name] : native).dup
if type == :decimal # ignore limit, use precision and scale
@@ -1024,7 +1093,7 @@ module ActiveRecord
raise ArgumentError, "Error adding decimal column: precision cannot be empty if scale is specified"
end
- elsif [:datetime, :time].include?(type) && precision ||= native[:precision]
+ elsif [:datetime, :timestamp, :time, :interval].include?(type) && precision ||= native[:precision]
if (0..6) === precision
column_type_sql << "(#{precision})"
else
@@ -1041,7 +1110,7 @@ module ActiveRecord
end
# Given a set of columns and an ORDER BY clause, returns the columns for a SELECT DISTINCT.
- # PostgreSQL, MySQL, and Oracle overrides this for custom DISTINCT syntax - they
+ # PostgreSQL, MySQL, and Oracle override this for custom DISTINCT syntax - they
# require the order columns appear in the SELECT.
#
# columns_for_distinct("posts.id", ["posts.created_at desc"])
@@ -1058,6 +1127,10 @@ module ActiveRecord
def add_timestamps(table_name, options = {})
options[:null] = false if options[:null].nil?
+ if !options.key?(:precision) && supports_datetime_with_precision?
+ options[:precision] = 6
+ end
+
add_column table_name, :created_at, :datetime, options
add_column table_name, :updated_at, :datetime, options
end
@@ -1075,16 +1148,15 @@ module ActiveRecord
Table.new(table_name, base)
end
- def add_index_options(table_name, column_name, options = {}) #:nodoc:
- column_names = Array(column_name)
+ def add_index_options(table_name, column_name, comment: nil, **options) # :nodoc:
+ column_names = index_column_names(column_name)
- options.assert_valid_keys(:unique, :order, :name, :where, :length, :internal, :using, :algorithm, :type)
+ options.assert_valid_keys(:unique, :order, :name, :where, :length, :internal, :using, :algorithm, :type, :opclass)
index_type = options[:type].to_s if options.key?(:type)
index_type ||= options[:unique] ? "UNIQUE" : ""
index_name = options[:name].to_s if options.key?(:name)
- index_name ||= index_name(table_name, column: column_names)
- max_index_length = options.fetch(:internal, false) ? index_name_length : allowed_index_name_length
+ index_name ||= index_name(table_name, column_names)
if options.key?(:algorithm)
algorithm = index_algorithms.fetch(options[:algorithm]) {
@@ -1098,66 +1170,88 @@ module ActiveRecord
index_options = options[:where] ? " WHERE #{options[:where]}" : ""
end
- if index_name.length > max_index_length
- raise ArgumentError, "Index name '#{index_name}' on table '#{table_name}' is too long; the limit is #{max_index_length} characters"
- end
- if data_source_exists?(table_name) && index_name_exists?(table_name, index_name, false)
+ validate_index_length!(table_name, index_name, options.fetch(:internal, false))
+
+ if data_source_exists?(table_name) && index_name_exists?(table_name, index_name)
raise ArgumentError, "Index name '#{index_name}' on table '#{table_name}' already exists"
end
index_columns = quoted_columns_for_index(column_names, options).join(", ")
- [index_name, index_type, index_columns, index_options, algorithm, using]
+ [index_name, index_type, index_columns, index_options, algorithm, using, comment]
end
def options_include_default?(options)
options.include?(:default) && !(options[:null] == false && options[:default].nil?)
end
- protected
- def add_index_sort_order(option_strings, column_names, options = {})
- if options.is_a?(Hash) && order = options[:order]
- case order
- when Hash
- column_names.each {|name| option_strings[name] += " #{order[name].upcase}" if order.has_key?(name)}
- when String
- column_names.each {|name| option_strings[name] += " #{order.upcase}"}
- end
- end
+ # Changes the comment for a table or removes it if +nil+.
+ def change_table_comment(table_name, comment)
+ raise NotImplementedError, "#{self.class} does not support changing table comments"
+ end
+
+ # Changes the comment for a column or removes it if +nil+.
+ def change_column_comment(table_name, column_name, comment)
+ raise NotImplementedError, "#{self.class} does not support changing column comments"
+ end
+
+ def create_schema_dumper(options) # :nodoc:
+ SchemaDumper.create(self, options)
+ end
- return option_strings
+ private
+ def column_options_keys
+ [:limit, :precision, :scale, :default, :null, :collation, :comment]
+ end
+
+ def add_index_sort_order(quoted_columns, **options)
+ orders = options_for_index_columns(options[:order])
+ quoted_columns.each do |name, column|
+ column << " #{orders[name].upcase}" if orders[name].present?
+ end
end
- # Overridden by the MySQL adapter for supporting index lengths
- def quoted_columns_for_index(column_names, options = {})
- option_strings = Hash[column_names.map {|name| [name, '']}]
+ def options_for_index_columns(options)
+ if options.is_a?(Hash)
+ options.symbolize_keys
+ else
+ Hash.new { |hash, column| hash[column] = options }
+ end
+ end
- # add index sort order if supported
+ # Overridden by the MySQL adapter for supporting index lengths and by
+ # the PostgreSQL adapter for supporting operator classes.
+ def add_options_for_index_columns(quoted_columns, **options)
if supports_index_sort_order?
- option_strings = add_index_sort_order(option_strings, column_names, options)
+ quoted_columns = add_index_sort_order(quoted_columns, options)
end
- column_names.map {|name| quote_column_name(name) + option_strings[name]}
+ quoted_columns
+ end
+
+ def quoted_columns_for_index(column_names, **options)
+ return [column_names] if column_names.is_a?(String)
+
+ quoted_columns = Hash[column_names.map { |name| [name.to_sym, quote_column_name(name).dup] }]
+ add_options_for_index_columns(quoted_columns, options).values
end
def index_name_for_remove(table_name, options = {})
- # if the adapter doesn't support the indexes call the best we can do
- # is return the default index name for the options provided
- return index_name(table_name, options) unless respond_to?(:indexes)
+ return options[:name] if can_remove_index_by_name?(options)
checks = []
if options.is_a?(Hash)
- checks << lambda { |i| i.name == options[:name].to_s } if options.has_key?(:name)
- column_names = Array(options[:column]).map(&:to_s)
+ checks << lambda { |i| i.name == options[:name].to_s } if options.key?(:name)
+ column_names = index_column_names(options[:column])
else
- column_names = Array(options).map(&:to_s)
+ column_names = index_column_names(options)
end
- if column_names.any?
- checks << lambda { |i| i.columns.join('_and_') == column_names.join('_and_') }
+ if column_names.present?
+ checks << lambda { |i| index_name(table_name, i.columns) == index_name(table_name, column_names) }
end
- raise ArgumentError "No name or columns specified" if checks.none?
+ raise ArgumentError, "No name or columns specified" if checks.none?
matching_indexes = indexes(table_name).select { |i| checks.all? { |check| check[i] } }
@@ -1193,36 +1287,132 @@ module ActiveRecord
end
end
- private
- def create_table_definition(name, temporary = false, options = nil, as = nil)
- TableDefinition.new(name, temporary, options, as)
- end
+ def schema_creation
+ SchemaCreation.new(self)
+ end
- def create_alter_table(name)
- AlterTable.new create_table_definition(name)
- end
+ def create_table_definition(*args)
+ TableDefinition.new(self, *args)
+ end
- def foreign_key_name(table_name, options) # :nodoc:
- identifier = "#{table_name}_#{options.fetch(:column)}_fk"
- hashed_identifier = Digest::SHA256.hexdigest(identifier).first(10)
- options.fetch(:name) do
- "fk_rails_#{hashed_identifier}"
+ def create_alter_table(name)
+ AlterTable.new create_table_definition(name)
end
- end
- def validate_index_length!(table_name, new_name) # :nodoc:
- if new_name.length > allowed_index_name_length
- raise ArgumentError, "Index name '#{new_name}' on table '#{table_name}' is too long; the limit is #{allowed_index_name_length} characters"
+ def fetch_type_metadata(sql_type)
+ cast_type = lookup_cast_type(sql_type)
+ SqlTypeMetadata.new(
+ sql_type: sql_type,
+ type: cast_type.type,
+ limit: cast_type.limit,
+ precision: cast_type.precision,
+ scale: cast_type.scale,
+ )
end
- end
- def extract_new_default_value(default_or_changes)
- if default_or_changes.is_a?(Hash) && default_or_changes.has_key?(:from) && default_or_changes.has_key?(:to)
- default_or_changes[:to]
- else
- default_or_changes
+ def index_column_names(column_names)
+ if column_names.is_a?(String) && /\W/.match?(column_names)
+ column_names
+ else
+ Array(column_names)
+ end
+ end
+
+ def index_name_options(column_names)
+ if column_names.is_a?(String) && /\W/.match?(column_names)
+ column_names = column_names.scan(/\w+/).join("_")
+ end
+
+ { column: column_names }
+ end
+
+ def strip_table_name_prefix_and_suffix(table_name)
+ prefix = Base.table_name_prefix
+ suffix = Base.table_name_suffix
+ table_name.to_s =~ /#{prefix}(.+)#{suffix}/ ? $1 : table_name.to_s
+ end
+
+ def foreign_key_name(table_name, options)
+ options.fetch(:name) do
+ identifier = "#{table_name}_#{options.fetch(:column)}_fk"
+ hashed_identifier = Digest::SHA256.hexdigest(identifier).first(10)
+
+ "fk_rails_#{hashed_identifier}"
+ end
+ end
+
+ def foreign_key_for(from_table, **options)
+ return unless supports_foreign_keys?
+ foreign_keys(from_table).detect { |fk| fk.defined_for?(options) }
+ end
+
+ def foreign_key_for!(from_table, to_table: nil, **options)
+ foreign_key_for(from_table, to_table: to_table, **options) ||
+ raise(ArgumentError, "Table '#{from_table}' has no foreign key for #{to_table || options}")
+ end
+
+ def extract_foreign_key_action(specifier)
+ case specifier
+ when "CASCADE"; :cascade
+ when "SET NULL"; :nullify
+ when "RESTRICT"; :restrict
+ end
+ end
+
+ def validate_index_length!(table_name, new_name, internal = false)
+ max_index_length = internal ? index_name_length : allowed_index_name_length
+
+ if new_name.length > max_index_length
+ raise ArgumentError, "Index name '#{new_name}' on table '#{table_name}' is too long; the limit is #{allowed_index_name_length} characters"
+ end
+ end
+
+ def extract_new_default_value(default_or_changes)
+ if default_or_changes.is_a?(Hash) && default_or_changes.has_key?(:from) && default_or_changes.has_key?(:to)
+ default_or_changes[:to]
+ else
+ default_or_changes
+ end
+ end
+
+ def can_remove_index_by_name?(options)
+ options.is_a?(Hash) && options.key?(:name) && options.except(:name, :algorithm).empty?
+ end
+
+ def add_column_for_alter(table_name, column_name, type, options = {})
+ td = create_table_definition(table_name)
+ cd = td.new_column_definition(column_name, type, options)
+ schema_creation.accept(AddColumnDefinition.new(cd))
+ end
+
+ def remove_column_for_alter(table_name, column_name, type = nil, options = {})
+ "DROP COLUMN #{quote_column_name(column_name)}"
+ end
+
+ def remove_columns_for_alter(table_name, *column_names)
+ column_names.map { |column_name| remove_column_for_alter(table_name, column_name) }
+ end
+
+ def insert_versions_sql(versions)
+ sm_table = quote_table_name(ActiveRecord::SchemaMigration.table_name)
+
+ if versions.is_a?(Array)
+ sql = +"INSERT INTO #{sm_table} (version) VALUES\n"
+ sql << versions.map { |v| "(#{quote(v)})" }.join(",\n")
+ sql << ";\n\n"
+ sql
+ else
+ "INSERT INTO #{sm_table} (version) VALUES (#{quote(versions)});"
+ end
+ end
+
+ def data_source_sql(name = nil, type: nil)
+ raise NotImplementedError
+ end
+
+ def quoted_scope(name = nil, type: nil)
+ raise NotImplementedError
end
- end
end
end
end
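Among the new private helpers above, index_column_names is what lets index methods accept either column names or a raw SQL expression: any String containing a non-word character is treated as an expression and passed through untouched. A standalone sketch of the same check:

    # Mirrors the dispatch in index_column_names: expression-like strings pass
    # through as-is, everything else becomes an array of column names.
    def index_column_names(column_names)
      if column_names.is_a?(String) && /\W/.match?(column_names)
        column_names
      else
        Array(column_names)
      end
    end

    p index_column_names(:title)                   # => [:title]
    p index_column_names([:branch_id, :party_id])  # => [:branch_id, :party_id]
    p index_column_names("lower(email)")           # => "lower(email)"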
diff --git a/activerecord/lib/active_record/connection_adapters/abstract/transaction.rb b/activerecord/lib/active_record/connection_adapters/abstract/transaction.rb
index ca795cb1ad..c9e84e48cc 100644
--- a/activerecord/lib/active_record/connection_adapters/abstract/transaction.rb
+++ b/activerecord/lib/active_record/connection_adapters/abstract/transaction.rb
@@ -1,10 +1,15 @@
+# frozen_string_literal: true
+
module ActiveRecord
module ConnectionAdapters
class TransactionState
- VALID_STATES = Set.new([:committed, :rolledback, nil])
-
def initialize(state = nil)
@state = state
+ @children = []
+ end
+
+ def add_child(state)
+ @children << state
end
def finalized?
@@ -12,22 +17,49 @@ module ActiveRecord
end
def committed?
- @state == :committed
+ @state == :committed || @state == :fully_committed
+ end
+
+ def fully_committed?
+ @state == :fully_committed
end
def rolledback?
- @state == :rolledback
+ @state == :rolledback || @state == :fully_rolledback
+ end
+
+ def fully_rolledback?
+ @state == :fully_rolledback
+ end
+
+ def fully_completed?
+ completed?
end
def completed?
committed? || rolledback?
end
- def set_state(state)
- unless VALID_STATES.include?(state)
- raise ArgumentError, "Invalid transaction state: #{state}"
- end
- @state = state
+ def rollback!
+ @children.each { |c| c.rollback! }
+ @state = :rolledback
+ end
+
+ def full_rollback!
+ @children.each { |c| c.rollback! }
+ @state = :fully_rolledback
+ end
+
+ def commit!
+ @state = :committed
+ end
+
+ def full_commit!
+ @state = :fully_committed
+ end
+
+ def nullify!
+ @state = nil
end
end
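
For illustration (not part of the patch): the validated `set_state` is gone; each state now records its child states (one per savepoint transaction), so rolling a parent back also rolls back every nested state, while a child commit stays local. A minimal sketch using only the class shown above:

  parent = ActiveRecord::ConnectionAdapters::TransactionState.new
  child  = ActiveRecord::ConnectionAdapters::TransactionState.new
  parent.add_child(child)

  child.commit!           # the savepoint was released fine...
  parent.full_rollback!   # ...but the outer transaction rolled back

  child.rolledback?        # => true  (parent rollback overrides the child commit)
  parent.fully_rolledback? # => true
  parent.committed?        # => false
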
@@ -41,14 +73,14 @@ module ActiveRecord
end
class Transaction #:nodoc:
-
- attr_reader :connection, :state, :records, :savepoint_name
- attr_writer :joinable
+ attr_reader :connection, :state, :records, :savepoint_name, :isolation_level
def initialize(connection, options, run_commit_callbacks: false)
@connection = connection
@state = TransactionState.new
@records = []
+ @isolation_level = options[:isolation]
+ @materialized = false
@joinable = options.fetch(:joinable, true)
@run_commit_callbacks = run_commit_callbacks
end
@@ -57,8 +89,12 @@ module ActiveRecord
records << record
end
- def rollback
- @state.set_state(:rolledback)
+ def materialize!
+ @materialized = true
+ end
+
+ def materialized?
+ @materialized
end
def rollback_records
@@ -72,10 +108,6 @@ module ActiveRecord
end
end
- def commit
- @state.set_state(:committed)
- end
-
def before_commit_records
records.uniq.each(&:before_committed!) if @run_commit_callbacks
end
@@ -87,7 +119,7 @@ module ActiveRecord
record.committed!
else
# if not running callbacks, only adds the record to the parent transaction
- record.add_to_transaction
+ connection.add_transaction_record(record)
end
end
ensure
@@ -101,47 +133,55 @@ module ActiveRecord
end
class SavepointTransaction < Transaction
- def initialize(connection, savepoint_name, options, *args)
- super(connection, options, *args)
- if options[:isolation]
+ def initialize(connection, savepoint_name, parent_transaction, *args)
+ super(connection, *args)
+
+ parent_transaction.state.add_child(@state)
+ if isolation_level
raise ActiveRecord::TransactionIsolationError, "cannot set transaction isolation in a nested transaction"
end
- connection.create_savepoint(@savepoint_name = savepoint_name)
+
+ @savepoint_name = savepoint_name
end
- def rollback
- connection.rollback_to_savepoint(savepoint_name)
+ def materialize!
+ connection.create_savepoint(savepoint_name)
super
end
+ def rollback
+ connection.rollback_to_savepoint(savepoint_name) if materialized?
+ @state.rollback!
+ end
+
def commit
- connection.release_savepoint(savepoint_name)
- super
+ connection.release_savepoint(savepoint_name) if materialized?
+ @state.commit!
end
def full_rollback?; false; end
end
class RealTransaction < Transaction
-
- def initialize(connection, options, *args)
- super
- if options[:isolation]
- connection.begin_isolated_db_transaction(options[:isolation])
+ def materialize!
+ if isolation_level
+ connection.begin_isolated_db_transaction(isolation_level)
else
connection.begin_db_transaction
end
+
+ super
end
def rollback
- connection.rollback_db_transaction
- super
+ connection.rollback_db_transaction if materialized?
+ @state.full_rollback!
end
def commit
- connection.commit_db_transaction
- super
+ connection.commit_db_transaction if materialized?
+ @state.full_commit!
end
end
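
For illustration (not part of the patch): the behavioral shift in both transaction classes is that BEGIN / SAVEPOINT is no longer issued from the constructor; it is deferred to `materialize!`, and `commit`/`rollback` only talk to the database when the transaction was actually materialized. A small sketch with a stub connection (the stub is made up for the example):

  FakeConnection = Struct.new(:log) do
    def begin_db_transaction;    log << "BEGIN";    end
    def commit_db_transaction;   log << "COMMIT";   end
    def rollback_db_transaction; log << "ROLLBACK"; end
  end

  conn = FakeConnection.new([])

  tx = ActiveRecord::ConnectionAdapters::RealTransaction.new(conn, {})
  tx.commit                 # never materialized, so no COMMIT is sent
  conn.log                  # => []

  tx = ActiveRecord::ConnectionAdapters::RealTransaction.new(conn, {})
  tx.materialize!           # BEGIN goes out here, not in the constructor
  tx.commit
  conn.log                  # => ["BEGIN", "COMMIT"]
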
@@ -149,60 +189,104 @@ module ActiveRecord
def initialize(connection)
@stack = []
@connection = connection
+ @has_unmaterialized_transactions = false
+ @materializing_transactions = false
+ @lazy_transactions_enabled = true
end
def begin_transaction(options = {})
- run_commit_callbacks = !current_transaction.joinable?
- transaction =
- if @stack.empty?
- RealTransaction.new(@connection, options, run_commit_callbacks: run_commit_callbacks)
+ @connection.lock.synchronize do
+ run_commit_callbacks = !current_transaction.joinable?
+ transaction =
+ if @stack.empty?
+ RealTransaction.new(@connection, options, run_commit_callbacks: run_commit_callbacks)
+ else
+ SavepointTransaction.new(@connection, "active_record_#{@stack.size}", @stack.last, options,
+ run_commit_callbacks: run_commit_callbacks)
+ end
+
+ if @connection.supports_lazy_transactions? && lazy_transactions_enabled? && options[:_lazy] != false
+ @has_unmaterialized_transactions = true
else
- SavepointTransaction.new(@connection, "active_record_#{@stack.size}", options,
- run_commit_callbacks: run_commit_callbacks)
+ transaction.materialize!
end
+ @stack.push(transaction)
+ transaction
+ end
+ end
- @stack.push(transaction)
- transaction
+ def disable_lazy_transactions!
+ materialize_transactions
+ @lazy_transactions_enabled = false
end
- def commit_transaction
- transaction = @stack.last
+ def enable_lazy_transactions!
+ @lazy_transactions_enabled = true
+ end
- begin
- transaction.before_commit_records
- ensure
- @stack.pop
+ def lazy_transactions_enabled?
+ @lazy_transactions_enabled
+ end
+
+ def materialize_transactions
+ return if @materializing_transactions
+ return unless @has_unmaterialized_transactions
+
+ @connection.lock.synchronize do
+ begin
+ @materializing_transactions = true
+ @stack.each { |t| t.materialize! unless t.materialized? }
+ ensure
+ @materializing_transactions = false
+ end
+ @has_unmaterialized_transactions = false
end
+ end
+
+ def commit_transaction
+ @connection.lock.synchronize do
+ transaction = @stack.last
+
+ begin
+ transaction.before_commit_records
+ ensure
+ @stack.pop
+ end
- transaction.commit
- transaction.commit_records
+ transaction.commit
+ transaction.commit_records
+ end
end
def rollback_transaction(transaction = nil)
- transaction ||= @stack.pop
- transaction.rollback
- transaction.rollback_records
+ @connection.lock.synchronize do
+ transaction ||= @stack.pop
+ transaction.rollback
+ transaction.rollback_records
+ end
end
def within_new_transaction(options = {})
- transaction = begin_transaction options
- yield
- rescue Exception => error
- if transaction
- rollback_transaction
- after_failure_actions(transaction, error)
- end
- raise
- ensure
- unless error
- if Thread.current.status == 'aborting'
- rollback_transaction if transaction
- else
- begin
- commit_transaction
- rescue Exception
- rollback_transaction(transaction) unless transaction.state.completed?
- raise
+ @connection.lock.synchronize do
+ transaction = begin_transaction options
+ yield
+ rescue Exception => error
+ if transaction
+ rollback_transaction
+ after_failure_actions(transaction, error)
+ end
+ raise
+ ensure
+ if !error && transaction
+ if Thread.current.status == "aborting"
+ rollback_transaction
+ else
+ begin
+ commit_transaction
+ rescue Exception
+ rollback_transaction(transaction) unless transaction.state.completed?
+ raise
+ end
end
end
end
@@ -226,7 +310,6 @@ module ActiveRecord
return unless error.is_a?(ActiveRecord::PreparedStatementCacheExpired)
@connection.clear_cache!
end
-
end
end
end
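
For illustration (not part of the patch): the TransactionManager ties the pieces above together. With an adapter that supports lazy transactions, `begin_transaction` only pushes an unmaterialized transaction; the first query path calls `materialize_transactions` (see `execute` in the MySQL hunk further down), which is when BEGIN/SAVEPOINT actually goes out. A hedged sketch, assuming a `Post` model for the example:

  Post.transaction do
    # no queries issued -> only an unmaterialized RealTransaction is on the stack
  end
  # commit_transaction sees materialized? == false, so neither BEGIN nor COMMIT
  # was ever sent; only the in-memory TransactionState is marked committed.

  Post.transaction do
    Post.first   # first query triggers materialize_transactions -> BEGIN now
  end            # -> COMMIT is sent, because the transaction was materialized
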
diff --git a/activerecord/lib/active_record/connection_adapters/abstract_adapter.rb b/activerecord/lib/active_record/connection_adapters/abstract_adapter.rb
index 069346253a..7aad306d50 100644
--- a/activerecord/lib/active_record/connection_adapters/abstract_adapter.rb
+++ b/activerecord/lib/active_record/connection_adapters/abstract_adapter.rb
@@ -1,11 +1,16 @@
-require 'active_record/type'
-require 'active_record/connection_adapters/determine_if_preparable_visitor'
-require 'active_record/connection_adapters/schema_cache'
-require 'active_record/connection_adapters/sql_type_metadata'
-require 'active_record/connection_adapters/abstract/schema_dumper'
-require 'active_record/connection_adapters/abstract/schema_creation'
-require 'arel/collectors/bind'
-require 'arel/collectors/sql_string'
+# frozen_string_literal: true
+
+require "active_record/connection_adapters/determine_if_preparable_visitor"
+require "active_record/connection_adapters/schema_cache"
+require "active_record/connection_adapters/sql_type_metadata"
+require "active_record/connection_adapters/abstract/schema_dumper"
+require "active_record/connection_adapters/abstract/schema_creation"
+require "active_support/concurrency/load_interlock_aware_monitor"
+require "active_support/deprecation"
+require "arel/collectors/bind"
+require "arel/collectors/composite"
+require "arel/collectors/sql_string"
+require "arel/collectors/substitute_binds"
module ActiveRecord
module ConnectionAdapters # :nodoc:
@@ -14,7 +19,7 @@ module ActiveRecord
autoload :Column
autoload :ConnectionSpecification
- autoload_at 'active_record/connection_adapters/abstract/schema_definitions' do
+ autoload_at "active_record/connection_adapters/abstract/schema_definitions" do
autoload :IndexDefinition
autoload :ColumnDefinition
autoload :ChangeColumnDefinition
@@ -25,11 +30,11 @@ module ActiveRecord
autoload :ReferenceDefinition
end
- autoload_at 'active_record/connection_adapters/abstract/connection_pool' do
+ autoload_at "active_record/connection_adapters/abstract/connection_pool" do
autoload :ConnectionHandler
end
- autoload_under 'abstract' do
+ autoload_under "abstract" do
autoload :SchemaStatements
autoload :DatabaseStatements
autoload :DatabaseLimits
@@ -39,7 +44,7 @@ module ActiveRecord
autoload :Savepoints
end
- autoload_at 'active_record/connection_adapters/abstract/transaction' do
+ autoload_at "active_record/connection_adapters/abstract/transaction" do
autoload :TransactionManager
autoload :NullTransaction
autoload :RealTransaction
@@ -61,23 +66,27 @@ module ActiveRecord
# Most of the methods in the adapter are useful during migrations. Most
# notably, the instance methods provided by SchemaStatements are very useful.
class AbstractAdapter
- ADAPTER_NAME = 'Abstract'.freeze
+ ADAPTER_NAME = "Abstract"
+ include ActiveSupport::Callbacks
+ define_callbacks :checkout, :checkin
+
include Quoting, DatabaseStatements, SchemaStatements
include DatabaseLimits
include QueryCache
- include ActiveSupport::Callbacks
- include ColumnDumper
+ include Savepoints
SIMPLE_INT = /\A\d+\z/
- define_callbacks :checkout, :checkin
-
- attr_accessor :visitor, :pool
- attr_reader :schema_cache, :owner, :logger
+ attr_accessor :pool
+ attr_reader :schema_cache, :visitor, :owner, :logger, :lock, :prepared_statements, :prevent_writes
alias :in_use? :owner
+ set_callback :checkin, :after, :enable_lazy_transactions!
+
def self.type_cast_config_to_integer(config)
- if config =~ SIMPLE_INT
+ if config.is_a?(Integer)
+ config
+ elsif SIMPLE_INT.match?(config)
config.to_i
else
config
@@ -92,7 +101,10 @@ module ActiveRecord
end
end
- attr_reader :prepared_statements
+ def self.build_read_query_regexp(*parts) # :nodoc:
+ parts = parts.map { |part| /\A[\(\s]*#{part}/i }
+ Regexp.union(*parts)
+ end
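
For illustration (not part of the patch): `build_read_query_regexp` just unions anchored, case-insensitive prefixes; concrete adapters call it with whatever statement keywords they treat as reads. The keyword list below is made up for the example:

  read_query = ActiveRecord::ConnectionAdapters::AbstractAdapter
    .build_read_query_regexp(:select, :show, :explain)

  read_query.match?("SELECT * FROM posts")    # => true
  read_query.match?("  (select 1)")           # => true, leading spaces/parens allowed
  read_query.match?("UPDATE posts SET x = 1") # => false
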
def initialize(connection, logger = nil, config = {}) # :nodoc:
super()
@@ -103,61 +115,88 @@ module ActiveRecord
@logger = logger
@config = config
@pool = nil
+ @idle_since = Concurrent.monotonic_time
@schema_cache = SchemaCache.new self
- @visitor = nil
- @prepared_statements = false
@quoted_column_names, @quoted_table_names = {}, {}
+ @prevent_writes = false
+ @visitor = arel_visitor
+ @statements = build_statement_pool
+ @lock = ActiveSupport::Concurrency::LoadInterlockAwareMonitor.new
+
+ if self.class.type_cast_config_to_boolean(config.fetch(:prepared_statements) { true })
+ @prepared_statements = true
+ @visitor.extend(DetermineIfPreparableVisitor)
+ else
+ @prepared_statements = false
+ end
+
+ @advisory_locks_enabled = self.class.type_cast_config_to_boolean(
+ config.fetch(:advisory_locks, true)
+ )
+
+ check_version
end
- class Version
- include Comparable
+ def replica?
+ @config[:replica] || false
+ end
- def initialize(version_string)
- @version = version_string.split('.').map(&:to_i)
- end
+ # Determines whether writes are currently being prevented.
+ #
+ # Returns true if the connection is a replica, or if +prevent_writes+
+ # is set to true.
+ def preventing_writes?
+ replica? || prevent_writes
+ end
- def <=>(version_string)
- @version <=> version_string.split('.').map(&:to_i)
- end
+ # Prevent writing to the database regardless of role.
+ #
+ # In some cases you may want to prevent writes to the database
+ # even if you are on a database that can write. `while_preventing_writes`
+ # will prevent writes to the database for the duration of the block.
+ def while_preventing_writes
+ original, @prevent_writes = @prevent_writes, true
+ yield
+ ensure
+ @prevent_writes = original
end
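
For illustration (not part of the patch): `preventing_writes?` is what the adapters' query paths consult before running a write; `while_preventing_writes` is the block-scoped switch, restored in the ensure even if the block raises. A hedged usage sketch:

  conn = ActiveRecord::Base.connection

  conn.while_preventing_writes do
    conn.preventing_writes?   # => true inside the block; write queries sent
                              #    through the adapter are rejected here
  end

  conn.preventing_writes?     # => false again (unless the connection is a replica)
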
- class BindCollector < Arel::Collectors::Bind
- def compile(bvs, conn)
- casted_binds = conn.prepare_binds_for_database(bvs)
- super(casted_binds.map { |value| conn.quote(value) })
- end
+ def migrations_paths # :nodoc:
+ @config[:migrations_paths] || Migrator.migrations_paths
end
- class SQLString < Arel::Collectors::SQLString
- def compile(bvs, conn)
- super(bvs)
- end
+ def migration_context # :nodoc:
+ MigrationContext.new(migrations_paths)
end
- def collector
- if prepared_statements
- SQLString.new
- else
- BindCollector.new
+ class Version
+ include Comparable
+
+ def initialize(version_string)
+ @version = version_string.split(".").map(&:to_i)
end
- end
- def valid_type?(type)
- true
+ def <=>(version_string)
+ @version <=> version_string.split(".").map(&:to_i)
+ end
+
+ def to_s
+ @version.join(".")
+ end
end
- def schema_creation
- SchemaCreation.new self
+ def valid_type?(type) # :nodoc:
+ !native_database_types[type].nil?
end
# this method must only be called while holding connection pool's mutex
def lease
if in_use?
- msg = 'Cannot lease connection, '
+ msg = +"Cannot lease connection, "
if @owner == Thread.current
- msg << 'it is already leased by the current thread.'
+ msg << "it is already leased by the current thread."
else
- msg << "it is already in use by a different thread: #{@owner}. " <<
+ msg << "it is already in use by a different thread: #{@owner}. " \
"Current thread: #{Thread.current}."
end
raise ActiveRecordError, msg
@@ -173,7 +212,37 @@ module ActiveRecord
# this method must only be called while holding connection pool's mutex
def expire
- @owner = nil
+ if in_use?
+ if @owner != Thread.current
+ raise ActiveRecordError, "Cannot expire connection, " \
+ "it is owned by a different thread: #{@owner}. " \
+ "Current thread: #{Thread.current}."
+ end
+
+ @idle_since = Concurrent.monotonic_time
+ @owner = nil
+ else
+ raise ActiveRecordError, "Cannot expire connection, it is not currently leased."
+ end
+ end
+
+ # this method must only be called while holding connection pool's mutex (and a desire for segfaults)
+ def steal! # :nodoc:
+ if in_use?
+ if @owner != Thread.current
+ pool.send :remove_connection_from_thread_cache, self, @owner
+
+ @owner = Thread.current
+ end
+ else
+ raise ActiveRecordError, "Cannot steal connection, it is not currently leased."
+ end
+ end
+
+ # Seconds since this connection was returned to the pool
+ def seconds_idle # :nodoc:
+ return 0 if in_use?
+ Concurrent.monotonic_time - @idle_since
end
def unprepared_statement
@@ -189,17 +258,6 @@ module ActiveRecord
self.class::ADAPTER_NAME
end
- # Does this adapter support migrations?
- def supports_migrations?
- false
- end
-
- # Can this adapter determine the primary key for tables not attached
- # to an Active Record class, such as join tables?
- def supports_primary_key?
- false
- end
-
# Does this adapter support DDL rollbacks in transactions? That is, would
# CREATE TABLE or ALTER TABLE get rolled back by a transaction?
def supports_ddl_transactions?
@@ -237,6 +295,11 @@ module ActiveRecord
false
end
+ # Does this adapter support expression indices?
+ def supports_expression_index?
+ false
+ end
+
# Does this adapter support explain?
def supports_explain?
false
@@ -263,11 +326,28 @@ module ActiveRecord
false
end
+ # Does this adapter support creating invalid constraints?
+ def supports_validate_constraints?
+ false
+ end
+
+ # Does this adapter support creating foreign key constraints
+ # in the same statement as creating the table?
+ def supports_foreign_keys_in_create?
+ supports_foreign_keys?
+ end
+ deprecate :supports_foreign_keys_in_create?
+
# Does this adapter support views?
def supports_views?
false
end
+ # Does this adapter support materialized views?
+ def supports_materialized_views?
+ false
+ end
+
# Does this adapter support datetime with precision?
def supports_datetime_with_precision?
false
@@ -278,6 +358,57 @@ module ActiveRecord
false
end
+ # Does this adapter support metadata comments on database objects (tables, columns, indexes)?
+ def supports_comments?
+ false
+ end
+
+ # Can comments for tables, columns, and indexes be specified in create/alter table statements?
+ def supports_comments_in_create?
+ false
+ end
+
+ # Does this adapter support multi-value insert?
+ def supports_multi_insert?
+ true
+ end
+ deprecate :supports_multi_insert?
+
+ # Does this adapter support virtual columns?
+ def supports_virtual_columns?
+ false
+ end
+
+ # Does this adapter support foreign/external tables?
+ def supports_foreign_tables?
+ false
+ end
+
+ # Does this adapter support optimizer hints?
+ def supports_optimizer_hints?
+ false
+ end
+
+ def supports_lazy_transactions?
+ false
+ end
+
+ def supports_insert_returning?
+ false
+ end
+
+ def supports_insert_on_duplicate_skip?
+ false
+ end
+
+ def supports_insert_on_duplicate_update?
+ false
+ end
+
+ def supports_insert_conflict_target?
+ false
+ end
+
# This is meant to be implemented by the adapters that support extensions
def disable_extension(name)
end
@@ -286,6 +417,10 @@ module ActiveRecord
def enable_extension(name)
end
+ def advisory_locks_enabled? # :nodoc:
+ supports_advisory_locks? && @advisory_locks_enabled
+ end
+
# This is meant to be implemented by the adapters that support advisory
# locks
#
@@ -340,6 +475,19 @@ module ActiveRecord
reset_transaction
end
+ # Immediately forget this connection ever existed. Unlike disconnect!,
+ # this will not communicate with the server.
+ #
+ # After calling this method, the behavior of all other methods becomes
+ # undefined. This is called internally just before a forked process gets
+ # rid of a connection that belonged to its parent.
+ def discard!
+ # This should be overridden by concrete adapters.
+ #
+ # Prevent @connection's finalizer from touching the socket, or
+ # otherwise communicating with its server, when it is collected.
+ end
+
# Reset the state of this connection, directing the DBMS to clear
# transactions and other connection-related server-side state. Usually a
# database-dependent operation.
@@ -350,11 +498,9 @@ module ActiveRecord
# this should be overridden by concrete adapters
end
- ###
- # Clear any caching the database adapter may be doing, for example
- # clearing the prepared statement cache. This is database specific.
+ # Clear any caching the database adapter may be doing.
def clear_cache!
- # this should be overridden by concrete adapters
+ @lock.synchronize { @statements.clear } if @statements
end
# Returns true if it's required to reload the connection between requests for development mode.
@@ -365,39 +511,36 @@ module ActiveRecord
# Checks whether the connection to the database is still active (i.e. not stale).
# This is done under the hood by calling #active?. If the connection
# is no longer active, then this method will reconnect to the database.
- def verify!(*ignored)
+ def verify!
reconnect! unless active?
end
# Provides access to the underlying database driver for this adapter. For
# example, this method returns a Mysql2::Client object in case of Mysql2Adapter,
- # and a PGconn object in case of PostgreSQLAdapter.
+ # and a PG::Connection object in case of PostgreSQLAdapter.
#
# This is useful for when you need to call a proprietary method such as
# PostgreSQL's lo_* methods.
def raw_connection
+ disable_lazy_transactions!
@connection
end
- def create_savepoint(name = nil)
+ def default_uniqueness_comparison(attribute, value, klass) # :nodoc:
+ attribute.eq(value)
end
- def release_savepoint(name = nil)
+ def case_sensitive_comparison(attribute, value) # :nodoc:
+ attribute.eq(value)
end
- def case_sensitive_comparison(table, attribute, column, value)
- if value.nil?
- table[attribute].eq(value)
- else
- table[attribute].eq(Arel::Nodes::BindParam.new)
- end
- end
+ def case_insensitive_comparison(attribute, value) # :nodoc:
+ column = column_for_attribute(attribute)
- def case_insensitive_comparison(table, attribute, column, value)
if can_perform_case_insensitive_comparison_for?(column)
- table[attribute].lower.eq(table.lower(Arel::Nodes::BindParam.new))
+ attribute.lower.eq(attribute.relation.lower(value))
else
- table[attribute].eq(Arel::Nodes::BindParam.new)
+ attribute.eq(value)
end
end
@@ -406,143 +549,182 @@ module ActiveRecord
end
private :can_perform_case_insensitive_comparison_for?
- def current_savepoint_name
- current_transaction.savepoint_name
- end
-
# Check the connection back in to the connection pool
def close
pool.checkin self
end
- def type_map # :nodoc:
- @type_map ||= Type::TypeMap.new.tap do |mapping|
- initialize_type_map(mapping)
- end
+ def column_name_for_operation(operation, node) # :nodoc:
+ visitor.compile(node)
end
- def new_column(name, default, sql_type_metadata, null, table_name, default_function = nil, collation = nil) # :nodoc:
- Column.new(name, default, sql_type_metadata, null, table_name, default_function, collation)
+ def default_index_type?(index) # :nodoc:
+ index.using.nil?
end
- def lookup_cast_type(sql_type) # :nodoc:
- type_map.lookup(sql_type)
+ # Called by ActiveRecord::InsertAll and passed an instance of
+ # ActiveRecord::InsertAll::Builder. This method implements standard
+ # bulk inserts for all databases, but should be overridden by adapters
+ # to implement common features with non-standard syntax, like handling
+ # duplicates or returning values.
+ def build_insert_sql(insert) # :nodoc:
+ if insert.skip_duplicates? || insert.update_duplicates?
+ raise NotImplementedError, "#{self.class} should define `build_insert_sql` to implement adapter-specific logic for handling duplicates during INSERT"
+ end
+
+ "INSERT #{insert.into} #{insert.values_list}"
end
- def column_name_for_operation(operation, node) # :nodoc:
- visitor.accept(node, collector).value
- end
-
- protected
-
- def initialize_type_map(m) # :nodoc:
- register_class_with_limit m, %r(boolean)i, Type::Boolean
- register_class_with_limit m, %r(char)i, Type::String
- register_class_with_limit m, %r(binary)i, Type::Binary
- register_class_with_limit m, %r(text)i, Type::Text
- register_class_with_precision m, %r(date)i, Type::Date
- register_class_with_precision m, %r(time)i, Type::Time
- register_class_with_precision m, %r(datetime)i, Type::DateTime
- register_class_with_limit m, %r(float)i, Type::Float
- register_class_with_limit m, %r(int)i, Type::Integer
-
- m.alias_type %r(blob)i, 'binary'
- m.alias_type %r(clob)i, 'text'
- m.alias_type %r(timestamp)i, 'datetime'
- m.alias_type %r(numeric)i, 'decimal'
- m.alias_type %r(number)i, 'decimal'
- m.alias_type %r(double)i, 'float'
-
- m.register_type(%r(decimal)i) do |sql_type|
- scale = extract_scale(sql_type)
- precision = extract_precision(sql_type)
-
- if scale == 0
- # FIXME: Remove this class as well
- Type::DecimalWithoutScale.new(precision: precision)
- else
- Type::Decimal.new(precision: precision, scale: scale)
+ private
+ def check_version
+ end
+
+ def type_map
+ @type_map ||= Type::TypeMap.new.tap do |mapping|
+ initialize_type_map(mapping)
end
end
- end
- def reload_type_map # :nodoc:
- type_map.clear
- initialize_type_map(type_map)
- end
+ def initialize_type_map(m = type_map)
+ register_class_with_limit m, %r(boolean)i, Type::Boolean
+ register_class_with_limit m, %r(char)i, Type::String
+ register_class_with_limit m, %r(binary)i, Type::Binary
+ register_class_with_limit m, %r(text)i, Type::Text
+ register_class_with_precision m, %r(date)i, Type::Date
+ register_class_with_precision m, %r(time)i, Type::Time
+ register_class_with_precision m, %r(datetime)i, Type::DateTime
+ register_class_with_limit m, %r(float)i, Type::Float
+ register_class_with_limit m, %r(int)i, Type::Integer
+
+ m.alias_type %r(blob)i, "binary"
+ m.alias_type %r(clob)i, "text"
+ m.alias_type %r(timestamp)i, "datetime"
+ m.alias_type %r(numeric)i, "decimal"
+ m.alias_type %r(number)i, "decimal"
+ m.alias_type %r(double)i, "float"
+
+ m.register_type %r(^json)i, Type::Json.new
+
+ m.register_type(%r(decimal)i) do |sql_type|
+ scale = extract_scale(sql_type)
+ precision = extract_precision(sql_type)
+
+ if scale == 0
+ # FIXME: Remove this class as well
+ Type::DecimalWithoutScale.new(precision: precision)
+ else
+ Type::Decimal.new(precision: precision, scale: scale)
+ end
+ end
+ end
- def register_class_with_limit(mapping, key, klass) # :nodoc:
- mapping.register_type(key) do |*args|
- limit = extract_limit(args.last)
- klass.new(limit: limit)
+ def reload_type_map
+ type_map.clear
+ initialize_type_map
end
- end
- def register_class_with_precision(mapping, key, klass) # :nodoc:
- mapping.register_type(key) do |*args|
- precision = extract_precision(args.last)
- klass.new(precision: precision)
+ def register_class_with_limit(mapping, key, klass)
+ mapping.register_type(key) do |*args|
+ limit = extract_limit(args.last)
+ klass.new(limit: limit)
+ end
+ end
+
+ def register_class_with_precision(mapping, key, klass)
+ mapping.register_type(key) do |*args|
+ precision = extract_precision(args.last)
+ klass.new(precision: precision)
+ end
end
- end
- def extract_scale(sql_type) # :nodoc:
- case sql_type
+ def extract_scale(sql_type)
+ case sql_type
when /\((\d+)\)/ then 0
when /\((\d+)(,(\d+))\)/ then $3.to_i
+ end
end
- end
- def extract_precision(sql_type) # :nodoc:
- $1.to_i if sql_type =~ /\((\d+)(,\d+)?\)/
- end
+ def extract_precision(sql_type)
+ $1.to_i if sql_type =~ /\((\d+)(,\d+)?\)/
+ end
- def extract_limit(sql_type) # :nodoc:
- case sql_type
- when /^bigint/i
- 8
- when /\((.*)\)/
- $1.to_i
+ def extract_limit(sql_type)
+ $1.to_i if sql_type =~ /\((.*)\)/
end
- end
- def translate_exception_class(e, sql)
- begin
- message = "#{e.class.name}: #{e.message}: #{sql}"
- rescue Encoding::CompatibilityError
- message = "#{e.class.name}: #{e.message.force_encoding sql.encoding}: #{sql}"
+ def translate_exception_class(e, sql, binds)
+ message = "#{e.class.name}: #{e.message}"
+
+ exception = translate_exception(
+ e, message: message, sql: sql, binds: binds
+ )
+ exception.set_backtrace e.backtrace
+ exception
end
- exception = translate_exception(e, message)
- exception.set_backtrace e.backtrace
- exception
- end
+ def log(sql, name = "SQL", binds = [], type_casted_binds = [], statement_name = nil) # :doc:
+ @instrumenter.instrument(
+ "sql.active_record",
+ sql: sql,
+ name: name,
+ binds: binds,
+ type_casted_binds: type_casted_binds,
+ statement_name: statement_name,
+ connection_id: object_id,
+ connection: self) do
+ @lock.synchronize do
+ yield
+ end
+ rescue => e
+ raise translate_exception_class(e, sql, binds)
+ end
+ end
- def log(sql, name = "SQL", binds = [], statement_name = nil)
- @instrumenter.instrument(
- "sql.active_record",
- :sql => sql,
- :name => name,
- :connection_id => object_id,
- :statement_name => statement_name,
- :binds => binds) { yield }
- rescue => e
- raise translate_exception_class(e, sql)
- end
+ def translate_exception(exception, message:, sql:, binds:)
+ # override in derived class
+ case exception
+ when RuntimeError
+ exception
+ else
+ ActiveRecord::StatementInvalid.new(message, sql: sql, binds: binds)
+ end
+ end
- def translate_exception(exception, message)
- # override in derived class
- ActiveRecord::StatementInvalid.new(message)
- end
+ def without_prepared_statement?(binds)
+ !prepared_statements || binds.empty?
+ end
- def without_prepared_statement?(binds)
- !prepared_statements || binds.empty?
- end
+ def column_for(table_name, column_name)
+ column_name = column_name.to_s
+ columns(table_name).detect { |c| c.name == column_name } ||
+ raise(ActiveRecordError, "No such column: #{table_name}.#{column_name}")
+ end
- def column_for(table_name, column_name) # :nodoc:
- column_name = column_name.to_s
- columns(table_name).detect { |c| c.name == column_name } ||
- raise(ActiveRecordError, "No such column: #{table_name}.#{column_name}")
- end
+ def column_for_attribute(attribute)
+ table_name = attribute.relation.name
+ schema_cache.columns_hash(table_name)[attribute.name.to_s]
+ end
+
+ def collector
+ if prepared_statements
+ Arel::Collectors::Composite.new(
+ Arel::Collectors::SQLString.new,
+ Arel::Collectors::Bind.new,
+ )
+ else
+ Arel::Collectors::SubstituteBinds.new(
+ self,
+ Arel::Collectors::SQLString.new,
+ )
+ end
+ end
+
+ def arel_visitor
+ Arel::Visitors::ToSql.new(self)
+ end
+
+ def build_statement_pool
+ end
end
end
end
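
For illustration (not part of the patch): the reworked `log` method enriches the "sql.active_record" notification payload with the type-cast binds and the adapter instance itself. A small subscriber sketch using the standard ActiveSupport::Notifications API:

  ActiveSupport::Notifications.subscribe("sql.active_record") do |*, payload|
    puts payload[:sql]
    puts payload[:type_casted_binds].inspect # new in this payload, may be empty
    puts payload[:connection].class          # the adapter instance (connection: self)
  end
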
diff --git a/activerecord/lib/active_record/connection_adapters/abstract_mysql_adapter.rb b/activerecord/lib/active_record/connection_adapters/abstract_mysql_adapter.rb
index 8015d1ed9e..8ca2cfa9ed 100644
--- a/activerecord/lib/active_record/connection_adapters/abstract_mysql_adapter.rb
+++ b/activerecord/lib/active_record/connection_adapters/abstract_mysql_adapter.rb
@@ -1,28 +1,21 @@
-require 'active_record/connection_adapters/abstract_adapter'
-require 'active_record/connection_adapters/mysql/column'
-require 'active_record/connection_adapters/mysql/explain_pretty_printer'
-require 'active_record/connection_adapters/mysql/quoting'
-require 'active_record/connection_adapters/mysql/schema_creation'
-require 'active_record/connection_adapters/mysql/schema_definitions'
-require 'active_record/connection_adapters/mysql/schema_dumper'
-require 'active_record/connection_adapters/mysql/type_metadata'
-
-require 'active_support/core_ext/string/strip'
+# frozen_string_literal: true
+
+require "active_record/connection_adapters/abstract_adapter"
+require "active_record/connection_adapters/statement_pool"
+require "active_record/connection_adapters/mysql/column"
+require "active_record/connection_adapters/mysql/explain_pretty_printer"
+require "active_record/connection_adapters/mysql/quoting"
+require "active_record/connection_adapters/mysql/schema_creation"
+require "active_record/connection_adapters/mysql/schema_definitions"
+require "active_record/connection_adapters/mysql/schema_dumper"
+require "active_record/connection_adapters/mysql/schema_statements"
+require "active_record/connection_adapters/mysql/type_metadata"
module ActiveRecord
module ConnectionAdapters
class AbstractMysqlAdapter < AbstractAdapter
include MySQL::Quoting
- include MySQL::ColumnDumper
- include Savepoints
-
- def update_table_definition(table_name, base) # :nodoc:
- MySQL::Table.new(table_name, base)
- end
-
- def schema_creation
- MySQL::SchemaCreation.new(self)
- end
+ include MySQL::SchemaStatements
##
# :singleton-method:
@@ -31,75 +24,55 @@ module ActiveRecord
# to your application.rb file:
#
# ActiveRecord::ConnectionAdapters::Mysql2Adapter.emulate_booleans = false
- class_attribute :emulate_booleans
- self.emulate_booleans = true
-
- QUOTED_TRUE, QUOTED_FALSE = '1', '0'
+ class_attribute :emulate_booleans, default: true
NATIVE_DATABASE_TYPES = {
- primary_key: "int auto_increment PRIMARY KEY",
+ primary_key: "bigint auto_increment PRIMARY KEY",
string: { name: "varchar", limit: 255 },
text: { name: "text" },
integer: { name: "int", limit: 4 },
- float: { name: "float" },
+ float: { name: "float", limit: 24 },
decimal: { name: "decimal" },
datetime: { name: "datetime" },
+ timestamp: { name: "timestamp" },
time: { name: "time" },
date: { name: "date" },
binary: { name: "blob" },
+ blob: { name: "blob" },
boolean: { name: "tinyint", limit: 1 },
json: { name: "json" },
}
- INDEX_TYPES = [:fulltext, :spatial]
- INDEX_USINGS = [:btree, :hash]
-
- def initialize(connection, logger, connection_options, config)
- super(connection, logger, config)
-
- @visitor = Arel::Visitors::MySQL.new self
-
- if self.class.type_cast_config_to_boolean(config.fetch(:prepared_statements) { true })
- @prepared_statements = true
- @visitor.extend(DetermineIfPreparableVisitor)
- else
- @prepared_statements = false
- end
-
- if version < '5.0.0'
- raise "Your version of MySQL (#{full_version.match(/^\d+\.\d+\.\d+/)[0]}) is too old. Active Record supports MySQL >= 5.0."
- end
- end
-
- CHARSETS_OF_4BYTES_MAXLEN = ['utf8mb4', 'utf16', 'utf16le', 'utf32']
+ class StatementPool < ConnectionAdapters::StatementPool # :nodoc:
+ private
- def internal_string_options_for_primary_key # :nodoc:
- super.tap { |options|
- options[:collation] = collation.sub(/\A[^_]+/, 'utf8') if CHARSETS_OF_4BYTES_MAXLEN.include?(charset)
- }
+ def dealloc(stmt)
+ stmt.close
+ end
end
- def version
- @version ||= Version.new(full_version.match(/^\d+\.\d+\.\d+/)[0])
+ def initialize(connection, logger, connection_options, config)
+ super(connection, logger, config)
end
- # Returns true, since this connection adapter supports migrations.
- def supports_migrations?
- true
+ def version #:nodoc:
+ @version ||= Version.new(version_string)
end
- def supports_primary_key?
- true
+ def mariadb? # :nodoc:
+ /mariadb/i.match?(full_version)
end
def supports_bulk_alter? #:nodoc:
true
end
- # Technically MySQL allows to create indexes with the sort order syntax
- # but at the moment (5.5) it doesn't yet implement them
def supports_index_sort_order?
- true
+ !mariadb? && version >= "8.0.1"
+ end
+
+ def supports_expression_index?
+ !mariadb? && version >= "8.0.13"
end
def supports_transaction_isolation?
@@ -123,19 +96,36 @@ module ActiveRecord
end
def supports_datetime_with_precision?
- version >= '5.6.4'
+ mariadb? || version >= "5.6.4"
+ end
+
+ def supports_virtual_columns?
+ mariadb? || version >= "5.7.5"
+ end
+
+ # See https://dev.mysql.com/doc/refman/8.0/en/optimizer-hints.html for more details.
+ def supports_optimizer_hints?
+ !mariadb? && version >= "5.7.7"
end
def supports_advisory_locks?
true
end
+ def supports_insert_on_duplicate_skip?
+ true
+ end
+
+ def supports_insert_on_duplicate_update?
+ true
+ end
+
def get_advisory_lock(lock_name, timeout = 0) # :nodoc:
- select_value("SELECT GET_LOCK('#{lock_name}', #{timeout});").to_s == '1'
+ query_value("SELECT GET_LOCK(#{quote(lock_name.to_s)}, #{timeout})") == 1
end
def release_advisory_lock(lock_name) # :nodoc:
- select_value("SELECT RELEASE_LOCK('#{lock_name}')").to_s == '1'
+ query_value("SELECT RELEASE_LOCK(#{quote(lock_name.to_s)})") == 1
end
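
For illustration (not part of the patch): the advisory-lock helpers now quote the lock name and compare the integer result directly. A hedged usage sketch, assuming a MySQL connection:

  conn = ActiveRecord::Base.connection

  if conn.get_advisory_lock("my_app.reindex", 5) # GET_LOCK waits up to 5 seconds
    begin
      # ... critical section ...
    ensure
      conn.release_advisory_lock("my_app.reindex")
    end
  end
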
def native_database_types
@@ -143,7 +133,7 @@ module ActiveRecord
end
def index_algorithms
- { default: 'ALGORITHM = DEFAULT', copy: 'ALGORITHM = COPY', inplace: 'ALGORITHM = INPLACE' }
+ { default: +"ALGORITHM = DEFAULT", copy: +"ALGORITHM = COPY", inplace: +"ALGORITHM = INPLACE" }
end
# HELPER METHODS ===========================================
@@ -154,48 +144,16 @@ module ActiveRecord
raise NotImplementedError
end
- def new_column(field, default, sql_type_metadata, null, table_name, default_function = nil, collation = nil) # :nodoc:
- MySQL::Column.new(field, default, sql_type_metadata, null, table_name, default_function, collation)
- end
-
# Must return the MySQL error number from the exception, if the exception has an
# error number.
def error_number(exception) # :nodoc:
raise NotImplementedError
end
- #--
- # QUOTING ==================================================
- #++
-
- def quoted_true
- QUOTED_TRUE
- end
-
- def unquoted_true
- 1
- end
-
- def quoted_false
- QUOTED_FALSE
- end
-
- def unquoted_false
- 0
- end
-
- def quoted_date(value)
- if supports_datetime_with_precision?
- super
- else
- super.sub(/\.\d{6}\z/, '')
- end
- end
-
# REFERENTIAL INTEGRITY ====================================
def disable_referential_integrity #:nodoc:
- old = select_value("SELECT @@FOREIGN_KEY_CHECKS")
+ old = query_value("SELECT @@FOREIGN_KEY_CHECKS")
begin
update("SET FOREIGN_KEY_CHECKS = 0")
@@ -205,27 +163,35 @@ module ActiveRecord
end
end
+ # CONNECTION MANAGEMENT ====================================
+
+ def clear_cache! # :nodoc:
+ reload_type_map
+ super
+ end
+
#--
# DATABASE STATEMENTS ======================================
#++
def explain(arel, binds = [])
sql = "EXPLAIN #{to_sql(arel, binds)}"
- start = Time.now
- result = exec_query(sql, 'EXPLAIN', binds)
- elapsed = Time.now - start
+ start = Concurrent.monotonic_time
+ result = exec_query(sql, "EXPLAIN", binds)
+ elapsed = Concurrent.monotonic_time - start
MySQL::ExplainPrettyPrinter.new.pp(result, elapsed)
end
- def clear_cache!
- super
- reload_type_map
- end
-
# Executes the SQL statement in the context of this connection.
def execute(sql, name = nil)
- log(sql, name) { @connection.query(sql) }
+ materialize_transactions
+
+ log(sql, name) do
+ ActiveSupport::Dependencies.interlock.permit_concurrent_loads do
+ @connection.query(sql)
+ end
+ end
end
# Mysql2Adapter doesn't have to free a result after using it, but we use this method
@@ -252,19 +218,7 @@ module ActiveRecord
execute "ROLLBACK"
end
- # In the simple case, MySQL allows us to place JOINs directly into the UPDATE
- # query. However, this does not allow for LIMIT, OFFSET and ORDER. To support
- # these, we must use a subquery.
- def join_to_update(update, select, key) # :nodoc:
- if select.limit || select.offset || select.orders.any?
- super
- else
- update.table select.source
- update.wheres = select.constraints
- end
- end
-
- def empty_insert_statement_value
+ def empty_insert_statement_value(primary_key = nil)
"VALUES ()"
end
@@ -280,7 +234,7 @@ module ActiveRecord
end
# Create a new MySQL database with optional <tt>:charset</tt> and <tt>:collation</tt>.
- # Charset defaults to utf8.
+ # Charset defaults to utf8mb4.
#
# Example:
# create_database 'charset_test', charset: 'latin1', collation: 'latin1_bin'
@@ -288,9 +242,13 @@ module ActiveRecord
# create_database 'matt_development', charset: :big5
def create_database(name, options = {})
if options[:collation]
- execute "CREATE DATABASE `#{name}` DEFAULT CHARACTER SET `#{options[:charset] || 'utf8'}` COLLATE `#{options[:collation]}`"
+ execute "CREATE DATABASE #{quote_table_name(name)} DEFAULT COLLATE #{quote_table_name(options[:collation])}"
+ elsif options[:charset]
+ execute "CREATE DATABASE #{quote_table_name(name)} DEFAULT CHARACTER SET #{quote_table_name(options[:charset])}"
+ elsif row_format_dynamic_by_default?
+ execute "CREATE DATABASE #{quote_table_name(name)} DEFAULT CHARACTER SET `utf8mb4`"
else
- execute "CREATE DATABASE `#{name}` DEFAULT CHARACTER SET `#{options[:charset] || 'utf8'}`"
+ raise "Configure a supported :charset and ensure innodb_large_prefix is enabled to support indexes on varchar(255) string columns."
end
end
@@ -299,135 +257,38 @@ module ActiveRecord
# Example:
# drop_database('sebastian_development')
def drop_database(name) #:nodoc:
- execute "DROP DATABASE IF EXISTS `#{name}`"
+ execute "DROP DATABASE IF EXISTS #{quote_table_name(name)}"
end
def current_database
- select_value 'SELECT DATABASE() as db'
+ query_value("SELECT database()", "SCHEMA")
end
# Returns the database character set.
def charset
- show_variable 'character_set_database'
+ show_variable "character_set_database"
end
# Returns the database collation strategy.
def collation
- show_variable 'collation_database'
- end
-
- def tables(name = nil) # :nodoc:
- ActiveSupport::Deprecation.warn(<<-MSG.squish)
- #tables currently returns both tables and views.
- This behavior is deprecated and will be changed with Rails 5.1 to only return tables.
- Use #data_sources instead.
- MSG
-
- if name
- ActiveSupport::Deprecation.warn(<<-MSG.squish)
- Passing arguments to #tables is deprecated without replacement.
- MSG
- end
-
- data_sources
- end
-
- def data_sources
- sql = "SELECT table_name FROM information_schema.tables "
- sql << "WHERE table_schema = #{quote(@config[:database])}"
-
- select_values(sql, 'SCHEMA')
- end
-
- def truncate(table_name, name = nil)
- execute "TRUNCATE TABLE #{quote_table_name(table_name)}", name
- end
-
- def table_exists?(table_name)
- # Update lib/active_record/internal_metadata.rb when this gets removed
- ActiveSupport::Deprecation.warn(<<-MSG.squish)
- #table_exists? currently checks both tables and views.
- This behavior is deprecated and will be changed with Rails 5.1 to only check tables.
- Use #data_source_exists? instead.
- MSG
-
- data_source_exists?(table_name)
+ show_variable "collation_database"
end
- def data_source_exists?(table_name)
- return false unless table_name.present?
+ def table_comment(table_name) # :nodoc:
+ scope = quoted_scope(table_name)
- schema, name = table_name.to_s.split('.', 2)
- schema, name = @config[:database], schema unless name # A table was provided without a schema
-
- sql = "SELECT table_name FROM information_schema.tables "
- sql << "WHERE table_schema = #{quote(schema)} AND table_name = #{quote(name)}"
-
- select_values(sql, 'SCHEMA').any?
- end
-
- def views # :nodoc:
- select_values("SHOW FULL TABLES WHERE table_type = 'VIEW'", 'SCHEMA')
- end
-
- def view_exists?(view_name) # :nodoc:
- return false unless view_name.present?
-
- schema, name = view_name.to_s.split('.', 2)
- schema, name = @config[:database], schema unless name # A view was provided without a schema
-
- sql = "SELECT table_name FROM information_schema.tables WHERE table_type = 'VIEW'"
- sql << " AND table_schema = #{quote(schema)} AND table_name = #{quote(name)}"
-
- select_values(sql, 'SCHEMA').any?
- end
-
- # Returns an array of indexes for the given table.
- def indexes(table_name, name = nil) #:nodoc:
- indexes = []
- current_index = nil
- execute_and_free("SHOW KEYS FROM #{quote_table_name(table_name)}", 'SCHEMA') do |result|
- each_hash(result) do |row|
- if current_index != row[:Key_name]
- next if row[:Key_name] == 'PRIMARY' # skip the primary key
- current_index = row[:Key_name]
-
- mysql_index_type = row[:Index_type].downcase.to_sym
- index_type = INDEX_TYPES.include?(mysql_index_type) ? mysql_index_type : nil
- index_using = INDEX_USINGS.include?(mysql_index_type) ? mysql_index_type : nil
- indexes << IndexDefinition.new(row[:Table], row[:Key_name], row[:Non_unique].to_i == 0, [], [], nil, nil, index_type, index_using)
- end
-
- indexes.last.columns << row[:Column_name]
- indexes.last.lengths << row[:Sub_part]
- end
- end
-
- indexes
- end
-
- # Returns an array of +Column+ objects for the table specified by +table_name+.
- def columns(table_name) # :nodoc:
- table_name = table_name.to_s
- column_definitions(table_name).map do |field|
- type_metadata = fetch_type_metadata(field[:Type], field[:Extra])
- if type_metadata.type == :datetime && field[:Default] == "CURRENT_TIMESTAMP"
- default, default_function = nil, field[:Default]
- else
- default, default_function = field[:Default], nil
- end
- new_column(field[:Field], default, type_metadata, field[:Null] == "YES", table_name, default_function, field[:Collation])
- end
- end
-
- def create_table(table_name, options = {}) #:nodoc:
- super(table_name, options.reverse_merge(:options => "ENGINE=InnoDB"))
+ query_value(<<~SQL, "SCHEMA").presence
+ SELECT table_comment
+ FROM information_schema.tables
+ WHERE table_schema = #{scope[:schema]}
+ AND table_name = #{scope[:name]}
+ SQL
end
def bulk_change_table(table_name, operations) #:nodoc:
sqls = operations.flat_map do |command, args|
table, arguments = args.shift, args
- method = :"#{command}_sql"
+ method = :"#{command}_for_alter"
if respond_to?(method, true)
send(method, table, *arguments)
@@ -439,6 +300,11 @@ module ActiveRecord
execute("ALTER TABLE #{quote_table_name(table_name)} #{sqls}")
end
+ def change_table_comment(table_name, comment) #:nodoc:
+ comment = "" if comment.nil?
+ execute("ALTER TABLE #{quote_table_name(table_name)} COMMENT #{quote(comment)}")
+ end
+
# Renames a table.
#
# Example:
@@ -464,7 +330,6 @@ module ActiveRecord
# it can be helpful to provide these in a migration's +change+ method so it can be reverted.
# In that case, +options+ and the block will be used by create_table.
def drop_table(table_name, options = {})
- create_table_info_cache.delete(table_name) if create_table_info_cache.key?(table_name)
execute "DROP#{' TEMPORARY' if options[:temporary]} TABLE#{' IF EXISTS' if options[:if_exists]} #{quote_table_name(table_name)}#{' CASCADE' if options[:force] == :cascade}"
end
@@ -480,99 +345,101 @@ module ActiveRecord
def change_column_default(table_name, column_name, default_or_changes) #:nodoc:
default = extract_new_default_value(default_or_changes)
- column = column_for(table_name, column_name)
- change_column table_name, column_name, column.sql_type, :default => default
+ change_column table_name, column_name, nil, default: default
end
def change_column_null(table_name, column_name, null, default = nil) #:nodoc:
- column = column_for(table_name, column_name)
-
unless null || default.nil?
execute("UPDATE #{quote_table_name(table_name)} SET #{quote_column_name(column_name)}=#{quote(default)} WHERE #{quote_column_name(column_name)} IS NULL")
end
- change_column table_name, column_name, column.sql_type, :null => null
+ change_column table_name, column_name, nil, null: null
+ end
+
+ def change_column_comment(table_name, column_name, comment) #:nodoc:
+ change_column table_name, column_name, nil, comment: comment
end
def change_column(table_name, column_name, type, options = {}) #:nodoc:
- execute("ALTER TABLE #{quote_table_name(table_name)} #{change_column_sql(table_name, column_name, type, options)}")
+ execute("ALTER TABLE #{quote_table_name(table_name)} #{change_column_for_alter(table_name, column_name, type, options)}")
end
def rename_column(table_name, column_name, new_column_name) #:nodoc:
- execute("ALTER TABLE #{quote_table_name(table_name)} #{rename_column_sql(table_name, column_name, new_column_name)}")
+ execute("ALTER TABLE #{quote_table_name(table_name)} #{rename_column_for_alter(table_name, column_name, new_column_name)}")
rename_column_indexes(table_name, column_name, new_column_name)
end
def add_index(table_name, column_name, options = {}) #:nodoc:
- index_name, index_type, index_columns, _, index_algorithm, index_using = add_index_options(table_name, column_name, options)
- execute "CREATE #{index_type} INDEX #{quote_column_name(index_name)} #{index_using} ON #{quote_table_name(table_name)} (#{index_columns}) #{index_algorithm}"
+ index_name, index_type, index_columns, _, index_algorithm, index_using, comment = add_index_options(table_name, column_name, options)
+ sql = +"CREATE #{index_type} INDEX #{quote_column_name(index_name)} #{index_using} ON #{quote_table_name(table_name)} (#{index_columns}) #{index_algorithm}"
+ execute add_sql_comment!(sql, comment)
+ end
+
+ def add_sql_comment!(sql, comment) # :nodoc:
+ sql << " COMMENT #{quote(comment)}" if comment.present?
+ sql
end
def foreign_keys(table_name)
- fk_info = select_all <<-SQL.strip_heredoc
- SELECT fk.referenced_table_name as 'to_table'
- ,fk.referenced_column_name as 'primary_key'
- ,fk.column_name as 'column'
- ,fk.constraint_name as 'name'
- FROM information_schema.key_column_usage fk
- WHERE fk.referenced_column_name is not null
- AND fk.table_schema = '#{@config[:database]}'
- AND fk.table_name = '#{table_name}'
- SQL
+ raise ArgumentError unless table_name.present?
- create_table_info = create_table_info(table_name)
+ scope = quoted_scope(table_name)
+
+ fk_info = exec_query(<<~SQL, "SCHEMA")
+ SELECT fk.referenced_table_name AS 'to_table',
+ fk.referenced_column_name AS 'primary_key',
+ fk.column_name AS 'column',
+ fk.constraint_name AS 'name',
+ rc.update_rule AS 'on_update',
+ rc.delete_rule AS 'on_delete'
+ FROM information_schema.referential_constraints rc
+ JOIN information_schema.key_column_usage fk
+ USING (constraint_schema, constraint_name)
+ WHERE fk.referenced_column_name IS NOT NULL
+ AND fk.table_schema = #{scope[:schema]}
+ AND fk.table_name = #{scope[:name]}
+ AND rc.constraint_schema = #{scope[:schema]}
+ AND rc.table_name = #{scope[:name]}
+ SQL
fk_info.map do |row|
options = {
- column: row['column'],
- name: row['name'],
- primary_key: row['primary_key']
+ column: row["column"],
+ name: row["name"],
+ primary_key: row["primary_key"]
}
- options[:on_update] = extract_foreign_key_action(create_table_info, row['name'], "UPDATE")
- options[:on_delete] = extract_foreign_key_action(create_table_info, row['name'], "DELETE")
+ options[:on_update] = extract_foreign_key_action(row["on_update"])
+ options[:on_delete] = extract_foreign_key_action(row["on_delete"])
- ForeignKeyDefinition.new(table_name, row['to_table'], options)
+ ForeignKeyDefinition.new(table_name, row["to_table"], options)
end
end
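
For illustration (not part of the patch): by joining information_schema.referential_constraints, the adapter can now report ON UPDATE / ON DELETE rules instead of parsing SHOW CREATE TABLE. A hedged sketch of the shape of the result (table and constraint names are made up, inspect output abbreviated):

  conn = ActiveRecord::Base.connection
  conn.foreign_keys("comments").first
  # => #<ForeignKeyDefinition
  #      from_table: "comments", to_table: "posts",
  #      options: { column: "post_id", name: "fk_rails_2fd19c0db7",
  #                 primary_key: "id", on_update: nil, on_delete: :cascade }>
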
- def table_options(table_name)
+ def table_options(table_name) # :nodoc:
+ table_options = {}
+
create_table_info = create_table_info(table_name)
# strip create_definitions and partition_options
- raw_table_options = create_table_info.sub(/\A.*\n\) /m, '').sub(/\n\/\*!.*\*\/\n\z/m, '').strip
+ raw_table_options = create_table_info.sub(/\A.*\n\) /m, "").sub(/\n\/\*!.*\*\/\n\z/m, "").strip
# strip AUTO_INCREMENT
- raw_table_options.sub(/(ENGINE=\w+)(?: AUTO_INCREMENT=\d+)/, '\1')
- end
-
- # Maps logical Rails types to MySQL-specific data types.
- def type_to_sql(type, limit = nil, precision = nil, scale = nil, unsigned = nil)
- sql = case type.to_s
- when 'integer'
- integer_to_sql(limit)
- when 'text'
- text_to_sql(limit)
- when 'blob'
- binary_to_sql(limit)
- when 'binary'
- if (0..0xfff) === limit
- "varbinary(#{limit})"
- else
- binary_to_sql(limit)
- end
- else
- super(type, limit, precision, scale)
+ raw_table_options.sub!(/(ENGINE=\w+)(?: AUTO_INCREMENT=\d+)/, '\1')
+
+ table_options[:options] = raw_table_options
+
+ # strip COMMENT
+ if raw_table_options.sub!(/ COMMENT='.+'/, "")
+ table_options[:comment] = table_comment(table_name)
end
- sql << ' unsigned' if unsigned && type != :primary_key
- sql
+ table_options
end
# SHOW VARIABLES LIKE 'name'
def show_variable(name)
- variables = select_all("select @@#{name} as 'Value'", 'SCHEMA')
- variables.first['Value'] unless variables.empty?
+ query_value("SELECT @@#{name}", "SCHEMA")
rescue ActiveRecord::StatementInvalid
nil
end
@@ -580,22 +447,38 @@ module ActiveRecord
def primary_keys(table_name) # :nodoc:
raise ArgumentError unless table_name.present?
- schema, name = table_name.to_s.split('.', 2)
- schema, name = @config[:database], schema unless name # A table was provided without a schema
+ scope = quoted_scope(table_name)
- select_values(<<-SQL.strip_heredoc, 'SCHEMA')
+ query_values(<<~SQL, "SCHEMA")
SELECT column_name
FROM information_schema.key_column_usage
WHERE constraint_name = 'PRIMARY'
- AND table_schema = #{quote(schema)}
- AND table_name = #{quote(name)}
+ AND table_schema = #{scope[:schema]}
+ AND table_name = #{scope[:name]}
ORDER BY ordinal_position
SQL
end
- def case_sensitive_comparison(table, attribute, column, value)
- if !value.nil? && column.collation && !column.case_sensitive?
- table[attribute].eq(Arel::Nodes::Bin.new(Arel::Nodes::BindParam.new))
+ def default_uniqueness_comparison(attribute, value, klass) # :nodoc:
+ column = column_for_attribute(attribute)
+
+ if column.collation && !column.case_sensitive? && !value.nil?
+ ActiveSupport::Deprecation.warn(<<~MSG.squish)
+ Uniqueness validator will no longer enforce case sensitive comparison in Rails 6.1.
+ To continue case sensitive comparison on the :#{attribute.name} attribute in #{klass} model,
+ pass `case_sensitive: true` option explicitly to the uniqueness validator.
+ MSG
+ attribute.eq(Arel::Nodes::Bin.new(value))
+ else
+ super
+ end
+ end
+
+ def case_sensitive_comparison(attribute, value) # :nodoc:
+ column = column_for_attribute(attribute)
+
+ if column.collation && !column.case_sensitive?
+ attribute.eq(Arel::Nodes::Bin.new(value))
else
super
end
@@ -615,350 +498,334 @@ module ActiveRecord
# Convert Arel node to string
s = s.to_sql unless s.is_a?(String)
# Remove any ASC/DESC modifiers
- s.gsub(/\s+(?:ASC|DESC)\b/i, '')
+ s.gsub(/\s+(?:ASC|DESC)\b/i, "")
}.reject(&:blank?).map.with_index { |column, i| "#{column} AS alias_#{i}" }
- [super, *order_columns].join(', ')
+ (order_columns << super).join(", ")
end
def strict_mode?
self.class.type_cast_config_to_boolean(@config.fetch(:strict, true))
end
- def valid_type?(type)
- !native_database_types[type].nil?
+ def default_index_type?(index) # :nodoc:
+ index.using == :btree || super
end
- protected
-
- def initialize_type_map(m) # :nodoc:
- super
+ def build_insert_sql(insert) # :nodoc:
+ sql = +"INSERT #{insert.into} #{insert.values_list}"
- register_class_with_limit m, %r(char)i, MysqlString
-
- m.register_type %r(tinytext)i, Type::Text.new(limit: 2**8 - 1)
- m.register_type %r(tinyblob)i, Type::Binary.new(limit: 2**8 - 1)
- m.register_type %r(text)i, Type::Text.new(limit: 2**16 - 1)
- m.register_type %r(blob)i, Type::Binary.new(limit: 2**16 - 1)
- m.register_type %r(mediumtext)i, Type::Text.new(limit: 2**24 - 1)
- m.register_type %r(mediumblob)i, Type::Binary.new(limit: 2**24 - 1)
- m.register_type %r(longtext)i, Type::Text.new(limit: 2**32 - 1)
- m.register_type %r(longblob)i, Type::Binary.new(limit: 2**32 - 1)
- m.register_type %r(^float)i, Type::Float.new(limit: 24)
- m.register_type %r(^double)i, Type::Float.new(limit: 53)
- m.register_type %r(^json)i, MysqlJson.new
-
- register_integer_type m, %r(^bigint)i, limit: 8
- register_integer_type m, %r(^int)i, limit: 4
- register_integer_type m, %r(^mediumint)i, limit: 3
- register_integer_type m, %r(^smallint)i, limit: 2
- register_integer_type m, %r(^tinyint)i, limit: 1
-
- m.register_type %r(^tinyint\(1\))i, Type::Boolean.new if emulate_booleans
- m.alias_type %r(year)i, 'integer'
- m.alias_type %r(bit)i, 'binary'
-
- m.register_type(%r(enum)i) do |sql_type|
- limit = sql_type[/^enum\((.+)\)/i, 1]
- .split(',').map{|enum| enum.strip.length - 2}.max
- MysqlString.new(limit: limit)
+ if insert.skip_duplicates?
+ any_column = quote_column_name(insert.model.columns.first.name)
+ sql << " ON DUPLICATE KEY UPDATE #{any_column}=#{any_column}"
+ elsif insert.update_duplicates?
+ sql << " ON DUPLICATE KEY UPDATE "
+ sql << insert.updatable_columns.map { |column| "#{column}=VALUES(#{column})" }.join(",")
end
- m.register_type(%r(^set)i) do |sql_type|
- limit = sql_type[/^set\((.+)\)/i, 1]
- .split(',').map{|set| set.strip.length - 1}.sum - 1
- MysqlString.new(limit: limit)
- end
+ sql
end
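
For illustration (not part of the patch): this override is what `insert_all` / `upsert_all` go through on MySQL; the skip case appends a no-op `ON DUPLICATE KEY UPDATE`, while the update case assigns each updatable column from VALUES(). A hedged sketch of the SQL shapes, using a made-up Book model (quoting details elided):

  Book.insert_all!([{ id: 1, title: "Rework" }])
  # => INSERT INTO books (id,title) VALUES (1, 'Rework')

  Book.insert_all([{ id: 1, title: "Rework" }])   # skip_duplicates?
  # => ... ON DUPLICATE KEY UPDATE id=id           (no-op, existing row kept)

  Book.upsert_all([{ id: 1, title: "Rework" }])   # update_duplicates?
  # => ... ON DUPLICATE KEY UPDATE title=VALUES(title), ...
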
- def register_integer_type(mapping, key, options) # :nodoc:
- mapping.register_type(key) do |sql_type|
- if /\bunsigned\z/ === sql_type
- Type::UnsignedInteger.new(options)
- else
- Type::Integer.new(options)
+ private
+ def check_version
+ if version < "5.5.8"
+ raise "Your version of MySQL (#{version_string}) is too old. Active Record supports MySQL >= 5.5.8."
end
end
- end
- def extract_precision(sql_type)
- if /time/ === sql_type
- super || 0
- else
+ def initialize_type_map(m = type_map)
super
- end
- end
- def fetch_type_metadata(sql_type, extra = "")
- MySQL::TypeMetadata.new(super(sql_type), extra: extra, strict: strict_mode?)
- end
+ register_class_with_limit m, %r(char)i, MysqlString
+
+ m.register_type %r(tinytext)i, Type::Text.new(limit: 2**8 - 1)
+ m.register_type %r(tinyblob)i, Type::Binary.new(limit: 2**8 - 1)
+ m.register_type %r(text)i, Type::Text.new(limit: 2**16 - 1)
+ m.register_type %r(blob)i, Type::Binary.new(limit: 2**16 - 1)
+ m.register_type %r(mediumtext)i, Type::Text.new(limit: 2**24 - 1)
+ m.register_type %r(mediumblob)i, Type::Binary.new(limit: 2**24 - 1)
+ m.register_type %r(longtext)i, Type::Text.new(limit: 2**32 - 1)
+ m.register_type %r(longblob)i, Type::Binary.new(limit: 2**32 - 1)
+ m.register_type %r(^float)i, Type::Float.new(limit: 24)
+ m.register_type %r(^double)i, Type::Float.new(limit: 53)
+
+ register_integer_type m, %r(^bigint)i, limit: 8
+ register_integer_type m, %r(^int)i, limit: 4
+ register_integer_type m, %r(^mediumint)i, limit: 3
+ register_integer_type m, %r(^smallint)i, limit: 2
+ register_integer_type m, %r(^tinyint)i, limit: 1
+
+ m.register_type %r(^tinyint\(1\))i, Type::Boolean.new if emulate_booleans
+ m.alias_type %r(year)i, "integer"
+ m.alias_type %r(bit)i, "binary"
+
+ m.register_type(%r(enum)i) do |sql_type|
+ limit = sql_type[/^enum\s*\((.+)\)/i, 1]
+ .split(",").map { |enum| enum.strip.length - 2 }.max
+ MysqlString.new(limit: limit)
+ end
- def add_index_length(option_strings, column_names, options = {})
- if options.is_a?(Hash) && length = options[:length]
- case length
- when Hash
- column_names.each {|name| option_strings[name] += "(#{length[name]})" if length.has_key?(name) && length[name].present?}
- when Fixnum
- column_names.each {|name| option_strings[name] += "(#{length})"}
+ m.register_type(%r(^set)i) do |sql_type|
+ limit = sql_type[/^set\s*\((.+)\)/i, 1]
+ .split(",").map { |set| set.strip.length - 1 }.sum - 1
+ MysqlString.new(limit: limit)
end
end
- return option_strings
- end
-
- def quoted_columns_for_index(column_names, options = {})
- option_strings = Hash[column_names.map {|name| [name, '']}]
-
- # add index length
- option_strings = add_index_length(option_strings, column_names, options)
-
- # add index sort order
- option_strings = add_index_sort_order(option_strings, column_names, options)
-
- column_names.map {|name| quote_column_name(name) + option_strings[name]}
- end
-
- def translate_exception(exception, message)
- case error_number(exception)
- when 1062
- RecordNotUnique.new(message)
- when 1452
- InvalidForeignKey.new(message)
- else
- super
+ def register_integer_type(mapping, key, options)
+ mapping.register_type(key) do |sql_type|
+ if /\bunsigned\b/.match?(sql_type)
+ Type::UnsignedInteger.new(options)
+ else
+ Type::Integer.new(options)
+ end
+ end
end
- end
-
- def add_column_sql(table_name, column_name, type, options = {})
- td = create_table_definition(table_name)
- cd = td.new_column_definition(column_name, type, options)
- schema_creation.accept(AddColumnDefinition.new(cd))
- end
- def change_column_sql(table_name, column_name, type, options = {})
- column = column_for(table_name, column_name)
-
- unless options_include_default?(options)
- options[:default] = column.default
+ def extract_precision(sql_type)
+ if /\A(?:date)?time(?:stamp)?\b/.match?(sql_type)
+ super || 0
+ else
+ super
+ end
end
- unless options.has_key?(:null)
- options[:null] = column.null
+ # See https://dev.mysql.com/doc/refman/5.7/en/error-messages-server.html
+ ER_DUP_ENTRY = 1062
+ ER_NOT_NULL_VIOLATION = 1048
+ ER_NO_REFERENCED_ROW = 1216
+ ER_ROW_IS_REFERENCED = 1217
+ ER_DO_NOT_HAVE_DEFAULT = 1364
+ ER_ROW_IS_REFERENCED_2 = 1451
+ ER_NO_REFERENCED_ROW_2 = 1452
+ ER_DATA_TOO_LONG = 1406
+ ER_OUT_OF_RANGE = 1264
+ ER_LOCK_DEADLOCK = 1213
+ ER_CANNOT_ADD_FOREIGN = 1215
+ ER_CANNOT_CREATE_TABLE = 1005
+ ER_LOCK_WAIT_TIMEOUT = 1205
+ ER_QUERY_INTERRUPTED = 1317
+ ER_QUERY_TIMEOUT = 3024
+ ER_FK_INCOMPATIBLE_COLUMNS = 3780
+
+ def translate_exception(exception, message:, sql:, binds:)
+ case error_number(exception)
+ when ER_DUP_ENTRY
+ RecordNotUnique.new(message, sql: sql, binds: binds)
+ when ER_NO_REFERENCED_ROW, ER_ROW_IS_REFERENCED, ER_ROW_IS_REFERENCED_2, ER_NO_REFERENCED_ROW_2
+ InvalidForeignKey.new(message, sql: sql, binds: binds)
+ when ER_CANNOT_ADD_FOREIGN, ER_FK_INCOMPATIBLE_COLUMNS
+ mismatched_foreign_key(message, sql: sql, binds: binds)
+ when ER_CANNOT_CREATE_TABLE
+ if message.include?("errno: 150")
+ mismatched_foreign_key(message, sql: sql, binds: binds)
+ else
+ super
+ end
+ when ER_DATA_TOO_LONG
+ ValueTooLong.new(message, sql: sql, binds: binds)
+ when ER_OUT_OF_RANGE
+ RangeError.new(message, sql: sql, binds: binds)
+ when ER_NOT_NULL_VIOLATION, ER_DO_NOT_HAVE_DEFAULT
+ NotNullViolation.new(message, sql: sql, binds: binds)
+ when ER_LOCK_DEADLOCK
+ Deadlocked.new(message, sql: sql, binds: binds)
+ when ER_LOCK_WAIT_TIMEOUT
+ LockWaitTimeout.new(message, sql: sql, binds: binds)
+ when ER_QUERY_TIMEOUT
+ StatementTimeout.new(message, sql: sql, binds: binds)
+ when ER_QUERY_INTERRUPTED
+ QueryCanceled.new(message, sql: sql, binds: binds)
+ else
+ super
+ end
end
- td = create_table_definition(table_name)
- cd = td.new_column_definition(column.name, type, options)
- schema_creation.accept(ChangeColumnDefinition.new(cd, column.name))
- end
+ def change_column_for_alter(table_name, column_name, type, options = {})
+ column = column_for(table_name, column_name)
+ type ||= column.sql_type
- def rename_column_sql(table_name, column_name, new_column_name)
- column = column_for(table_name, column_name)
- options = {
- default: column.default,
- null: column.null,
- auto_increment: column.auto_increment?
- }
-
- current_type = select_one("SHOW COLUMNS FROM #{quote_table_name(table_name)} LIKE '#{column_name}'", 'SCHEMA')["Type"]
- td = create_table_definition(table_name)
- cd = td.new_column_definition(new_column_name, current_type, options)
- schema_creation.accept(ChangeColumnDefinition.new(cd, column.name))
- end
+ unless options.key?(:default)
+ options[:default] = column.default
+ end
- def remove_column_sql(table_name, column_name, type = nil, options = {})
- "DROP #{quote_column_name(column_name)}"
- end
+ unless options.key?(:null)
+ options[:null] = column.null
+ end
- def remove_columns_sql(table_name, *column_names)
- column_names.map {|column_name| remove_column_sql(table_name, column_name) }
- end
+ unless options.key?(:comment)
+ options[:comment] = column.comment
+ end
- def add_index_sql(table_name, column_name, options = {})
- index_name, index_type, index_columns, _, index_algorithm, index_using = add_index_options(table_name, column_name, options)
- index_algorithm[0, 0] = ", " if index_algorithm.present?
- "ADD #{index_type} INDEX #{quote_column_name(index_name)} #{index_using} (#{index_columns})#{index_algorithm}"
- end
+ td = create_table_definition(table_name)
+ cd = td.new_column_definition(column.name, type, options)
+ schema_creation.accept(ChangeColumnDefinition.new(cd, column.name))
+ end
- def remove_index_sql(table_name, options = {})
- index_name = index_name_for_remove(table_name, options)
- "DROP INDEX #{index_name}"
- end
+ def rename_column_for_alter(table_name, column_name, new_column_name)
+ column = column_for(table_name, column_name)
+ options = {
+ default: column.default,
+ null: column.null,
+ auto_increment: column.auto_increment?
+ }
- def add_timestamps_sql(table_name, options = {})
- [add_column_sql(table_name, :created_at, :datetime, options), add_column_sql(table_name, :updated_at, :datetime, options)]
- end
+ current_type = exec_query("SHOW COLUMNS FROM #{quote_table_name(table_name)} LIKE #{quote(column_name)}", "SCHEMA").first["Type"]
+ td = create_table_definition(table_name)
+ cd = td.new_column_definition(new_column_name, current_type, options)
+ schema_creation.accept(ChangeColumnDefinition.new(cd, column.name))
+ end
- def remove_timestamps_sql(table_name, options = {})
- [remove_column_sql(table_name, :updated_at), remove_column_sql(table_name, :created_at)]
- end
+ def add_index_for_alter(table_name, column_name, options = {})
+ index_name, index_type, index_columns, _, index_algorithm, index_using = add_index_options(table_name, column_name, options)
+ index_algorithm[0, 0] = ", " if index_algorithm.present?
+ "ADD #{index_type} INDEX #{quote_column_name(index_name)} #{index_using} (#{index_columns})#{index_algorithm}"
+ end
- private
+ def remove_index_for_alter(table_name, options = {})
+ index_name = index_name_for_remove(table_name, options)
+ "DROP INDEX #{quote_column_name(index_name)}"
+ end
- # MySQL is too stupid to create a temporary table for use subquery, so we have
- # to give it some prompting in the form of a subsubquery. Ugh!
- def subquery_for(key, select)
- subsubselect = select.clone
- subsubselect.projections = [key]
+ def add_timestamps_for_alter(table_name, options = {})
+ options[:null] = false if options[:null].nil?
- # Materialize subquery by adding distinct
- # to work with MySQL 5.7.6 which sets optimizer_switch='derived_merge=on'
- subsubselect.distinct unless select.limit || select.offset || select.orders.any?
+ if !options.key?(:precision) && supports_datetime_with_precision?
+ options[:precision] = 6
+ end
- subselect = Arel::SelectManager.new(select.engine)
- subselect.project Arel.sql(key.name)
- subselect.from subsubselect.as('__active_record_temp')
- end
+ [add_column_for_alter(table_name, :created_at, :datetime, options), add_column_for_alter(table_name, :updated_at, :datetime, options)]
+ end
- def mariadb?
- full_version =~ /mariadb/i
- end
+ def remove_timestamps_for_alter(table_name, options = {})
+ [remove_column_for_alter(table_name, :updated_at), remove_column_for_alter(table_name, :created_at)]
+ end
- def supports_rename_index?
- mariadb? ? false : version >= '5.7.6'
- end
+ def supports_rename_index?
+ mariadb? ? false : version >= "5.7.6"
+ end
- def configure_connection
- variables = @config.fetch(:variables, {}).stringify_keys
+ def configure_connection
+ variables = @config.fetch(:variables, {}).stringify_keys
- # By default, MySQL 'where id is null' selects the last inserted id; Turn this off.
- variables['sql_auto_is_null'] = 0
+ # By default, MySQL 'where id is null' selects the last inserted id; Turn this off.
+ variables["sql_auto_is_null"] = 0
- # Increase timeout so the server doesn't disconnect us.
- wait_timeout = @config[:wait_timeout]
- wait_timeout = 2147483 unless wait_timeout.is_a?(Fixnum)
- variables['wait_timeout'] = self.class.type_cast_config_to_integer(wait_timeout)
+ # Increase timeout so the server doesn't disconnect us.
+ wait_timeout = self.class.type_cast_config_to_integer(@config[:wait_timeout])
+ wait_timeout = 2147483 unless wait_timeout.is_a?(Integer)
+ variables["wait_timeout"] = wait_timeout
- defaults = [':default', :default].to_set
+ defaults = [":default", :default].to_set
- # Make MySQL reject illegal values rather than truncating or blanking them, see
- # http://dev.mysql.com/doc/refman/5.7/en/sql-mode.html#sqlmode_strict_all_tables
- # If the user has provided another value for sql_mode, don't replace it.
- if sql_mode = variables.delete('sql_mode')
- sql_mode = quote(sql_mode)
- elsif !defaults.include?(strict_mode?)
- if strict_mode?
- sql_mode = "CONCAT(@@sql_mode, ',STRICT_ALL_TABLES')"
- else
- sql_mode = "REPLACE(@@sql_mode, 'STRICT_TRANS_TABLES', '')"
- sql_mode = "REPLACE(#{sql_mode}, 'STRICT_ALL_TABLES', '')"
- sql_mode = "REPLACE(#{sql_mode}, 'TRADITIONAL', '')"
+ # Make MySQL reject illegal values rather than truncating or blanking them, see
+ # https://dev.mysql.com/doc/refman/5.7/en/sql-mode.html#sqlmode_strict_all_tables
+ # If the user has provided another value for sql_mode, don't replace it.
+ if sql_mode = variables.delete("sql_mode")
+ sql_mode = quote(sql_mode)
+ elsif !defaults.include?(strict_mode?)
+ if strict_mode?
+ sql_mode = "CONCAT(@@sql_mode, ',STRICT_ALL_TABLES')"
+ else
+ sql_mode = "REPLACE(@@sql_mode, 'STRICT_TRANS_TABLES', '')"
+ sql_mode = "REPLACE(#{sql_mode}, 'STRICT_ALL_TABLES', '')"
+ sql_mode = "REPLACE(#{sql_mode}, 'TRADITIONAL', '')"
+ end
+ sql_mode = "CONCAT(#{sql_mode}, ',NO_AUTO_VALUE_ON_ZERO')"
end
- sql_mode = "CONCAT(#{sql_mode}, ',NO_AUTO_VALUE_ON_ZERO')"
- end
- sql_mode_assignment = "@@SESSION.sql_mode = #{sql_mode}, " if sql_mode
-
- # NAMES does not have an equals sign, see
- # http://dev.mysql.com/doc/refman/5.7/en/set-statement.html#id944430
- # (trailing comma because variable_assignments will always have content)
- if @config[:encoding]
- encoding = "NAMES #{@config[:encoding]}"
- encoding << " COLLATE #{@config[:collation]}" if @config[:collation]
- encoding << ", "
- end
-
- # Gather up all of the SET variables...
- variable_assignments = variables.map do |k, v|
- if defaults.include?(v)
- "@@SESSION.#{k} = DEFAULT" # Sets the value to the global or compile default
- elsif !v.nil?
- "@@SESSION.#{k} = #{quote(v)}"
+ sql_mode_assignment = "@@SESSION.sql_mode = #{sql_mode}, " if sql_mode
+
+ # NAMES does not have an equals sign, see
+ # https://dev.mysql.com/doc/refman/5.7/en/set-names.html
+ # (trailing comma because variable_assignments will always have content)
+ if @config[:encoding]
+ encoding = +"NAMES #{@config[:encoding]}"
+ encoding << " COLLATE #{@config[:collation]}" if @config[:collation]
+ encoding << ", "
end
- # or else nil; compact to clear nils out
- end.compact.join(', ')
- # ...and send them all in one query
- @connection.query "SET #{encoding} #{sql_mode_assignment} #{variable_assignments}"
- end
+ # Gather up all of the SET variables...
+ variable_assignments = variables.map do |k, v|
+ if defaults.include?(v)
+ "@@SESSION.#{k} = DEFAULT" # Sets the value to the global or compile default
+ elsif !v.nil?
+ "@@SESSION.#{k} = #{quote(v)}"
+ end
+ # or else nil; compact to clear nils out
+ end.compact.join(", ")
- def column_definitions(table_name) # :nodoc:
- execute_and_free("SHOW FULL FIELDS FROM #{quote_table_name(table_name)}", 'SCHEMA') do |result|
- each_hash(result)
+ # ...and send them all in one query
+ execute "SET #{encoding} #{sql_mode_assignment} #{variable_assignments}"
end
- end
- def extract_foreign_key_action(structure, name, action) # :nodoc:
- if structure =~ /CONSTRAINT #{quote_column_name(name)} FOREIGN KEY .* REFERENCES .* ON #{action} (CASCADE|SET NULL|RESTRICT)/
- case $1
- when 'CASCADE'; :cascade
- when 'SET NULL'; :nullify
+ def column_definitions(table_name) # :nodoc:
+ execute_and_free("SHOW FULL FIELDS FROM #{quote_table_name(table_name)}", "SCHEMA") do |result|
+ each_hash(result)
end
end
- end
-
- def create_table_info_cache # :nodoc:
- @create_table_info_cache ||= {}
- end
-
- def create_table_info(table_name) # :nodoc:
- create_table_info_cache[table_name] ||= select_one("SHOW CREATE TABLE #{quote_table_name(table_name)}")["Create Table"]
- end
- def create_table_definition(name, temporary = false, options = nil, as = nil) # :nodoc:
- MySQL::TableDefinition.new(name, temporary, options, as)
- end
-
- def integer_to_sql(limit) # :nodoc:
- case limit
- when 1; 'tinyint'
- when 2; 'smallint'
- when 3; 'mediumint'
- when nil, 4; 'int'
- when 5..8; 'bigint'
- else raise(ActiveRecordError, "No integer type has byte size #{limit}")
+ def create_table_info(table_name) # :nodoc:
+ exec_query("SHOW CREATE TABLE #{quote_table_name(table_name)}", "SCHEMA").first["Create Table"]
end
- end
- def text_to_sql(limit) # :nodoc:
- case limit
- when 0..0xff; 'tinytext'
- when nil, 0x100..0xffff; 'text'
- when 0x10000..0xffffff; 'mediumtext'
- when 0x1000000..0xffffffff; 'longtext'
- else raise(ActiveRecordError, "No text type has byte length #{limit}")
+ def arel_visitor
+ Arel::Visitors::MySQL.new(self)
end
- end
- def binary_to_sql(limit) # :nodoc:
- case limit
- when 0..0xff; 'tinyblob'
- when nil, 0x100..0xffff; 'blob'
- when 0x10000..0xffffff; 'mediumblob'
- when 0x1000000..0xffffffff; 'longblob'
- else raise(ActiveRecordError, "No binary type has byte length #{limit}")
+ def build_statement_pool
+ StatementPool.new(self.class.type_cast_config_to_integer(@config[:statement_limit]))
end
- end
- class MysqlJson < Type::Internal::AbstractJson # :nodoc:
- def changed_in_place?(raw_old_value, new_value)
- # Normalization is required because MySQL JSON data format includes
- # the space between the elements.
- super(serialize(deserialize(raw_old_value)), new_value)
- end
- end
+ def mismatched_foreign_key(message, sql:, binds:)
+ match = %r/
+ (?:CREATE|ALTER)\s+TABLE\s*(?:`?\w+`?\.)?`?(?<table>\w+)`?.+?
+ FOREIGN\s+KEY\s*\(`?(?<foreign_key>\w+)`?\)\s*
+ REFERENCES\s*(`?(?<target_table>\w+)`?)\s*\(`?(?<primary_key>\w+)`?\)
+ /xmi.match(sql)
- class MysqlString < Type::String # :nodoc:
- def serialize(value)
- case value
- when true then "1"
- when false then "0"
- else super
+ options = {
+ message: message,
+ sql: sql,
+ binds: binds,
+ }
+
+ if match
+ options[:table] = match[:table]
+ options[:foreign_key] = match[:foreign_key]
+ options[:target_table] = match[:target_table]
+ options[:primary_key] = match[:primary_key]
+ options[:primary_key_column] = column_for(match[:target_table], match[:primary_key])
end
+
+ MismatchedForeignKey.new(options)
end
- private
+ def version_string
+ full_version.match(/^(?:5\.5\.5-)?(\d+\.\d+\.\d+)/)[1]
+ end
- def cast_value(value)
- case value
- when true then "1"
- when false then "0"
- else super
+ class MysqlString < Type::String # :nodoc:
+ def serialize(value)
+ case value
+ when true then "1"
+ when false then "0"
+ else super
+ end
end
+
+ private
+
+ def cast_value(value)
+ case value
+ when true then "1"
+ when false then "0"
+ else super
+ end
+ end
end
- end
- ActiveRecord::Type.register(:json, MysqlJson, adapter: :mysql2)
- ActiveRecord::Type.register(:string, MysqlString, adapter: :mysql2)
- ActiveRecord::Type.register(:unsigned_integer, Type::UnsignedInteger, adapter: :mysql2)
+ ActiveRecord::Type.register(:string, MysqlString, adapter: :mysql2)
+ ActiveRecord::Type.register(:unsigned_integer, Type::UnsignedInteger, adapter: :mysql2)
end
end
end
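
The rewritten translate_exception above dispatches on raw MySQL server error numbers rather than matching message text. A minimal standalone sketch of the same dispatch pattern; the exception classes below are plain stand-ins, not ActiveRecord's:

    # Number-based error dispatch, mirroring the ER_* constants in the diff above.
    ER_DUP_ENTRY     = 1062
    ER_LOCK_DEADLOCK = 1213
    ER_DATA_TOO_LONG = 1406

    RecordNotUnique = Class.new(StandardError)
    Deadlocked      = Class.new(StandardError)
    ValueTooLong    = Class.new(StandardError)

    def translate(error_number, message)
      case error_number
      when ER_DUP_ENTRY     then RecordNotUnique.new(message)
      when ER_LOCK_DEADLOCK then Deadlocked.new(message)
      when ER_DATA_TOO_LONG then ValueTooLong.new(message)
      else StandardError.new(message)
      end
    end

    p translate(1062, "Duplicate entry 'foo' for key 'index_users_on_email'")
    # => #<RecordNotUnique: Duplicate entry 'foo' for key 'index_users_on_email'>

Mapping on numeric codes keeps the translation stable across server locales and message-wording changes.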
diff --git a/activerecord/lib/active_record/connection_adapters/column.rb b/activerecord/lib/active_record/connection_adapters/column.rb
index 2e718b29fa..5d81de9fe1 100644
--- a/activerecord/lib/active_record/connection_adapters/column.rb
+++ b/activerecord/lib/active_record/connection_adapters/column.rb
@@ -1,21 +1,21 @@
-require 'set'
+# frozen_string_literal: true
module ActiveRecord
# :stopdoc:
module ConnectionAdapters
# An abstract definition of a column in a table.
class Column
- attr_reader :name, :default, :sql_type_metadata, :null, :table_name, :default_function, :collation
+ attr_reader :name, :default, :sql_type_metadata, :null, :table_name, :default_function, :collation, :comment
delegate :precision, :scale, :limit, :type, :sql_type, to: :sql_type_metadata, allow_nil: true
# Instantiates a new column in the table.
#
- # +name+ is the column's name, such as <tt>supplier_id</tt> in <tt>supplier_id int</tt>.
+ # +name+ is the column's name, such as <tt>supplier_id</tt> in <tt>supplier_id bigint</tt>.
# +default+ is the type-casted default value, such as +new+ in <tt>sales_stage varchar(20) default 'new'</tt>.
# +sql_type_metadata+ is various information about the type of the column
# +null+ determines if this column allows +NULL+ values.
- def initialize(name, default, sql_type_metadata = nil, null = true, table_name = nil, default_function = nil, collation = nil)
+ def initialize(name, default, sql_type_metadata = nil, null = true, table_name = nil, default_function = nil, collation = nil, comment: nil, **)
@name = name.freeze
@table_name = table_name
@sql_type_metadata = sql_type_metadata
@@ -23,6 +23,7 @@ module ActiveRecord
@default = default
@default_function = default_function
@collation = collation
+ @comment = comment
end
def has_default?
@@ -30,7 +31,7 @@ module ActiveRecord
end
def bigint?
- /\Abigint\b/ === sql_type
+ /\Abigint\b/.match?(sql_type)
end
# Returns the human name of the column name.
@@ -41,6 +42,28 @@ module ActiveRecord
Base.human_attribute_name(@name)
end
+ def init_with(coder)
+ @name = coder["name"]
+ @table_name = coder["table_name"]
+ @sql_type_metadata = coder["sql_type_metadata"]
+ @null = coder["null"]
+ @default = coder["default"]
+ @default_function = coder["default_function"]
+ @collation = coder["collation"]
+ @comment = coder["comment"]
+ end
+
+ def encode_with(coder)
+ coder["name"] = @name
+ coder["table_name"] = @table_name
+ coder["sql_type_metadata"] = @sql_type_metadata
+ coder["null"] = @null
+ coder["default"] = @default
+ coder["default_function"] = @default_function
+ coder["collation"] = @collation
+ coder["comment"] = @comment
+ end
+
def ==(other)
other.is_a?(Column) &&
attributes_for_hash == other.attributes_for_hash
@@ -53,9 +76,9 @@ module ActiveRecord
protected
- def attributes_for_hash
- [self.class, name, default, sql_type_metadata, null, table_name, default_function, collation]
- end
+ def attributes_for_hash
+ [self.class, name, default, sql_type_metadata, null, table_name, default_function, collation]
+ end
end
class NullColumn < Column
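
The new init_with/encode_with pair lets Column instances round-trip through YAML; Psych calls encode_with when dumping and init_with when reviving an object. A small self-contained illustration of that coder protocol on an ordinary Ruby class:

    require "yaml"

    # Minimal illustration of Psych's coder protocol, as used by Column above.
    class Point
      attr_reader :x, :y

      def initialize(x, y)
        @x, @y = x, y
      end

      def encode_with(coder)  # called by YAML.dump
        coder["x"] = @x
        coder["y"] = @y
      end

      def init_with(coder)    # called when the YAML is loaded back
        @x = coder["x"]
        @y = coder["y"]
      end
    end

    yaml  = YAML.dump(Point.new(1, 2))
    point = YAML.unsafe_load(yaml)  # use YAML.load on Rubies without unsafe_load
    p [point.x, point.y]            # => [1, 2]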
diff --git a/activerecord/lib/active_record/connection_adapters/connection_specification.rb b/activerecord/lib/active_record/connection_adapters/connection_specification.rb
index 4bc6447368..9eaf9d9a89 100644
--- a/activerecord/lib/active_record/connection_adapters/connection_specification.rb
+++ b/activerecord/lib/active_record/connection_adapters/connection_specification.rb
@@ -1,21 +1,26 @@
-require 'uri'
+# frozen_string_literal: true
+
+require "uri"
module ActiveRecord
module ConnectionAdapters
class ConnectionSpecification #:nodoc:
- attr_reader :config, :adapter_method
+ attr_reader :name, :config, :adapter_method
- def initialize(config, adapter_method)
- @config, @adapter_method = config, adapter_method
+ def initialize(name, config, adapter_method)
+ @name, @config, @adapter_method = name, config, adapter_method
end
def initialize_dup(original)
@config = original.config.dup
end
+ def to_hash
+ @config.merge(name: @name)
+ end
+
# Expands a connection string into a hash.
class ConnectionUrlResolver # :nodoc:
-
# == Example
#
# url = "postgresql://foo:bar@localhost:9000/foo_test?pool=5&timeout=3000"
@@ -33,11 +38,11 @@ module ActiveRecord
def initialize(url)
raise "Database URL cannot be empty" if url.blank?
@uri = uri_parser.parse(url)
- @adapter = @uri.scheme && @uri.scheme.tr('-', '_')
+ @adapter = @uri.scheme && @uri.scheme.tr("-", "_")
@adapter = "postgresql" if @adapter == "postgres"
if @uri.opaque
- @uri.opaque, @query = @uri.opaque.split('?', 2)
+ @uri.opaque, @query = @uri.opaque.split("?", 2)
else
@query = @uri.query
end
@@ -45,65 +50,63 @@ module ActiveRecord
# Converts the given URL to a full connection hash.
def to_hash
- config = raw_config.reject { |_,value| value.blank? }
- config.map { |key,value| config[key] = uri_parser.unescape(value) if value.is_a? String }
+ config = raw_config.reject { |_, value| value.blank? }
+ config.map { |key, value| config[key] = uri_parser.unescape(value) if value.is_a? String }
config
end
private
- def uri
- @uri
- end
+ attr_reader :uri
- def uri_parser
- @uri_parser ||= URI::Parser.new
- end
+ def uri_parser
+ @uri_parser ||= URI::Parser.new
+ end
- # Converts the query parameters of the URI into a hash.
- #
- # "localhost?pool=5&reaping_frequency=2"
- # # => { "pool" => "5", "reaping_frequency" => "2" }
- #
- # returns empty hash if no query present.
- #
- # "localhost"
- # # => {}
- def query_hash
- Hash[(@query || '').split("&").map { |pair| pair.split("=") }]
- end
+ # Converts the query parameters of the URI into a hash.
+ #
+ # "localhost?pool=5&reaping_frequency=2"
+ # # => { "pool" => "5", "reaping_frequency" => "2" }
+ #
+ # returns empty hash if no query present.
+ #
+ # "localhost"
+ # # => {}
+ def query_hash
+ Hash[(@query || "").split("&").map { |pair| pair.split("=") }]
+ end
- def raw_config
- if uri.opaque
- query_hash.merge({
- "adapter" => @adapter,
- "database" => uri.opaque })
- else
- query_hash.merge({
- "adapter" => @adapter,
- "username" => uri.user,
- "password" => uri.password,
- "port" => uri.port,
- "database" => database_from_path,
- "host" => uri.hostname })
+ def raw_config
+ if uri.opaque
+ query_hash.merge(
+ "adapter" => @adapter,
+ "database" => uri.opaque)
+ else
+ query_hash.merge(
+ "adapter" => @adapter,
+ "username" => uri.user,
+ "password" => uri.password,
+ "port" => uri.port,
+ "database" => database_from_path,
+ "host" => uri.hostname)
+ end
end
- end
- # Returns name of the database.
- def database_from_path
- if @adapter == 'sqlite3'
- # 'sqlite3:/foo' is absolute, because that makes sense. The
- # corresponding relative version, 'sqlite3:foo', is handled
- # elsewhere, as an "opaque".
+ # Returns name of the database.
+ def database_from_path
+ if @adapter == "sqlite3"
+ # 'sqlite3:/foo' is absolute, because that makes sense. The
+ # corresponding relative version, 'sqlite3:foo', is handled
+ # elsewhere, as an "opaque".
- uri.path
- else
- # Only SQLite uses a filename as the "database" name; for
- # anything else, a leading slash would be silly.
+ uri.path
+ else
+ # Only SQLite uses a filename as the "database" name; for
+ # anything else, a leading slash would be silly.
- uri.path.sub(%r{^/}, "")
+ uri.path.sub(%r{^/}, "")
+ end
end
- end
end
##
@@ -111,8 +114,7 @@ module ActiveRecord
class Resolver # :nodoc:
attr_reader :configurations
- # Accepts a hash two layers deep, keys on the first layer represent
- # environments such as "production". Keys must be strings.
+ # Accepts a list of db config objects.
def initialize(configurations)
@configurations = configurations
end
@@ -133,25 +135,14 @@ module ActiveRecord
# Resolver.new(configurations).resolve(:production)
# # => { "host" => "localhost", "database" => "foo", "adapter" => "postgresql" }
#
- def resolve(config)
- if config
- resolve_connection config
- elsif env = ActiveRecord::ConnectionHandling::RAILS_ENV.call
- resolve_symbol_connection env.to_sym
+ def resolve(config_or_env, pool_name = nil)
+ if config_or_env
+ resolve_connection config_or_env, pool_name
else
raise AdapterNotSpecified
end
end
- # Expands each key in @configurations hash into fully resolved hash
- def resolve_all
- config = configurations.dup
- config.each do |key, value|
- config[key] = resolve(value) if value
- end
- config
- end
-
# Returns an instance of ConnectionSpecification for a given adapter.
# Accepts a hash one layer deep that contains all connection information.
#
@@ -165,91 +156,141 @@ module ActiveRecord
# # => { "host" => "localhost", "database" => "foo", "adapter" => "sqlite3" }
#
def spec(config)
- spec = resolve(config).symbolize_keys
+ pool_name = config if config.is_a?(Symbol)
+
+ spec = resolve(config, pool_name).symbolize_keys
raise(AdapterNotSpecified, "database configuration does not specify adapter") unless spec.key?(:adapter)
+ # Require the adapter itself and give useful feedback about
+ # 1. Missing adapter gems and
+ # 2. Adapter gems' missing dependencies.
path_to_adapter = "active_record/connection_adapters/#{spec[:adapter]}_adapter"
begin
require path_to_adapter
- rescue Gem::LoadError => e
- raise Gem::LoadError, "Specified '#{spec[:adapter]}' for database adapter, but the gem is not loaded. Add `gem '#{e.name}'` to your Gemfile (and ensure its version is at the minimum required by ActiveRecord)."
rescue LoadError => e
- raise LoadError, "Could not load '#{path_to_adapter}'. Make sure that the adapter in config/database.yml is valid. If you use an adapter other than 'mysql2', 'postgresql' or 'sqlite3' add the necessary adapter gem to the Gemfile.", e.backtrace
+ # We couldn't require the adapter itself. Raise an exception that
+ # points out config typos and missing gems.
+ if e.path == path_to_adapter
+ # We can assume that a non-builtin adapter was specified, so it's
+ # either misspelled or missing from Gemfile.
+ raise LoadError, "Could not load the '#{spec[:adapter]}' Active Record adapter. Ensure that the adapter is spelled correctly in config/database.yml and that you've added the necessary adapter gem to your Gemfile.", e.backtrace
+
+ # Bubbled up from the adapter require. Prefix the exception message
+ # with some guidance about how to address it and reraise.
+ else
+ raise LoadError, "Error loading the '#{spec[:adapter]}' Active Record adapter. Missing a gem it depends on? #{e.message}", e.backtrace
+ end
end
adapter_method = "#{spec[:adapter]}_connection"
- ConnectionSpecification.new(spec, adapter_method)
+
+ unless ActiveRecord::Base.respond_to?(adapter_method)
+        raise AdapterNotFound, "database configuration specifies nonexistent #{spec[:adapter]} adapter"
+ end
+
+ ConnectionSpecification.new(spec.delete(:name) || "primary", spec, adapter_method)
end
private
+ # Returns fully resolved connection, accepts hash, string or symbol.
+ # Always returns a hash.
+ #
+ # == Examples
+ #
+ # Symbol representing current environment.
+ #
+ # Resolver.new("production" => {}).resolve_connection(:production)
+ # # => {}
+ #
+ # One layer deep hash of connection values.
+ #
+ # Resolver.new({}).resolve_connection("adapter" => "sqlite3")
+ # # => { "adapter" => "sqlite3" }
+ #
+ # Connection URL.
+ #
+ # Resolver.new({}).resolve_connection("postgresql://localhost/foo")
+ # # => { "host" => "localhost", "database" => "foo", "adapter" => "postgresql" }
+ #
+ def resolve_connection(config_or_env, pool_name = nil)
+ case config_or_env
+ when Symbol
+ resolve_symbol_connection config_or_env, pool_name
+ when String
+ resolve_url_connection config_or_env
+ when Hash
+ resolve_hash_connection config_or_env
+ end
+ end
- # Returns fully resolved connection, accepts hash, string or symbol.
- # Always returns a hash.
- #
- # == Examples
- #
- # Symbol representing current environment.
- #
- # Resolver.new("production" => {}).resolve_connection(:production)
- # # => {}
- #
- # One layer deep hash of connection values.
- #
- # Resolver.new({}).resolve_connection("adapter" => "sqlite3")
- # # => { "adapter" => "sqlite3" }
- #
- # Connection URL.
- #
- # Resolver.new({}).resolve_connection("postgresql://localhost/foo")
- # # => { "host" => "localhost", "database" => "foo", "adapter" => "postgresql" }
- #
- def resolve_connection(spec)
- case spec
- when Symbol
- resolve_symbol_connection spec
- when String
- resolve_url_connection spec
- when Hash
- resolve_hash_connection spec
+ # Takes the environment such as +:production+ or +:development+ and a
+          # pool name that corresponds to the name given by the connection pool
+ # to the connection. That pool name is merged into the hash with the
+ # name key.
+ #
+ # This requires that the @configurations was initialized with a key that
+ # matches.
+ #
+ # configurations = #<ActiveRecord::DatabaseConfigurations:0x00007fd9fdace3e0
+ # @configurations=[
+ # #<ActiveRecord::DatabaseConfigurations::HashConfig:0x00007fd9fdace250
+ # @env_name="production", @spec_name="primary", @config={"database"=>"my_db"}>
+ # ]>
+ #
+ # Resolver.new(configurations).resolve_symbol_connection(:production, "primary")
+ # # => { "database" => "my_db" }
+ def resolve_symbol_connection(env_name, pool_name)
+ db_config = configurations.find_db_config(env_name)
+
+ if db_config
+ resolve_connection(db_config.config).merge("name" => pool_name.to_s)
+ else
+ raise AdapterNotSpecified, <<~MSG
+ The `#{env_name}` database is not configured for the `#{ActiveRecord::ConnectionHandling::DEFAULT_ENV.call}` environment.
+
+              Available database configurations are:
+
+ #{build_configuration_sentence}
+ MSG
+ end
end
- end
- # Takes the environment such as +:production+ or +:development+.
- # This requires that the @configurations was initialized with a key that
- # matches.
- #
- # Resolver.new("production" => {}).resolve_symbol_connection(:production)
- # # => {}
- #
- def resolve_symbol_connection(spec)
- if config = configurations[spec.to_s]
- resolve_connection(config)
- else
- raise(AdapterNotSpecified, "'#{spec}' database is not configured. Available: #{configurations.keys.inspect}")
+ def build_configuration_sentence # :nodoc:
+ configs = configurations.configs_for(include_replicas: true)
+
+ configs.group_by(&:env_name).map do |env, config|
+ namespaces = config.map(&:spec_name)
+ if namespaces.size > 1
+ "#{env}: #{namespaces.join(", ")}"
+ else
+ env
+ end
+ end.join("\n")
end
- end
- # Accepts a hash. Expands the "url" key that contains a
- # URL database connection to a full connection
- # hash and merges with the rest of the hash.
- # Connection details inside of the "url" key win any merge conflicts
- def resolve_hash_connection(spec)
- if spec["url"] && spec["url"] !~ /^jdbc:/
- connection_hash = resolve_url_connection(spec.delete("url"))
- spec.merge!(connection_hash)
+ # Accepts a hash. Expands the "url" key that contains a
+ # URL database connection to a full connection
+ # hash and merges with the rest of the hash.
+ # Connection details inside of the "url" key win any merge conflicts
+ def resolve_hash_connection(spec)
+ if spec["url"] && spec["url"] !~ /^jdbc:/
+ connection_hash = resolve_url_connection(spec.delete("url"))
+ spec.merge!(connection_hash)
+ end
+ spec
end
- spec
- end
- # Takes a connection URL.
- #
- # Resolver.new({}).resolve_url_connection("postgresql://localhost/foo")
- # # => { "host" => "localhost", "database" => "foo", "adapter" => "postgresql" }
- #
- def resolve_url_connection(url)
- ConnectionUrlResolver.new(url).to_hash
- end
+ # Takes a connection URL.
+ #
+ # Resolver.new({}).resolve_url_connection("postgresql://localhost/foo")
+ # # => { "host" => "localhost", "database" => "foo", "adapter" => "postgresql" }
+ #
+ def resolve_url_connection(url)
+ ConnectionUrlResolver.new(url).to_hash
+ end
end
end
end
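
ConnectionUrlResolver turns a database URL into a configuration hash. A rough, hedged approximation using only Ruby's standard URI and CGI libraries; it skips the opaque sqlite3 case and the unescaping the real resolver performs:

    require "uri"
    require "cgi"

    # Simplified take on resolving a non-opaque database URL into a config hash.
    def url_to_config(url)
      uri     = URI.parse(url)
      adapter = uri.scheme.tr("-", "_")
      adapter = "postgresql" if adapter == "postgres"

      query = uri.query ? CGI.parse(uri.query).transform_values(&:first) : {}

      query.merge(
        "adapter"  => adapter,
        "username" => uri.user,
        "password" => uri.password,
        "port"     => uri.port,
        "database" => uri.path.sub(%r{\A/}, ""),
        "host"     => uri.hostname
      ).reject { |_, v| v.nil? || v == "" }
    end

    p url_to_config("postgresql://foo:bar@localhost:9000/foo_test?pool=5&timeout=3000")
    # => {"pool"=>"5", "timeout"=>"3000", "adapter"=>"postgresql", "username"=>"foo",
    #     "password"=>"bar", "port"=>9000, "database"=>"foo_test", "host"=>"localhost"}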
diff --git a/activerecord/lib/active_record/connection_adapters/determine_if_preparable_visitor.rb b/activerecord/lib/active_record/connection_adapters/determine_if_preparable_visitor.rb
index 0fdc185c45..1df4dea2d8 100644
--- a/activerecord/lib/active_record/connection_adapters/determine_if_preparable_visitor.rb
+++ b/activerecord/lib/active_record/connection_adapters/determine_if_preparable_visitor.rb
@@ -1,14 +1,21 @@
+# frozen_string_literal: true
+
module ActiveRecord
module ConnectionAdapters
module DetermineIfPreparableVisitor
- attr_reader :preparable
+ attr_accessor :preparable
def accept(*)
@preparable = true
super
end
- def visit_Arel_Nodes_In(*)
+ def visit_Arel_Nodes_In(o, collector)
+ @preparable = false
+ super
+ end
+
+ def visit_Arel_Nodes_NotIn(o, collector)
@preparable = false
super
end
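
The visitor now marks both IN and NOT IN predicates as non-preparable because the number of bind placeholders tracks the size of the value list, so a cached prepared statement would rarely be reusable. A two-line illustration of why:

    # The placeholder count changes with the array length, so every distinct
    # list size would need its own prepared statement.
    def in_clause(column, values)
      "#{column} IN (#{(["?"] * values.size).join(", ")})"
    end

    p in_clause("id", [1, 2])        # => "id IN (?, ?)"
    p in_clause("id", [1, 2, 3, 4])  # => "id IN (?, ?, ?, ?)"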
diff --git a/activerecord/lib/active_record/connection_adapters/mysql/column.rb b/activerecord/lib/active_record/connection_adapters/mysql/column.rb
index 9c45fdd44a..fa1541019d 100644
--- a/activerecord/lib/active_record/connection_adapters/mysql/column.rb
+++ b/activerecord/lib/active_record/connection_adapters/mysql/column.rb
@@ -1,48 +1,25 @@
+# frozen_string_literal: true
+
module ActiveRecord
module ConnectionAdapters
module MySQL
class Column < ConnectionAdapters::Column # :nodoc:
- delegate :strict, :extra, to: :sql_type_metadata, allow_nil: true
-
- def initialize(*)
- super
- assert_valid_default
- extract_default
- end
-
- def has_default?
- return false if blob_or_text_column? # MySQL forbids defaults on blob and text columns
- super
- end
-
- def blob_or_text_column?
- /\A(?:tiny|medium|long)?blob\b/ === sql_type || type == :text
- end
+ delegate :extra, to: :sql_type_metadata, allow_nil: true
def unsigned?
- /\bunsigned\z/ === sql_type
+ /\bunsigned(?: zerofill)?\z/.match?(sql_type)
end
def case_sensitive?
- collation && collation !~ /_ci\z/
+ collation && !/_ci\z/.match?(collation)
end
def auto_increment?
- extra == 'auto_increment'
- end
-
- private
-
- def extract_default
- if blob_or_text_column?
- @default = null || strict ? nil : ''
- end
+ extra == "auto_increment"
end
- def assert_valid_default
- if blob_or_text_column? && default.present?
- raise ArgumentError, "#{type} columns cannot have a default value: #{default.inspect}"
- end
+ def virtual?
+ /\b(?:VIRTUAL|STORED|PERSISTENT)\b/.match?(extra)
end
end
end
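
The rewritten predicates rely on the raw sql_type and Extra strings that MySQL reports. The same regexes can be exercised on their own; the sample strings below are illustrative values of the kind SHOW FULL FIELDS returns:

    # Exercising the predicates' regexes against sample column metadata.
    UNSIGNED = /\bunsigned(?: zerofill)?\z/
    VIRTUAL  = /\b(?:VIRTUAL|STORED|PERSISTENT)\b/
    CI       = /_ci\z/

    p UNSIGNED.match?("int(10) unsigned")           # => true
    p UNSIGNED.match?("int(10) unsigned zerofill")  # => true
    p VIRTUAL.match?("VIRTUAL GENERATED")           # => true
    p CI.match?("utf8mb4_general_ci")               # => true  (case-insensitive collation)
    p CI.match?("utf8mb4_bin")                      # => false (case-sensitive)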
diff --git a/activerecord/lib/active_record/connection_adapters/mysql/database_statements.rb b/activerecord/lib/active_record/connection_adapters/mysql/database_statements.rb
new file mode 100644
index 0000000000..1199c0ad1b
--- /dev/null
+++ b/activerecord/lib/active_record/connection_adapters/mysql/database_statements.rb
@@ -0,0 +1,201 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ module ConnectionAdapters
+ module MySQL
+ module DatabaseStatements
+ # Returns an ActiveRecord::Result instance.
+ def select_all(*) # :nodoc:
+ result = if ExplainRegistry.collect? && prepared_statements
+ unprepared_statement { super }
+ else
+ super
+ end
+ @connection.abandon_results!
+ result
+ end
+
+ def query(sql, name = nil) # :nodoc:
+ execute(sql, name).to_a
+ end
+
+ READ_QUERY = ActiveRecord::ConnectionAdapters::AbstractAdapter.build_read_query_regexp(:begin, :commit, :explain, :select, :set, :show, :release, :savepoint, :rollback) # :nodoc:
+ private_constant :READ_QUERY
+
+ def write_query?(sql) # :nodoc:
+ !READ_QUERY.match?(sql)
+ end
+
+ # Executes the SQL statement in the context of this connection.
+ def execute(sql, name = nil)
+ if preventing_writes? && write_query?(sql)
+ raise ActiveRecord::ReadOnlyError, "Write query attempted while in readonly mode: #{sql}"
+ end
+
+ # make sure we carry over any changes to ActiveRecord::Base.default_timezone that have been
+ # made since we established the connection
+ @connection.query_options[:database_timezone] = ActiveRecord::Base.default_timezone
+
+ super
+ end
+
+ def exec_query(sql, name = "SQL", binds = [], prepare: false)
+ if without_prepared_statement?(binds)
+ execute_and_free(sql, name) do |result|
+ if result
+ ActiveRecord::Result.new(result.fields, result.to_a)
+ else
+ ActiveRecord::Result.new([], [])
+ end
+ end
+ else
+ exec_stmt_and_free(sql, name, binds, cache_stmt: prepare) do |_, result|
+ if result
+ ActiveRecord::Result.new(result.fields, result.to_a)
+ else
+ ActiveRecord::Result.new([], [])
+ end
+ end
+ end
+ end
+
+ def exec_delete(sql, name = nil, binds = [])
+ if without_prepared_statement?(binds)
+ execute_and_free(sql, name) { @connection.affected_rows }
+ else
+ exec_stmt_and_free(sql, name, binds) { |stmt| stmt.affected_rows }
+ end
+ end
+ alias :exec_update :exec_delete
+
+ private
+ def execute_batch(sql, name = nil)
+ super
+ @connection.abandon_results!
+ end
+
+ def default_insert_value(column)
+ super unless column.auto_increment?
+ end
+
+ def last_inserted_id(result)
+ @connection.last_id
+ end
+
+ def supports_set_server_option?
+ @connection.respond_to?(:set_server_option)
+ end
+
+ def build_truncate_statements(*table_names)
+ if table_names.size == 1
+ super.first
+ else
+ super
+ end
+ end
+
+ def multi_statements_enabled?(flags)
+ if flags.is_a?(Array)
+ flags.include?("MULTI_STATEMENTS")
+ else
+ (flags & Mysql2::Client::MULTI_STATEMENTS) != 0
+ end
+ end
+
+ def with_multi_statements
+ previous_flags = @config[:flags]
+
+ unless multi_statements_enabled?(previous_flags)
+ if supports_set_server_option?
+ @connection.set_server_option(Mysql2::Client::OPTION_MULTI_STATEMENTS_ON)
+ else
+ @config[:flags] = Mysql2::Client::MULTI_STATEMENTS
+ reconnect!
+ end
+ end
+
+ yield
+ ensure
+ unless multi_statements_enabled?(previous_flags)
+ if supports_set_server_option?
+ @connection.set_server_option(Mysql2::Client::OPTION_MULTI_STATEMENTS_OFF)
+ else
+ @config[:flags] = previous_flags
+ reconnect!
+ end
+ end
+ end
+
+ def combine_multi_statements(total_sql)
+ total_sql.each_with_object([]) do |sql, total_sql_chunks|
+ previous_packet = total_sql_chunks.last
+ if max_allowed_packet_reached?(sql, previous_packet)
+ total_sql_chunks << +sql
+ else
+ previous_packet << ";\n"
+ previous_packet << sql
+ end
+ end
+ end
+
+ def max_allowed_packet_reached?(current_packet, previous_packet)
+ if current_packet.bytesize > max_allowed_packet
+ raise ActiveRecordError,
+ "Fixtures set is too large #{current_packet.bytesize}. Consider increasing the max_allowed_packet variable."
+ elsif previous_packet.nil?
+ true
+ else
+ (current_packet.bytesize + previous_packet.bytesize) > max_allowed_packet
+ end
+ end
+
+ def max_allowed_packet
+ @max_allowed_packet ||= begin
+ bytes_margin = 2
+ show_variable("max_allowed_packet") - bytes_margin
+ end
+ end
+
+ def exec_stmt_and_free(sql, name, binds, cache_stmt: false)
+ if preventing_writes? && write_query?(sql)
+ raise ActiveRecord::ReadOnlyError, "Write query attempted while in readonly mode: #{sql}"
+ end
+
+ materialize_transactions
+
+ # make sure we carry over any changes to ActiveRecord::Base.default_timezone that have been
+ # made since we established the connection
+ @connection.query_options[:database_timezone] = ActiveRecord::Base.default_timezone
+
+ type_casted_binds = type_casted_binds(binds)
+
+ log(sql, name, binds, type_casted_binds) do
+ if cache_stmt
+ stmt = @statements[sql] ||= @connection.prepare(sql)
+ else
+ stmt = @connection.prepare(sql)
+ end
+
+ begin
+ result = ActiveSupport::Dependencies.interlock.permit_concurrent_loads do
+ stmt.execute(*type_casted_binds)
+ end
+ rescue Mysql2::Error => e
+ if cache_stmt
+ @statements.delete(sql)
+ else
+ stmt.close
+ end
+ raise e
+ end
+
+ ret = yield stmt, result
+ result.free if result
+ stmt.close unless cache_stmt
+ ret
+ end
+ end
+ end
+ end
+ end
+end
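
combine_multi_statements above packs individual statements into batches that stay under max_allowed_packet. A standalone sketch of the same chunk-by-byte-size idea, using a tiny hypothetical limit instead of querying the server:

    # Chunk statements by byte size, in the spirit of combine_multi_statements.
    MAX_PACKET = 60 # hypothetical; the adapter reads max_allowed_packet instead

    def combine(statements)
      statements.each_with_object([]) do |sql, chunks|
        previous = chunks.last
        if previous.nil? || previous.bytesize + sql.bytesize > MAX_PACKET
          chunks << sql.dup                 # start a new packet
        else
          previous << ";\n" << sql          # append to the current packet
        end
      end
    end

    p combine(["INSERT INTO a VALUES (1)", "INSERT INTO a VALUES (2)", "INSERT INTO a VALUES (3)"])
    # => ["INSERT INTO a VALUES (1);\nINSERT INTO a VALUES (2)", "INSERT INTO a VALUES (3)"]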
diff --git a/activerecord/lib/active_record/connection_adapters/mysql/explain_pretty_printer.rb b/activerecord/lib/active_record/connection_adapters/mysql/explain_pretty_printer.rb
index 1820853196..20c3c83664 100644
--- a/activerecord/lib/active_record/connection_adapters/mysql/explain_pretty_printer.rb
+++ b/activerecord/lib/active_record/connection_adapters/mysql/explain_pretty_printer.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
module ActiveRecord
module ConnectionAdapters
module MySQL
@@ -36,34 +38,34 @@ module ActiveRecord
private
- def compute_column_widths(result)
- [].tap do |widths|
- result.columns.each_with_index do |column, i|
- cells_in_column = [column] + result.rows.map {|r| r[i].nil? ? 'NULL' : r[i].to_s}
- widths << cells_in_column.map(&:length).max
+ def compute_column_widths(result)
+ [].tap do |widths|
+ result.columns.each_with_index do |column, i|
+ cells_in_column = [column] + result.rows.map { |r| r[i].nil? ? "NULL" : r[i].to_s }
+ widths << cells_in_column.map(&:length).max
+ end
end
end
- end
- def build_separator(widths)
- padding = 1
- '+' + widths.map {|w| '-' * (w + (padding*2))}.join('+') + '+'
- end
+ def build_separator(widths)
+ padding = 1
+ "+" + widths.map { |w| "-" * (w + (padding * 2)) }.join("+") + "+"
+ end
- def build_cells(items, widths)
- cells = []
- items.each_with_index do |item, i|
- item = 'NULL' if item.nil?
- justifier = item.is_a?(Numeric) ? 'rjust' : 'ljust'
- cells << item.to_s.send(justifier, widths[i])
+ def build_cells(items, widths)
+ cells = []
+ items.each_with_index do |item, i|
+ item = "NULL" if item.nil?
+ justifier = item.is_a?(Numeric) ? "rjust" : "ljust"
+ cells << item.to_s.send(justifier, widths[i])
+ end
+ "| " + cells.join(" | ") + " |"
end
- '| ' + cells.join(' | ') + ' |'
- end
- def build_footer(nrows, elapsed)
- rows_label = nrows == 1 ? 'row' : 'rows'
- "#{nrows} #{rows_label} in set (%.2f sec)" % elapsed
- end
+ def build_footer(nrows, elapsed)
+ rows_label = nrows == 1 ? "row" : "rows"
+ "#{nrows} #{rows_label} in set (%.2f sec)" % elapsed
+ end
end
end
end
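
The pretty printer pads every column to its widest cell and frames the rows with +---+ separators. A simplified standalone version (always left-justified, unlike the original, which right-justifies numerics):

    # Minimal ASCII table in the style of ExplainPrettyPrinter.
    def ascii_table(columns, rows)
      widths = columns.each_index.map do |i|
        ([columns[i]] + rows.map { |r| r[i].to_s }).map(&:length).max
      end
      separator  = "+" + widths.map { |w| "-" * (w + 2) }.join("+") + "+"
      format_row = ->(cells) { "| " + cells.each_with_index.map { |c, i| c.to_s.ljust(widths[i]) }.join(" | ") + " |" }

      [separator, format_row.call(columns), separator, *rows.map(&format_row), separator].join("\n")
    end

    puts ascii_table(%w[id select_type table], [[1, "SIMPLE", "users"]])
    # +----+-------------+-------+
    # | id | select_type | table |
    # +----+-------------+-------+
    # | 1  | SIMPLE      | users |
    # +----+-------------+-------+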
diff --git a/activerecord/lib/active_record/connection_adapters/mysql/quoting.rb b/activerecord/lib/active_record/connection_adapters/mysql/quoting.rb
index 7c5980da2a..75564a61d6 100644
--- a/activerecord/lib/active_record/connection_adapters/mysql/quoting.rb
+++ b/activerecord/lib/active_record/connection_adapters/mysql/quoting.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
module ActiveRecord
module ConnectionAdapters
module MySQL
@@ -7,16 +9,33 @@ module ActiveRecord
end
def quote_table_name(name)
- @quoted_table_names[name] ||= super.gsub('.', '`.`')
+ @quoted_table_names[name] ||= super.gsub(".", "`.`").freeze
end
- private
+ def unquoted_true
+ 1
+ end
- def _quote(value)
- if value.is_a?(Type::Binary::Data)
- "x'#{value.hex}'"
- else
+ def unquoted_false
+ 0
+ end
+
+ def quoted_date(value)
+ if supports_datetime_with_precision?
super
+ else
+ super.sub(/\.\d{6}\z/, "")
+ end
+ end
+
+ def quoted_binary(value)
+ "x'#{value.hex}'"
+ end
+
+ def _type_cast(value)
+ case value
+ when Date, Time then value
+ else super
end
end
end
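
The quoting changes serialize booleans as 1/0 and strip microseconds from quoted dates when the server cannot store sub-second precision. A quick sketch of those two transformations with hypothetical helper names:

    # Boolean and date quoting in the spirit of MySQL::Quoting.
    def unquoted_boolean(value)
      value ? 1 : 0
    end

    def quoted_date(time, supports_precision:)
      formatted = time.strftime("%Y-%m-%d %H:%M:%S.%6N")
      supports_precision ? formatted : formatted.sub(/\.\d{6}\z/, "")
    end

    t = Time.utc(2019, 1, 2, 3, 4, 5, 123456)
    p unquoted_boolean(true)                     # => 1
    p quoted_date(t, supports_precision: true)   # => "2019-01-02 03:04:05.123456"
    p quoted_date(t, supports_precision: false)  # => "2019-01-02 03:04:05"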
diff --git a/activerecord/lib/active_record/connection_adapters/mysql/schema_creation.rb b/activerecord/lib/active_record/connection_adapters/mysql/schema_creation.rb
index 1e2c859af9..82ed320617 100644
--- a/activerecord/lib/active_record/connection_adapters/mysql/schema_creation.rb
+++ b/activerecord/lib/active_record/connection_adapters/mysql/schema_creation.rb
@@ -1,56 +1,71 @@
+# frozen_string_literal: true
+
module ActiveRecord
module ConnectionAdapters
module MySQL
- class SchemaCreation < AbstractAdapter::SchemaCreation
+ class SchemaCreation < AbstractAdapter::SchemaCreation # :nodoc:
+ delegate :add_sql_comment!, :mariadb?, to: :@conn, private: true
+
private
- def visit_DropForeignKey(name)
- "DROP FOREIGN KEY #{name}"
- end
-
- def visit_ColumnDefinition(o)
- o.sql_type = type_to_sql(o.type, o.limit, o.precision, o.scale, o.unsigned)
- super
- end
-
- def visit_AddColumnDefinition(o)
- add_column_position!(super, column_options(o.column))
- end
-
- def visit_ChangeColumnDefinition(o)
- change_column_sql = "CHANGE #{quote_column_name(o.name)} #{accept(o.column)}"
- add_column_position!(change_column_sql, column_options(o.column))
- end
-
- def column_options(o)
- column_options = super
- column_options[:charset] = o.charset
- column_options
- end
-
- def add_column_options!(sql, options)
- if options[:charset]
- sql << " CHARACTER SET #{options[:charset]}"
+ def visit_DropForeignKey(name)
+ "DROP FOREIGN KEY #{name}"
+ end
+
+ def visit_AddColumnDefinition(o)
+ add_column_position!(super, column_options(o.column))
+ end
+
+ def visit_ChangeColumnDefinition(o)
+ change_column_sql = +"CHANGE #{quote_column_name(o.name)} #{accept(o.column)}"
+ add_column_position!(change_column_sql, column_options(o.column))
+ end
+
+ def add_table_options!(create_sql, options)
+ add_sql_comment!(super, options[:comment])
end
- if options[:collation]
- sql << " COLLATE #{options[:collation]}"
+
+ def add_column_options!(sql, options)
+ # By default, TIMESTAMP columns are NOT NULL, cannot contain NULL values,
+ # and assigning NULL assigns the current timestamp. To permit a TIMESTAMP
+ # column to contain NULL, explicitly declare it with the NULL attribute.
+ # See https://dev.mysql.com/doc/refman/5.7/en/timestamp-initialization.html
+ if /\Atimestamp\b/.match?(options[:column].sql_type) && !options[:primary_key]
+ sql << " NULL" unless options[:null] == false || options_include_default?(options)
+ end
+
+ if charset = options[:charset]
+ sql << " CHARACTER SET #{charset}"
+ end
+
+ if collation = options[:collation]
+ sql << " COLLATE #{collation}"
+ end
+
+ if as = options[:as]
+ sql << " AS (#{as})"
+ if options[:stored]
+ sql << (mariadb? ? " PERSISTENT" : " STORED")
+ end
+ end
+
+ add_sql_comment!(super, options[:comment])
end
- super
- end
-
- def add_column_position!(sql, options)
- if options[:first]
- sql << " FIRST"
- elsif options[:after]
- sql << " AFTER #{quote_column_name(options[:after])}"
+
+ def add_column_position!(sql, options)
+ if options[:first]
+ sql << " FIRST"
+ elsif options[:after]
+ sql << " AFTER #{quote_column_name(options[:after])}"
+ end
+
+ sql
end
- sql
- end
- def index_in_create(table_name, column_name, options)
- index_name, index_type, index_columns, _, _, index_using = @conn.add_index_options(table_name, column_name, options)
- "#{index_type} INDEX #{quote_column_name(index_name)} #{index_using} (#{index_columns}) "
- end
+ def index_in_create(table_name, column_name, options)
+ index_name, index_type, index_columns, _, _, index_using, comment = @conn.add_index_options(table_name, column_name, options)
+ add_sql_comment!((+"#{index_type} INDEX #{quote_column_name(index_name)} #{index_using} (#{index_columns})"), comment)
+ end
end
end
end
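
add_column_position! appends MySQL's FIRST / AFTER placement modifiers to an ALTER fragment. A standalone sketch with a hypothetical helper of the same shape:

    # Appending MySQL column-position modifiers, as add_column_position! does.
    def add_column_position(sql, first: false, after: nil)
      if first
        sql << " FIRST"
      elsif after
        sql << " AFTER `#{after}`"
      end
      sql
    end

    p add_column_position(+"ALTER TABLE users ADD nickname varchar(255)", after: "name")
    # => "ALTER TABLE users ADD nickname varchar(255) AFTER `name`"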
diff --git a/activerecord/lib/active_record/connection_adapters/mysql/schema_definitions.rb b/activerecord/lib/active_record/connection_adapters/mysql/schema_definitions.rb
index 157e75dbf7..d21535a709 100644
--- a/activerecord/lib/active_record/connection_adapters/mysql/schema_definitions.rb
+++ b/activerecord/lib/active_record/connection_adapters/mysql/schema_definitions.rb
@@ -1,88 +1,90 @@
+# frozen_string_literal: true
+
module ActiveRecord
module ConnectionAdapters
module MySQL
module ColumnMethods
- def primary_key(name, type = :primary_key, **options)
- options[:auto_increment] = true if type == :bigint && !options.key?(:default)
- super
- end
+ extend ActiveSupport::Concern
- def blob(*args, **options)
- args.each { |name| column(name, :blob, options) }
- end
+ ##
+ # :method: blob
+ # :call-seq: blob(*names, **options)
- def tinyblob(*args, **options)
- args.each { |name| column(name, :tinyblob, options) }
- end
+ ##
+ # :method: tinyblob
+ # :call-seq: tinyblob(*names, **options)
- def mediumblob(*args, **options)
- args.each { |name| column(name, :mediumblob, options) }
- end
+ ##
+ # :method: mediumblob
+ # :call-seq: mediumblob(*names, **options)
- def longblob(*args, **options)
- args.each { |name| column(name, :longblob, options) }
- end
+ ##
+ # :method: longblob
+ # :call-seq: longblob(*names, **options)
- def tinytext(*args, **options)
- args.each { |name| column(name, :tinytext, options) }
- end
+ ##
+ # :method: tinytext
+ # :call-seq: tinytext(*names, **options)
- def mediumtext(*args, **options)
- args.each { |name| column(name, :mediumtext, options) }
- end
+ ##
+ # :method: mediumtext
+ # :call-seq: mediumtext(*names, **options)
- def longtext(*args, **options)
- args.each { |name| column(name, :longtext, options) }
- end
+ ##
+ # :method: longtext
+ # :call-seq: longtext(*names, **options)
- def json(*args, **options)
- args.each { |name| column(name, :json, options) }
- end
+ ##
+ # :method: unsigned_integer
+ # :call-seq: unsigned_integer(*names, **options)
- def unsigned_integer(*args, **options)
- args.each { |name| column(name, :unsigned_integer, options) }
- end
+ ##
+ # :method: unsigned_bigint
+ # :call-seq: unsigned_bigint(*names, **options)
- def unsigned_bigint(*args, **options)
- args.each { |name| column(name, :unsigned_bigint, options) }
- end
+ ##
+ # :method: unsigned_float
+ # :call-seq: unsigned_float(*names, **options)
- def unsigned_float(*args, **options)
- args.each { |name| column(name, :unsigned_float, options) }
- end
+ ##
+ # :method: unsigned_decimal
+ # :call-seq: unsigned_decimal(*names, **options)
- def unsigned_decimal(*args, **options)
- args.each { |name| column(name, :unsigned_decimal, options) }
+ included do
+ define_column_methods :blob, :tinyblob, :mediumblob, :longblob,
+ :tinytext, :mediumtext, :longtext, :unsigned_integer, :unsigned_bigint,
+ :unsigned_float, :unsigned_decimal
end
end
- class ColumnDefinition < ActiveRecord::ConnectionAdapters::ColumnDefinition
- attr_accessor :charset, :unsigned
- end
-
class TableDefinition < ActiveRecord::ConnectionAdapters::TableDefinition
include ColumnMethods
- def new_column_definition(name, type, options) # :nodoc:
- column = super
- case column.type
+ def new_column_definition(name, type, **options) # :nodoc:
+ case type
+ when :virtual
+ type = options[:type]
when :primary_key
- column.type = :integer
- column.auto_increment = true
+ type = :integer
+ options[:limit] ||= 8
+ options[:primary_key] = true
when /\Aunsigned_(?<type>.+)\z/
- column.type = $~[:type].to_sym
- column.unsigned = true
+ type = $~[:type].to_sym
+ options[:unsigned] = true
end
- column.unsigned ||= options[:unsigned]
- column.charset = options[:charset]
- column
+
+ super
end
private
+ def aliased_types(name, fallback)
+ fallback
+ end
- def create_column_definition(name, type)
- MySQL::ColumnDefinition.new(name, type)
- end
+ def integer_like_primary_key_type(type, options)
+ options[:auto_increment] = true
+ type
+ end
end
class Table < ActiveRecord::ConnectionAdapters::Table
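
The per-type helpers (blob, tinyblob, and so on) are replaced above by a single define_column_methods call plus RDoc stubs. A hedged sketch of what such a macro can look like; this illustrates the metaprogramming pattern, not the Rails implementation:

    # Illustrative define_column_methods-style macro: each generated helper
    # forwards to #column with its own type symbol.
    module ColumnMethodsSketch
      def self.define_column_methods(*types)
        types.each do |type|
          define_method(type) do |*names, **options|
            names.each { |name| column(name, type, **options) }
          end
        end
      end

      define_column_methods :blob, :tinyblob, :longtext
    end

    class FakeTable
      include ColumnMethodsSketch

      def column(name, type, **options)
        puts "add column #{name} #{type} limit=#{options[:limit]}"
      end
    end

    FakeTable.new.blob(:avatar, :attachment, limit: 1024)
    # add column avatar blob limit=1024
    # add column attachment blob limit=1024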
diff --git a/activerecord/lib/active_record/connection_adapters/mysql/schema_dumper.rb b/activerecord/lib/active_record/connection_adapters/mysql/schema_dumper.rb
index be40df4101..57518b02fa 100644
--- a/activerecord/lib/active_record/connection_adapters/mysql/schema_dumper.rb
+++ b/activerecord/lib/active_record/connection_adapters/mysql/schema_dumper.rb
@@ -1,53 +1,85 @@
+# frozen_string_literal: true
+
module ActiveRecord
module ConnectionAdapters
module MySQL
- module ColumnDumper
- def column_spec_for_primary_key(column)
- if column.bigint?
- spec = { id: :bigint.inspect }
- spec[:default] = schema_default(column) || 'nil' unless column.auto_increment?
- else
- spec = super.except!(:null)
- end
- spec[:unsigned] = 'true' if column.unsigned?
- spec
- end
-
- def prepare_column_options(column)
- spec = super
- spec[:unsigned] = 'true' if column.unsigned?
- spec
- end
-
- def migration_keys
- super + [:unsigned]
- end
-
+ class SchemaDumper < ConnectionAdapters::SchemaDumper # :nodoc:
private
+ def prepare_column_options(column)
+ spec = super
+ spec[:unsigned] = "true" if column.unsigned?
+ spec[:auto_increment] = "true" if column.auto_increment?
+
+ if /\A(?<size>tiny|medium|long)(?:text|blob)/ =~ column.sql_type
+ spec = { size: size.to_sym.inspect }.merge!(spec)
+ end
+
+ if @connection.supports_virtual_columns? && column.virtual?
+ spec[:as] = extract_expression_for_virtual_column(column)
+ spec[:stored] = "true" if /\b(?:STORED|PERSISTENT)\b/.match?(column.extra)
+ spec = { type: schema_type(column).inspect }.merge!(spec)
+ end
+
+ spec
+ end
+
+ def column_spec_for_primary_key(column)
+ spec = super
+ spec.delete(:auto_increment) if column.type == :integer && column.auto_increment?
+ spec
+ end
- def default_primary_key?(column)
- super && column.auto_increment?
- end
+ def default_primary_key?(column)
+ super && column.auto_increment? && !column.unsigned?
+ end
+
+ def explicit_primary_key_default?(column)
+ column.type == :integer && !column.auto_increment?
+ end
+
+ def schema_type(column)
+ case column.sql_type
+ when /\Atimestamp\b/
+ :timestamp
+ else
+ super
+ end
+ end
+
+ def schema_limit(column)
+ super unless /\A(?:tiny|medium|long)?(?:text|blob)/.match?(column.sql_type)
+ end
- def schema_type(column)
- if column.sql_type == 'tinyblob'
- :blob
- else
- super
+ def schema_precision(column)
+ super unless /\A(?:date)?time(?:stamp)?\b/.match?(column.sql_type) && column.precision == 0
end
- end
- def schema_precision(column)
- super unless /time/ === column.sql_type && column.precision == 0
- end
+ def schema_collation(column)
+ if column.collation && table_name = column.table_name
+ @table_collation_cache ||= {}
+ @table_collation_cache[table_name] ||=
+ @connection.exec_query("SHOW TABLE STATUS LIKE #{@connection.quote(table_name)}", "SCHEMA").first["Collation"]
+ column.collation.inspect if column.collation != @table_collation_cache[table_name]
+ end
+ end
- def schema_collation(column)
- if column.collation && table_name = column.table_name
- @table_collation_cache ||= {}
- @table_collation_cache[table_name] ||= select_one("SHOW TABLE STATUS LIKE '#{table_name}'")["Collation"]
- column.collation.inspect if column.collation != @table_collation_cache[table_name]
+ def extract_expression_for_virtual_column(column)
+ if @connection.mariadb? && @connection.version < "10.2.5"
+ create_table_info = @connection.send(:create_table_info, column.table_name)
+ column_name = @connection.quote_column_name(column.name)
+ if %r/#{column_name} #{Regexp.quote(column.sql_type)}(?: COLLATE \w+)? AS \((?<expression>.+?)\) #{column.extra}/ =~ create_table_info
+ $~[:expression].inspect
+ end
+ else
+ scope = @connection.send(:quoted_scope, column.table_name)
+ column_name = @connection.quote(column.name)
+ sql = "SELECT generation_expression FROM information_schema.columns" \
+ " WHERE table_schema = #{scope[:schema]}" \
+ " AND table_name = #{scope[:name]}" \
+ " AND column_name = #{column_name}"
+ @connection.query_value(sql, "SCHEMA").inspect
+ end
end
- end
end
end
end
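
The dumper now derives a :size option from the tiny/medium/long prefix of the sql_type via a named capture. That match can be tried in isolation:

    # Size extraction as used by prepare_column_options above.
    def size_from_sql_type(sql_type)
      sql_type[/\A(?<size>tiny|medium|long)(?:text|blob)/, "size"]&.to_sym
    end

    p size_from_sql_type("mediumtext")    # => :medium
    p size_from_sql_type("longblob")      # => :long
    p size_from_sql_type("varchar(255)")  # => nil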
diff --git a/activerecord/lib/active_record/connection_adapters/mysql/schema_statements.rb b/activerecord/lib/active_record/connection_adapters/mysql/schema_statements.rb
new file mode 100644
index 0000000000..4018f0815c
--- /dev/null
+++ b/activerecord/lib/active_record/connection_adapters/mysql/schema_statements.rb
@@ -0,0 +1,261 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ module ConnectionAdapters
+ module MySQL
+ module SchemaStatements # :nodoc:
+ # Returns an array of indexes for the given table.
+ def indexes(table_name)
+ indexes = []
+ current_index = nil
+ execute_and_free("SHOW KEYS FROM #{quote_table_name(table_name)}", "SCHEMA") do |result|
+ each_hash(result) do |row|
+ if current_index != row[:Key_name]
+ next if row[:Key_name] == "PRIMARY" # skip the primary key
+ current_index = row[:Key_name]
+
+ mysql_index_type = row[:Index_type].downcase.to_sym
+ case mysql_index_type
+ when :fulltext, :spatial
+ index_type = mysql_index_type
+ when :btree, :hash
+ index_using = mysql_index_type
+ end
+
+ indexes << [
+ row[:Table],
+ row[:Key_name],
+ row[:Non_unique].to_i == 0,
+ [],
+ lengths: {},
+ orders: {},
+ type: index_type,
+ using: index_using,
+ comment: row[:Index_comment].presence
+ ]
+ end
+
+ if row[:Expression]
+ expression = row[:Expression]
+ expression = +"(#{expression})" unless expression.start_with?("(")
+ indexes.last[-2] << expression
+ indexes.last[-1][:expressions] ||= {}
+ indexes.last[-1][:expressions][expression] = expression
+ indexes.last[-1][:orders][expression] = :desc if row[:Collation] == "D"
+ else
+ indexes.last[-2] << row[:Column_name]
+ indexes.last[-1][:lengths][row[:Column_name]] = row[:Sub_part].to_i if row[:Sub_part]
+ indexes.last[-1][:orders][row[:Column_name]] = :desc if row[:Collation] == "D"
+ end
+ end
+ end
+
+ indexes.map do |index|
+ options = index.last
+
+ if expressions = options.delete(:expressions)
+ orders = options.delete(:orders)
+ lengths = options.delete(:lengths)
+
+ columns = index[-2].map { |name|
+ [ name.to_sym, expressions[name] || +quote_column_name(name) ]
+ }.to_h
+
+ index[-2] = add_options_for_index_columns(
+ columns, order: orders, length: lengths
+ ).values.join(", ")
+ end
+
+ IndexDefinition.new(*index)
+ end
+ end
+
+ def remove_column(table_name, column_name, type = nil, options = {})
+ if foreign_key_exists?(table_name, column: column_name)
+ remove_foreign_key(table_name, column: column_name)
+ end
+ super
+ end
+
+ def create_table(table_name, options: default_row_format, **)
+ super
+ end
+
+ def internal_string_options_for_primary_key
+ super.tap do |options|
+ if !row_format_dynamic_by_default? && CHARSETS_OF_4BYTES_MAXLEN.include?(charset)
+ options[:collation] = collation.sub(/\A[^_]+/, "utf8")
+ end
+ end
+ end
+
+ def update_table_definition(table_name, base)
+ MySQL::Table.new(table_name, base)
+ end
+
+ def create_schema_dumper(options)
+ MySQL::SchemaDumper.create(self, options)
+ end
+
+ # Maps logical Rails types to MySQL-specific data types.
+ def type_to_sql(type, limit: nil, precision: nil, scale: nil, size: limit_to_size(limit, type), unsigned: nil, **)
+ sql =
+ case type.to_s
+ when "integer"
+ integer_to_sql(limit)
+ when "text"
+ type_with_size_to_sql("text", size)
+ when "blob"
+ type_with_size_to_sql("blob", size)
+ when "binary"
+ if (0..0xfff) === limit
+ "varbinary(#{limit})"
+ else
+ type_with_size_to_sql("blob", size)
+ end
+ else
+ super
+ end
+
+ sql = "#{sql} unsigned" if unsigned && type != :primary_key
+ sql
+ end
+
+ private
+ CHARSETS_OF_4BYTES_MAXLEN = ["utf8mb4", "utf16", "utf16le", "utf32"]
+
+ def row_format_dynamic_by_default?
+ if mariadb?
+ version >= "10.2.2"
+ else
+ version >= "5.7.9"
+ end
+ end
+
+ def default_row_format
+ return if row_format_dynamic_by_default?
+
+ unless defined?(@default_row_format)
+ if query_value("SELECT @@innodb_file_per_table = 1 AND @@innodb_file_format = 'Barracuda'") == 1
+ @default_row_format = "ROW_FORMAT=DYNAMIC"
+ else
+ @default_row_format = nil
+ end
+ end
+
+ @default_row_format
+ end
+
+ def schema_creation
+ MySQL::SchemaCreation.new(self)
+ end
+
+ def create_table_definition(*args)
+ MySQL::TableDefinition.new(self, *args)
+ end
+
+ def new_column_from_field(table_name, field)
+ type_metadata = fetch_type_metadata(field[:Type], field[:Extra])
+ default, default_function = field[:Default], nil
+
+ if type_metadata.type == :datetime && /\ACURRENT_TIMESTAMP(?:\([0-6]?\))?\z/i.match?(default)
+ default, default_function = nil, default
+ elsif type_metadata.extra == "DEFAULT_GENERATED"
+ default = +"(#{default})" unless default.start_with?("(")
+ default, default_function = nil, default
+ end
+
+ MySQL::Column.new(
+ field[:Field],
+ default,
+ type_metadata,
+ field[:Null] == "YES",
+ table_name,
+ default_function,
+ field[:Collation],
+ comment: field[:Comment].presence
+ )
+ end
+
+ def fetch_type_metadata(sql_type, extra = "")
+ MySQL::TypeMetadata.new(super(sql_type), extra: extra)
+ end
+
+ def extract_foreign_key_action(specifier)
+ super unless specifier == "RESTRICT"
+ end
+
+ def add_index_length(quoted_columns, **options)
+ lengths = options_for_index_columns(options[:length])
+ quoted_columns.each do |name, column|
+ column << "(#{lengths[name]})" if lengths[name].present?
+ end
+ end
+
+ def add_options_for_index_columns(quoted_columns, **options)
+ quoted_columns = add_index_length(quoted_columns, options)
+ super
+ end
+
+ def data_source_sql(name = nil, type: nil)
+ scope = quoted_scope(name, type: type)
+
+ sql = +"SELECT table_name FROM information_schema.tables"
+ sql << " WHERE table_schema = #{scope[:schema]}"
+ sql << " AND table_name = #{scope[:name]}" if scope[:name]
+ sql << " AND table_type = #{scope[:type]}" if scope[:type]
+ sql
+ end
+
+ def quoted_scope(name = nil, type: nil)
+ schema, name = extract_schema_qualified_name(name)
+ scope = {}
+ scope[:schema] = schema ? quote(schema) : "database()"
+ scope[:name] = quote(name) if name
+ scope[:type] = quote(type) if type
+ scope
+ end
+
+ def extract_schema_qualified_name(string)
+ schema, name = string.to_s.scan(/[^`.\s]+|`[^`]*`/)
+ schema, name = nil, schema unless name
+ [schema, name]
+ end
+
+ def type_with_size_to_sql(type, size)
+ case size&.to_s
+ when nil, "tiny", "medium", "long"
+ "#{size}#{type}"
+ else
+ raise ArgumentError,
+ "#{size.inspect} is invalid :size value. Only :tiny, :medium, and :long are allowed."
+ end
+ end
+
+ def limit_to_size(limit, type)
+ case type.to_s
+ when "text", "blob", "binary"
+ case limit
+ when 0..0xff; "tiny"
+ when nil, 0x100..0xffff; nil
+ when 0x10000..0xffffff; "medium"
+ when 0x1000000..0xffffffff; "long"
+ else raise ActiveRecordError, "No #{type} type has byte size #{limit}"
+ end
+ end
+ end
+
+ def integer_to_sql(limit)
+ case limit
+ when 1; "tinyint"
+ when 2; "smallint"
+ when 3; "mediumint"
+ when nil, 4; "int"
+ when 5..8; "bigint"
+ else raise ActiveRecordError, "No integer type has byte size #{limit}. Use a decimal with scale 0 instead."
+ end
+ end
+ end
+ end
+ end
+end
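
The type_to_sql override above maps Rails limits and sizes onto MySQL column types. A few illustrative expectations, assuming conn is a Mysql2 connection obtained via ActiveRecord::Base.connection:

    conn = ActiveRecord::Base.connection
    conn.type_to_sql(:integer, limit: 2)      # => "smallint"
    conn.type_to_sql(:integer, limit: 8)      # => "bigint"
    conn.type_to_sql(:text, size: :long)      # => "longtext"
    conn.type_to_sql(:binary, limit: 16)      # => "varbinary(16)"
    conn.type_to_sql(:binary, limit: 0x20000) # => "mediumblob"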
diff --git a/activerecord/lib/active_record/connection_adapters/mysql/type_metadata.rb b/activerecord/lib/active_record/connection_adapters/mysql/type_metadata.rb
index e1e3f7b472..7ad0944d51 100644
--- a/activerecord/lib/active_record/connection_adapters/mysql/type_metadata.rb
+++ b/activerecord/lib/active_record/connection_adapters/mysql/type_metadata.rb
@@ -1,14 +1,17 @@
+# frozen_string_literal: true
+
module ActiveRecord
module ConnectionAdapters
module MySQL
class TypeMetadata < DelegateClass(SqlTypeMetadata) # :nodoc:
- attr_reader :extra, :strict
+ undef to_yaml if method_defined?(:to_yaml)
+
+ attr_reader :extra
- def initialize(type_metadata, extra: "", strict: false)
+ def initialize(type_metadata, extra: "")
super(type_metadata)
@type_metadata = type_metadata
@extra = extra
- @strict = strict
end
def ==(other)
@@ -23,9 +26,9 @@ module ActiveRecord
protected
- def attributes_for_hash
- [self.class, @type_metadata, extra, strict]
- end
+ def attributes_for_hash
+ [self.class, @type_metadata, extra]
+ end
end
end
end
diff --git a/activerecord/lib/active_record/connection_adapters/mysql2_adapter.rb b/activerecord/lib/active_record/connection_adapters/mysql2_adapter.rb
index e7541748de..9bdaa00336 100644
--- a/activerecord/lib/active_record/connection_adapters/mysql2_adapter.rb
+++ b/activerecord/lib/active_record/connection_adapters/mysql2_adapter.rb
@@ -1,23 +1,22 @@
-require 'active_record/connection_adapters/abstract_mysql_adapter'
+# frozen_string_literal: true
-gem 'mysql2', '>= 0.3.18', '< 0.5'
-require 'mysql2'
+require "active_record/connection_adapters/abstract_mysql_adapter"
+require "active_record/connection_adapters/mysql/database_statements"
+
+gem "mysql2", ">= 0.4.4"
+require "mysql2"
module ActiveRecord
module ConnectionHandling # :nodoc:
# Establishes a connection to the database that's used by all Active Record objects.
def mysql2_connection(config)
config = config.symbolize_keys
-
- config[:username] = 'root' if config[:username].nil?
config[:flags] ||= 0
- if Mysql2::Client.const_defined? :FOUND_ROWS
- if config[:flags].kind_of? Array
- config[:flags].push "FOUND_ROWS".freeze
- else
- config[:flags] |= Mysql2::Client::FOUND_ROWS
- end
+ if config[:flags].kind_of? Array
+ config[:flags].push "FOUND_ROWS"
+ else
+ config[:flags] |= Mysql2::Client::FOUND_ROWS
end
client = Mysql2::Client.new(config)
@@ -33,23 +32,41 @@ module ActiveRecord
module ConnectionAdapters
class Mysql2Adapter < AbstractMysqlAdapter
- ADAPTER_NAME = 'Mysql2'.freeze
+ ADAPTER_NAME = "Mysql2"
+
+ include MySQL::DatabaseStatements
def initialize(connection, logger, connection_options, config)
super
- @prepared_statements = false
+ @prepared_statements = false unless config.key?(:prepared_statements)
configure_connection
end
def supports_json?
- !mariadb? && version >= '5.7.8'
+ !mariadb? && version >= "5.7.8"
+ end
+
+ def supports_comments?
+ true
+ end
+
+ def supports_comments_in_create?
+ true
+ end
+
+ def supports_savepoints?
+ true
+ end
+
+ def supports_lazy_transactions?
+ true
end
# HELPER METHODS ===========================================
def each_hash(result) # :nodoc:
if block_given?
- result.each(:as => :hash, :symbolize_keys => true) do |row|
+ result.each(as: :hash, symbolize_keys: true) do |row|
yield row
end
else
@@ -74,7 +91,6 @@ module ActiveRecord
#++
def active?
- return false unless @connection
@connection.ping
end
@@ -89,76 +105,29 @@ module ActiveRecord
# Otherwise, this method does nothing.
def disconnect!
super
- unless @connection.nil?
- @connection.close
- @connection = nil
- end
- end
-
- #--
- # DATABASE STATEMENTS ======================================
- #++
-
- # Returns a record hash with the column names as keys and column values
- # as values.
- def select_one(arel, name = nil, binds = [])
- arel, binds = binds_from_relation(arel, binds)
- execute(to_sql(arel, binds), name).each(as: :hash) do |row|
- @connection.next_result while @connection.more_results?
- return row
- end
- end
-
- # Returns an array of arrays containing the field values.
- # Order is the same as that returned by +columns+.
- def select_rows(sql, name = nil, binds = [])
- result = execute(sql, name)
- @connection.next_result while @connection.more_results?
- result.to_a
- end
-
- # Executes the SQL statement in the context of this connection.
- def execute(sql, name = nil)
- if @connection
- # make sure we carry over any changes to ActiveRecord::Base.default_timezone that have been
- # made since we established the connection
- @connection.query_options[:database_timezone] = ActiveRecord::Base.default_timezone
- end
-
- super
- end
-
- def exec_query(sql, name = 'SQL', binds = [], prepare: false)
- result = execute(sql, name)
- @connection.next_result while @connection.more_results?
- ActiveRecord::Result.new(result.fields, result.to_a) if result
+ @connection.close
end
- def exec_delete(sql, name, binds)
- execute to_sql(sql, binds), name
- @connection.affected_rows
- end
- alias :exec_update :exec_delete
-
- def last_inserted_id(result)
- @connection.last_id
+ def discard! # :nodoc:
+ @connection.automatic_close = false
+ @connection = nil
end
private
- def connect
- @connection = Mysql2::Client.new(@config)
- configure_connection
- end
+ def connect
+ @connection = Mysql2::Client.new(@config)
+ configure_connection
+ end
- def configure_connection
- @connection.query_options.merge!(:as => :array)
- super
- end
+ def configure_connection
+ @connection.query_options[:as] = :array
+ super
+ end
- def full_version
- @full_version ||= @connection.server_info[:version]
- end
+ def full_version
+ @full_version ||= @connection.server_info[:version]
+ end
end
end
end
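
Since prepared statements now default to off only when the configuration says nothing about them, they can be opted into explicitly. A minimal sketch, with an assumed database name:

    ActiveRecord::Base.establish_connection(
      adapter: "mysql2",
      database: "app_development",  # illustrative
      prepared_statements: true     # honored because the key is present in the config
    )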
diff --git a/activerecord/lib/active_record/connection_adapters/postgresql/column.rb b/activerecord/lib/active_record/connection_adapters/postgresql/column.rb
index 3ad1911a28..3ccc7271ab 100644
--- a/activerecord/lib/active_record/connection_adapters/postgresql/column.rb
+++ b/activerecord/lib/active_record/connection_adapters/postgresql/column.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
module ActiveRecord
module ConnectionAdapters
# PostgreSQL-specific extensions to column definitions in a table.
@@ -5,11 +7,37 @@ module ActiveRecord
delegate :array, :oid, :fmod, to: :sql_type_metadata
alias :array? :array
+ def initialize(*, max_identifier_length: 63, **)
+ super
+ @max_identifier_length = max_identifier_length
+ end
+
def serial?
return unless default_function
- %r{\Anextval\('"?#{table_name}_#{name}_seq"?'::regclass\)\z} === default_function
+ if %r{\Anextval\('"?(?<sequence_name>.+_(?<suffix>seq\d*))"?'::regclass\)\z} =~ default_function
+ sequence_name_from_parts(table_name, name, suffix) == sequence_name
+ end
end
+
+ private
+ attr_reader :max_identifier_length
+
+ def sequence_name_from_parts(table_name, column_name, suffix)
+ over_length = [table_name, column_name, suffix].map(&:length).sum + 2 - max_identifier_length
+
+ if over_length > 0
+ column_name_length = [(max_identifier_length - suffix.length - 2) / 2, column_name.length].min
+ over_length -= column_name.length - column_name_length
+ column_name = column_name[0, column_name_length - [over_length, 0].min]
+ end
+
+ if over_length > 0
+ table_name = table_name[0, table_name.length - over_length]
+ end
+
+ "#{table_name}_#{column_name}_#{suffix}"
+ end
end
end
end
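
serial? above rebuilds the sequence name the way PostgreSQL itself truncates identifiers longer than max_identifier_length (63 by default): the column part is shortened first, then the table part. An illustrative expectation for the private helper, using made-up 40-character names:

    # sequence_name_from_parts("a" * 40, "b" * 40, "seq")
    # # => "#{"a" * 29}_#{"b" * 29}_seq"   (exactly 63 characters)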
diff --git a/activerecord/lib/active_record/connection_adapters/postgresql/database_statements.rb b/activerecord/lib/active_record/connection_adapters/postgresql/database_statements.rb
index 6f2e03b370..ae7dbd2868 100644
--- a/activerecord/lib/active_record/connection_adapters/postgresql/database_statements.rb
+++ b/activerecord/lib/active_record/connection_adapters/postgresql/database_statements.rb
@@ -1,38 +1,12 @@
+# frozen_string_literal: true
+
module ActiveRecord
module ConnectionAdapters
module PostgreSQL
module DatabaseStatements
def explain(arel, binds = [])
sql = "EXPLAIN #{to_sql(arel, binds)}"
- PostgreSQL::ExplainPrettyPrinter.new.pp(exec_query(sql, 'EXPLAIN', binds))
- end
-
- def select_value(arel, name = nil, binds = [])
- arel, binds = binds_from_relation arel, binds
- sql = to_sql(arel, binds)
- execute_and_clear(sql, name, binds) do |result|
- result.getvalue(0, 0) if result.ntuples > 0 && result.nfields > 0
- end
- end
-
- def select_values(arel, name = nil, binds = [])
- arel, binds = binds_from_relation arel, binds
- sql = to_sql(arel, binds)
- execute_and_clear(sql, name, binds) do |result|
- if result.nfields > 0
- result.column_values(0)
- else
- []
- end
- end
- end
-
- # Executes a SELECT query and returns an array of rows. Each row is an
- # array of field values.
- def select_rows(sql, name = nil, binds = [])
- execute_and_clear(sql, name, binds) do |result|
- result.values
- end
+ PostgreSQL::ExplainPrettyPrinter.new.pp(exec_query(sql, "EXPLAIN", binds))
end
# The internal PostgreSQL identifier of the money data type.
@@ -74,9 +48,9 @@ module ActiveRecord
# (2) $12.345.678,12
case data
when /^-?\D+[\d,]+\.\d{2}$/ # (1)
- data.gsub!(/[^-\d.]/, '')
+ data.gsub!(/[^-\d.]/, "")
when /^-?\D+[\d.]+,\d{2}$/ # (2)
- data.gsub!(/[^-\d,]/, '').sub!(/,/, '.')
+ data.gsub!(/[^-\d,]/, "").sub!(/,/, ".")
end
end
end
@@ -84,22 +58,41 @@ module ActiveRecord
# Queries the database and returns the results in an Array-like object
def query(sql, name = nil) #:nodoc:
+ materialize_transactions
+
log(sql, name) do
- result_as_array @connection.async_exec(sql)
+ ActiveSupport::Dependencies.interlock.permit_concurrent_loads do
+ result_as_array @connection.async_exec(sql)
+ end
end
end
- # Executes an SQL statement, returning a PGresult object on success
- # or raising a PGError exception otherwise.
- # Note: the PGresult object is manually memory managed; if you don't
- # need it specifically, you many want consider the exec_query wrapper.
+ READ_QUERY = ActiveRecord::ConnectionAdapters::AbstractAdapter.build_read_query_regexp(:begin, :commit, :explain, :select, :set, :show, :release, :savepoint, :rollback) # :nodoc:
+ private_constant :READ_QUERY
+
+ def write_query?(sql) # :nodoc:
+ !READ_QUERY.match?(sql)
+ end
+
+ # Executes an SQL statement, returning a PG::Result object on success
+ # or raising a PG::Error exception otherwise.
+ # Note: the PG::Result object is manually memory managed; if you don't
+      # need it specifically, you may want to consider the <tt>exec_query</tt> wrapper.
+      # need it specifically, you may want to consider the <tt>exec_query</tt> wrapper.
def execute(sql, name = nil)
+ if preventing_writes? && write_query?(sql)
+ raise ActiveRecord::ReadOnlyError, "Write query attempted while in readonly mode: #{sql}"
+ end
+
+ materialize_transactions
+
log(sql, name) do
- @connection.async_exec(sql)
+ ActiveSupport::Dependencies.interlock.permit_concurrent_loads do
+ @connection.async_exec(sql)
+ end
end
end
- def exec_query(sql, name = 'SQL', binds = [], prepare: false)
+ def exec_query(sql, name = "SQL", binds = [], prepare: false)
execute_and_clear(sql, name, binds, prepare: prepare) do |result|
types = {}
fields = result.fields
@@ -112,36 +105,41 @@ module ActiveRecord
end
end
- def exec_delete(sql, name = 'SQL', binds = [])
- execute_and_clear(sql, name, binds) {|result| result.cmd_tuples }
+ def exec_delete(sql, name = nil, binds = [])
+ execute_and_clear(sql, name, binds) { |result| result.cmd_tuples }
end
alias :exec_update :exec_delete
- def sql_for_insert(sql, pk, id_value, sequence_name, binds) # :nodoc:
+ def sql_for_insert(sql, pk, sequence_name, binds) # :nodoc:
if pk.nil?
# Extract the table from the insert sql. Yuck.
table_ref = extract_table_ref_from_insert_sql(sql)
pk = primary_key(table_ref) if table_ref
end
- if pk && use_insert_returning?
+ if pk = suppress_composite_primary_key(pk)
sql = "#{sql} RETURNING #{quote_column_name(pk)}"
end
super
end
+ private :sql_for_insert
- def exec_insert(sql, name, binds, pk = nil, sequence_name = nil)
- val = exec_query(sql, name, binds)
- if !use_insert_returning? && pk
+ def exec_insert(sql, name = nil, binds = [], pk = nil, sequence_name = nil)
+ if use_insert_returning? || pk == false
+ super
+ else
+ result = exec_query(sql, name, binds)
unless sequence_name
table_ref = extract_table_ref_from_insert_sql(sql)
- sequence_name = default_sequence_name(table_ref, pk)
- return val unless sequence_name
+ if table_ref
+ pk = primary_key(table_ref) if pk.nil?
+ pk = suppress_composite_primary_key(pk)
+ sequence_name = default_sequence_name(table_ref, pk)
+ end
+ return result unless sequence_name
end
last_insert_id_result(sequence_name)
- else
- val
end
end
@@ -164,6 +162,20 @@ module ActiveRecord
def exec_rollback_db_transaction
execute "ROLLBACK"
end
+
+ private
+ def build_truncate_statements(*table_names)
+ "TRUNCATE TABLE #{table_names.map(&method(:quote_table_name)).join(", ")}"
+ end
+
+ # Returns the current ID of a table's sequence.
+ def last_insert_id_result(sequence_name)
+ exec_query("SELECT currval(#{quote(sequence_name)})", "SQL")
+ end
+
+ def suppress_composite_primary_key(pk)
+ pk unless pk.is_a?(Array)
+ end
end
end
end
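
execute now refuses obviously mutating SQL while the connection is preventing writes; READ_QUERY is a regexp built from the leading keywords listed above. A rough illustration, assuming conn is a PostgreSQL connection:

    conn.write_query?("SELECT * FROM posts")              # => false
    conn.write_query?("SHOW search_path")                 # => false
    conn.write_query?("INSERT INTO posts DEFAULT VALUES") # => true (execute would raise
                                                          #    ReadOnlyError while preventing writes)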
diff --git a/activerecord/lib/active_record/connection_adapters/postgresql/explain_pretty_printer.rb b/activerecord/lib/active_record/connection_adapters/postgresql/explain_pretty_printer.rb
index 789b88912c..086a5dcc15 100644
--- a/activerecord/lib/active_record/connection_adapters/postgresql/explain_pretty_printer.rb
+++ b/activerecord/lib/active_record/connection_adapters/postgresql/explain_pretty_printer.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
module ActiveRecord
module ConnectionAdapters
module PostgreSQL
@@ -26,12 +28,12 @@ module ActiveRecord
pp = []
pp << header.center(width).rstrip
- pp << '-' * width
+ pp << "-" * width
- pp += lines.map {|line| " #{line}"}
+ pp += lines.map { |line| " #{line}" }
nrows = result.rows.length
- rows_label = nrows == 1 ? 'row' : 'rows'
+ rows_label = nrows == 1 ? "row" : "rows"
pp << "(#{nrows} #{rows_label})"
pp.join("\n") + "\n"
diff --git a/activerecord/lib/active_record/connection_adapters/postgresql/oid.rb b/activerecord/lib/active_record/connection_adapters/postgresql/oid.rb
index 68752cdd80..247a25054e 100644
--- a/activerecord/lib/active_record/connection_adapters/postgresql/oid.rb
+++ b/activerecord/lib/active_record/connection_adapters/postgresql/oid.rb
@@ -1,25 +1,28 @@
-require 'active_record/connection_adapters/postgresql/oid/array'
-require 'active_record/connection_adapters/postgresql/oid/bit'
-require 'active_record/connection_adapters/postgresql/oid/bit_varying'
-require 'active_record/connection_adapters/postgresql/oid/bytea'
-require 'active_record/connection_adapters/postgresql/oid/cidr'
-require 'active_record/connection_adapters/postgresql/oid/date_time'
-require 'active_record/connection_adapters/postgresql/oid/decimal'
-require 'active_record/connection_adapters/postgresql/oid/enum'
-require 'active_record/connection_adapters/postgresql/oid/hstore'
-require 'active_record/connection_adapters/postgresql/oid/inet'
-require 'active_record/connection_adapters/postgresql/oid/json'
-require 'active_record/connection_adapters/postgresql/oid/jsonb'
-require 'active_record/connection_adapters/postgresql/oid/money'
-require 'active_record/connection_adapters/postgresql/oid/point'
-require 'active_record/connection_adapters/postgresql/oid/rails_5_1_point'
-require 'active_record/connection_adapters/postgresql/oid/range'
-require 'active_record/connection_adapters/postgresql/oid/specialized_string'
-require 'active_record/connection_adapters/postgresql/oid/uuid'
-require 'active_record/connection_adapters/postgresql/oid/vector'
-require 'active_record/connection_adapters/postgresql/oid/xml'
+# frozen_string_literal: true
-require 'active_record/connection_adapters/postgresql/oid/type_map_initializer'
+require "active_record/connection_adapters/postgresql/oid/array"
+require "active_record/connection_adapters/postgresql/oid/bit"
+require "active_record/connection_adapters/postgresql/oid/bit_varying"
+require "active_record/connection_adapters/postgresql/oid/bytea"
+require "active_record/connection_adapters/postgresql/oid/cidr"
+require "active_record/connection_adapters/postgresql/oid/date"
+require "active_record/connection_adapters/postgresql/oid/date_time"
+require "active_record/connection_adapters/postgresql/oid/decimal"
+require "active_record/connection_adapters/postgresql/oid/enum"
+require "active_record/connection_adapters/postgresql/oid/hstore"
+require "active_record/connection_adapters/postgresql/oid/inet"
+require "active_record/connection_adapters/postgresql/oid/jsonb"
+require "active_record/connection_adapters/postgresql/oid/money"
+require "active_record/connection_adapters/postgresql/oid/oid"
+require "active_record/connection_adapters/postgresql/oid/point"
+require "active_record/connection_adapters/postgresql/oid/legacy_point"
+require "active_record/connection_adapters/postgresql/oid/range"
+require "active_record/connection_adapters/postgresql/oid/specialized_string"
+require "active_record/connection_adapters/postgresql/oid/uuid"
+require "active_record/connection_adapters/postgresql/oid/vector"
+require "active_record/connection_adapters/postgresql/oid/xml"
+
+require "active_record/connection_adapters/postgresql/oid/type_map_initializer"
module ActiveRecord
module ConnectionAdapters
diff --git a/activerecord/lib/active_record/connection_adapters/postgresql/oid/array.rb b/activerecord/lib/active_record/connection_adapters/postgresql/oid/array.rb
index 87593ef704..b1dfbde86e 100644
--- a/activerecord/lib/active_record/connection_adapters/postgresql/oid/array.rb
+++ b/activerecord/lib/active_record/connection_adapters/postgresql/oid/array.rb
@@ -1,14 +1,18 @@
+# frozen_string_literal: true
+
module ActiveRecord
module ConnectionAdapters
module PostgreSQL
module OID # :nodoc:
class Array < Type::Value # :nodoc:
- include Type::Helpers::Mutable
+ include ActiveModel::Type::Helpers::Mutable
+
+ Data = Struct.new(:encoder, :values) # :nodoc:
attr_reader :subtype, :delimiter
- delegate :type, :user_input_in_time_zone, :limit, to: :subtype
+ delegate :type, :user_input_in_time_zone, :limit, :precision, :scale, to: :subtype
- def initialize(subtype, delimiter = ',')
+ def initialize(subtype, delimiter = ",")
@subtype = subtype
@delimiter = delimiter
@@ -17,8 +21,11 @@ module ActiveRecord
end
def deserialize(value)
- if value.is_a?(::String)
+ case value
+ when ::String
type_cast_array(@pg_decoder.decode(value), :deserialize)
+ when Data
+ type_cast_array(value.values, :deserialize)
else
super
end
@@ -26,14 +33,21 @@ module ActiveRecord
def cast(value)
if value.is_a?(::String)
- value = @pg_decoder.decode(value)
+ value = begin
+ @pg_decoder.decode(value)
+ rescue TypeError
+ # malformed array string is treated as [], will raise in PG 2.0 gem
+ # this keeps a consistent implementation
+ []
+ end
end
type_cast_array(value, :cast)
end
def serialize(value)
if value.is_a?(::Array)
- @pg_encoder.encode(type_cast_array(value, :serialize))
+ casted_values = type_cast_array(value, :serialize)
+ Data.new(@pg_encoder, casted_values)
else
super
end
@@ -54,15 +68,23 @@ module ActiveRecord
value.map(&block)
end
+ def changed_in_place?(raw_old_value, new_value)
+ deserialize(raw_old_value) != new_value
+ end
+
+ def force_equality?(value)
+ value.is_a?(::Array)
+ end
+
private
- def type_cast_array(value, method)
- if value.is_a?(::Array)
- value.map { |item| type_cast_array(item, method) }
- else
- @subtype.public_send(method, value)
+ def type_cast_array(value, method)
+ if value.is_a?(::Array)
+ value.map { |item| type_cast_array(item, method) }
+ else
+ @subtype.public_send(method, value)
+ end
end
- end
end
end
end
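
serialize above no longer returns the encoded text directly; it wraps the casted values and the PG text encoder in an Array::Data struct so the quoting layer can decide how to encode them. A small sketch, assuming an integer subtype and the pg gem loaded:

    subtype = ActiveModel::Type::Integer.new
    array   = ActiveRecord::ConnectionAdapters::PostgreSQL::OID::Array.new(subtype)
    data    = array.serialize([1, "2", nil])
    data.class   # => ActiveRecord::ConnectionAdapters::PostgreSQL::OID::Array::Data
    data.values  # => [1, 2, nil]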
diff --git a/activerecord/lib/active_record/connection_adapters/postgresql/oid/bit.rb b/activerecord/lib/active_record/connection_adapters/postgresql/oid/bit.rb
index ea0fa2517f..e9a79526f9 100644
--- a/activerecord/lib/active_record/connection_adapters/postgresql/oid/bit.rb
+++ b/activerecord/lib/active_record/connection_adapters/postgresql/oid/bit.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
module ActiveRecord
module ConnectionAdapters
module PostgreSQL
@@ -7,7 +9,7 @@ module ActiveRecord
:bit
end
- def cast(value)
+ def cast_value(value)
if ::String === value
case value
when /^0x/i
@@ -16,7 +18,7 @@ module ActiveRecord
value # Bit-string notation
end
else
- value
+ value.to_s
end
end
@@ -34,16 +36,15 @@ module ActiveRecord
end
def binary?
- /\A[01]*\Z/ === value
+ /\A[01]*\Z/.match?(value)
end
def hex?
- /\A[0-9A-F]*\Z/i === value
+ /\A[0-9A-F]*\Z/i.match?(value)
end
- protected
-
- attr_reader :value
+ private
+ attr_reader :value
end
end
end
diff --git a/activerecord/lib/active_record/connection_adapters/postgresql/oid/bit_varying.rb b/activerecord/lib/active_record/connection_adapters/postgresql/oid/bit_varying.rb
index 4c21097d48..dc7079dda2 100644
--- a/activerecord/lib/active_record/connection_adapters/postgresql/oid/bit_varying.rb
+++ b/activerecord/lib/active_record/connection_adapters/postgresql/oid/bit_varying.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
module ActiveRecord
module ConnectionAdapters
module PostgreSQL
diff --git a/activerecord/lib/active_record/connection_adapters/postgresql/oid/bytea.rb b/activerecord/lib/active_record/connection_adapters/postgresql/oid/bytea.rb
index 8f9d6e7f9b..a3c60ecef6 100644
--- a/activerecord/lib/active_record/connection_adapters/postgresql/oid/bytea.rb
+++ b/activerecord/lib/active_record/connection_adapters/postgresql/oid/bytea.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
module ActiveRecord
module ConnectionAdapters
module PostgreSQL
@@ -6,7 +8,7 @@ module ActiveRecord
def deserialize(value)
return if value.nil?
return value.to_s if value.is_a?(Type::Binary::Data)
- PGconn.unescape_bytea(super)
+ PG::Connection.unescape_bytea(super)
end
end
end
diff --git a/activerecord/lib/active_record/connection_adapters/postgresql/oid/cidr.rb b/activerecord/lib/active_record/connection_adapters/postgresql/oid/cidr.rb
index eeccb09bdf..66e99d9404 100644
--- a/activerecord/lib/active_record/connection_adapters/postgresql/oid/cidr.rb
+++ b/activerecord/lib/active_record/connection_adapters/postgresql/oid/cidr.rb
@@ -1,3 +1,7 @@
+# frozen_string_literal: true
+
+require "ipaddr"
+
module ActiveRecord
module ConnectionAdapters
module PostgreSQL
diff --git a/activerecord/lib/active_record/connection_adapters/postgresql/oid/date.rb b/activerecord/lib/active_record/connection_adapters/postgresql/oid/date.rb
new file mode 100644
index 0000000000..24a1daa95a
--- /dev/null
+++ b/activerecord/lib/active_record/connection_adapters/postgresql/oid/date.rb
@@ -0,0 +1,23 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ module ConnectionAdapters
+ module PostgreSQL
+ module OID # :nodoc:
+ class Date < Type::Date # :nodoc:
+ def cast_value(value)
+ case value
+ when "infinity" then ::Float::INFINITY
+ when "-infinity" then -::Float::INFINITY
+ when / BC$/
+ astronomical_year = format("%04d", -value[/^\d+/].to_i + 1)
+ super(value.sub(/ BC$/, "").sub(/^\d+/, astronomical_year))
+ else
+ super
+ end
+ end
+ end
+ end
+ end
+ end
+end
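
The BC branch shifts years onto the astronomical scale (1 BC becomes year 0) before handing the string to the regular date parser. Illustrative casts:

    type = ActiveRecord::ConnectionAdapters::PostgreSQL::OID::Date.new
    type.cast("infinity")       # => Float::INFINITY
    type.cast("0001-01-01 BC")  # => Date.new(0, 1, 1)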
diff --git a/activerecord/lib/active_record/connection_adapters/postgresql/oid/date_time.rb b/activerecord/lib/active_record/connection_adapters/postgresql/oid/date_time.rb
index 424769f765..cd667422f5 100644
--- a/activerecord/lib/active_record/connection_adapters/postgresql/oid/date_time.rb
+++ b/activerecord/lib/active_record/connection_adapters/postgresql/oid/date_time.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
module ActiveRecord
module ConnectionAdapters
module PostgreSQL
@@ -5,8 +7,8 @@ module ActiveRecord
class DateTime < Type::DateTime # :nodoc:
def cast_value(value)
case value
- when 'infinity' then ::Float::INFINITY
- when '-infinity' then -::Float::INFINITY
+ when "infinity" then ::Float::INFINITY
+ when "-infinity" then -::Float::INFINITY
when / BC$/
astronomical_year = format("%04d", -value[/^\d+/].to_i + 1)
super(value.sub(/ BC$/, "").sub(/^\d+/, astronomical_year))
diff --git a/activerecord/lib/active_record/connection_adapters/postgresql/oid/decimal.rb b/activerecord/lib/active_record/connection_adapters/postgresql/oid/decimal.rb
index 43d22c8daf..e7d33855c4 100644
--- a/activerecord/lib/active_record/connection_adapters/postgresql/oid/decimal.rb
+++ b/activerecord/lib/active_record/connection_adapters/postgresql/oid/decimal.rb
@@ -1,10 +1,12 @@
+# frozen_string_literal: true
+
module ActiveRecord
module ConnectionAdapters
module PostgreSQL
module OID # :nodoc:
class Decimal < Type::Decimal # :nodoc:
def infinity(options = {})
- BigDecimal.new("Infinity") * (options[:negative] ? -1 : 1)
+ BigDecimal("Infinity") * (options[:negative] ? -1 : 1)
end
end
end
diff --git a/activerecord/lib/active_record/connection_adapters/postgresql/oid/enum.rb b/activerecord/lib/active_record/connection_adapters/postgresql/oid/enum.rb
index 91d339f32c..f70f09ad95 100644
--- a/activerecord/lib/active_record/connection_adapters/postgresql/oid/enum.rb
+++ b/activerecord/lib/active_record/connection_adapters/postgresql/oid/enum.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
module ActiveRecord
module ConnectionAdapters
module PostgreSQL
@@ -9,9 +11,9 @@ module ActiveRecord
private
- def cast_value(value)
- value.to_s
- end
+ def cast_value(value)
+ value.to_s
+ end
end
end
end
diff --git a/activerecord/lib/active_record/connection_adapters/postgresql/oid/hstore.rb b/activerecord/lib/active_record/connection_adapters/postgresql/oid/hstore.rb
index 9270fc9f21..7b42677101 100644
--- a/activerecord/lib/active_record/connection_adapters/postgresql/oid/hstore.rb
+++ b/activerecord/lib/active_record/connection_adapters/postgresql/oid/hstore.rb
@@ -1,9 +1,11 @@
+# frozen_string_literal: true
+
module ActiveRecord
module ConnectionAdapters
module PostgreSQL
module OID # :nodoc:
class Hstore < Type::Value # :nodoc:
- include Type::Helpers::Mutable
+ include ActiveModel::Type::Helpers::Mutable
def type
:hstore
@@ -12,8 +14,8 @@ module ActiveRecord
def deserialize(value)
if value.is_a?(::String)
::Hash[value.scan(HstorePair).map { |k, v|
- v = v.upcase == 'NULL' ? nil : v.gsub(/\A"(.*)"\Z/m,'\1').gsub(/\\(.)/, '\1')
- k = k.gsub(/\A"(.*)"\Z/m,'\1').gsub(/\\(.)/, '\1')
+ v = v.upcase == "NULL" ? nil : v.gsub(/\A"(.*)"\Z/m, '\1').gsub(/\\(.)/, '\1')
+ k = k.gsub(/\A"(.*)"\Z/m, '\1').gsub(/\\(.)/, '\1')
[k, v]
}]
else
@@ -23,7 +25,9 @@ module ActiveRecord
def serialize(value)
if value.is_a?(::Hash)
- value.map { |k, v| "#{escape_hstore(k)}=>#{escape_hstore(v)}" }.join(', ')
+ value.map { |k, v| "#{escape_hstore(k)}=>#{escape_hstore(v)}" }.join(", ")
+ elsif value.respond_to?(:to_unsafe_h)
+ serialize(value.to_unsafe_h)
else
value
end
@@ -33,25 +37,33 @@ module ActiveRecord
ActiveRecord::Store::StringKeyedHashAccessor
end
+ # Will compare the Hash equivalents of +raw_old_value+ and +new_value+.
+ # By comparing hashes, this avoids an edge case where the order of
+          # the keys changes between the two hashes, and they would not be marked
+ # as equal.
+ def changed_in_place?(raw_old_value, new_value)
+ deserialize(raw_old_value) != new_value
+ end
+
private
- HstorePair = begin
- quoted_string = /"[^"\\]*(?:\\.[^"\\]*)*"/
- unquoted_string = /(?:\\.|[^\s,])[^\s=,\\]*(?:\\.[^\s=,\\]*|=[^,>])*/
- /(#{quoted_string}|#{unquoted_string})\s*=>\s*(#{quoted_string}|#{unquoted_string})/
- end
+ HstorePair = begin
+ quoted_string = /"[^"\\]*(?:\\.[^"\\]*)*"/
+ unquoted_string = /(?:\\.|[^\s,])[^\s=,\\]*(?:\\.[^\s=,\\]*|=[^,>])*/
+ /(#{quoted_string}|#{unquoted_string})\s*=>\s*(#{quoted_string}|#{unquoted_string})/
+ end
- def escape_hstore(value)
- if value.nil?
- 'NULL'
- else
- if value == ""
- '""'
+ def escape_hstore(value)
+ if value.nil?
+ "NULL"
else
- '"%s"' % value.to_s.gsub(/(["\\])/, '\\\\\1')
+ if value == ""
+ '""'
+ else
+ '"%s"' % value.to_s.gsub(/(["\\])/, '\\\\\1')
+ end
end
end
- end
end
end
end
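
An illustrative round trip for the hstore type above; serialize also accepts anything responding to to_unsafe_h, such as ActionController::Parameters:

    type = ActiveRecord::ConnectionAdapters::PostgreSQL::OID::Hstore.new
    type.serialize({ "a" => "1", "b" => nil })  # => '"a"=>"1", "b"=>NULL'
    type.deserialize('"a"=>"1", "b"=>NULL')     # => { "a" => "1", "b" => nil }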
diff --git a/activerecord/lib/active_record/connection_adapters/postgresql/oid/inet.rb b/activerecord/lib/active_record/connection_adapters/postgresql/oid/inet.rb
index 96486fa65b..55be71fd26 100644
--- a/activerecord/lib/active_record/connection_adapters/postgresql/oid/inet.rb
+++ b/activerecord/lib/active_record/connection_adapters/postgresql/oid/inet.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
module ActiveRecord
module ConnectionAdapters
module PostgreSQL
diff --git a/activerecord/lib/active_record/connection_adapters/postgresql/oid/jsonb.rb b/activerecord/lib/active_record/connection_adapters/postgresql/oid/jsonb.rb
index 87391b5dc7..e0216f1089 100644
--- a/activerecord/lib/active_record/connection_adapters/postgresql/oid/jsonb.rb
+++ b/activerecord/lib/active_record/connection_adapters/postgresql/oid/jsonb.rb
@@ -1,21 +1,13 @@
+# frozen_string_literal: true
+
module ActiveRecord
module ConnectionAdapters
module PostgreSQL
module OID # :nodoc:
- class Jsonb < Json # :nodoc:
+ class Jsonb < Type::Json # :nodoc:
def type
:jsonb
end
-
- def changed_in_place?(raw_old_value, new_value)
- # Postgres does not preserve insignificant whitespaces when
- # round-tripping jsonb columns. This causes some false positives for
- # the comparison here. Therefore, we need to parse and re-dump the
- # raw value here to ensure the insignificant whitespaces are
- # consistent with our encoder's output.
- raw_old_value = serialize(deserialize(raw_old_value))
- super(raw_old_value, new_value)
- end
end
end
end
diff --git a/activerecord/lib/active_record/connection_adapters/postgresql/oid/legacy_point.rb b/activerecord/lib/active_record/connection_adapters/postgresql/oid/legacy_point.rb
new file mode 100644
index 0000000000..7f6adc351c
--- /dev/null
+++ b/activerecord/lib/active_record/connection_adapters/postgresql/oid/legacy_point.rb
@@ -0,0 +1,45 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ module ConnectionAdapters
+ module PostgreSQL
+ module OID # :nodoc:
+ class LegacyPoint < Type::Value # :nodoc:
+ include ActiveModel::Type::Helpers::Mutable
+
+ def type
+ :point
+ end
+
+ def cast(value)
+ case value
+ when ::String
+ if value[0] == "(" && value[-1] == ")"
+ value = value[1...-1]
+ end
+ cast(value.split(","))
+ when ::Array
+ value.map { |v| Float(v) }
+ else
+ value
+ end
+ end
+
+ def serialize(value)
+ if value.is_a?(::Array)
+ "(#{number_for_point(value[0])},#{number_for_point(value[1])})"
+ else
+ super
+ end
+ end
+
+ private
+
+ def number_for_point(number)
+ number.to_s.gsub(/\.0$/, "")
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/connection_adapters/postgresql/oid/money.rb b/activerecord/lib/active_record/connection_adapters/postgresql/oid/money.rb
index dcc12ae2a4..6434377b57 100644
--- a/activerecord/lib/active_record/connection_adapters/postgresql/oid/money.rb
+++ b/activerecord/lib/active_record/connection_adapters/postgresql/oid/money.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
module ActiveRecord
module ConnectionAdapters
module PostgreSQL
@@ -22,12 +24,12 @@ module ActiveRecord
# (3) -$2.55
# (4) ($2.55)
- value.sub!(/^\((.+)\)$/, '-\1') # (4)
+ value = value.sub(/^\((.+)\)$/, '-\1') # (4)
case value
when /^-?\D+[\d,]+\.\d{2}$/ # (1)
- value.gsub!(/[^-\d.]/, '')
+ value.gsub!(/[^-\d.]/, "")
when /^-?\D+[\d.]+,\d{2}$/ # (2)
- value.gsub!(/[^-\d,]/, '').sub!(/,/, '.')
+ value.gsub!(/[^-\d,]/, "").sub!(/,/, ".")
end
super(value)
diff --git a/activerecord/lib/active_record/connection_adapters/postgresql/oid/json.rb b/activerecord/lib/active_record/connection_adapters/postgresql/oid/oid.rb
index dbc879ffd4..d8c044320d 100644
--- a/activerecord/lib/active_record/connection_adapters/postgresql/oid/json.rb
+++ b/activerecord/lib/active_record/connection_adapters/postgresql/oid/oid.rb
@@ -1,8 +1,13 @@
+# frozen_string_literal: true
+
module ActiveRecord
module ConnectionAdapters
module PostgreSQL
module OID # :nodoc:
- class Json < Type::Internal::AbstractJson
+ class Oid < Type::Integer # :nodoc:
+ def type
+ :oid
+ end
end
end
end
diff --git a/activerecord/lib/active_record/connection_adapters/postgresql/oid/point.rb b/activerecord/lib/active_record/connection_adapters/postgresql/oid/point.rb
index bf565bcf47..8c74cecc4d 100644
--- a/activerecord/lib/active_record/connection_adapters/postgresql/oid/point.rb
+++ b/activerecord/lib/active_record/connection_adapters/postgresql/oid/point.rb
@@ -1,9 +1,13 @@
+# frozen_string_literal: true
+
module ActiveRecord
+ Point = Struct.new(:x, :y)
+
module ConnectionAdapters
module PostgreSQL
module OID # :nodoc:
class Point < Type::Value # :nodoc:
- include Type::Helpers::Mutable
+ include ActiveModel::Type::Helpers::Mutable
def type
:point
@@ -12,20 +16,34 @@ module ActiveRecord
def cast(value)
case value
when ::String
- if value[0] == '(' && value[-1] == ')'
+ return if value.blank?
+
+ if value[0] == "(" && value[-1] == ")"
value = value[1...-1]
end
- cast(value.split(','))
+ x, y = value.split(",")
+ build_point(x, y)
when ::Array
- value.map { |v| Float(v) }
+ build_point(*value)
else
value
end
end
def serialize(value)
- if value.is_a?(::Array)
- "(#{number_for_point(value[0])},#{number_for_point(value[1])})"
+ case value
+ when ActiveRecord::Point
+ "(#{number_for_point(value.x)},#{number_for_point(value.y)})"
+ when ::Array
+ serialize(build_point(*value))
+ else
+ super
+ end
+ end
+
+ def type_cast_for_schema(value)
+ if ActiveRecord::Point === value
+ [value.x, value.y]
else
super
end
@@ -33,9 +51,13 @@ module ActiveRecord
private
- def number_for_point(number)
- number.to_s.gsub(/\.0$/, '')
- end
+ def number_for_point(number)
+ number.to_s.gsub(/\.0$/, "")
+ end
+
+ def build_point(x, y)
+ ActiveRecord::Point.new(Float(x), Float(y))
+ end
end
end
end
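
The rewritten Point type casts to ActiveRecord::Point structs rather than bare two-element arrays, while still accepting arrays and "(x,y)" strings on input. A rough sketch:

    type = ActiveRecord::ConnectionAdapters::PostgreSQL::OID::Point.new
    type.cast("(1.0,2.5)")                        # => ActiveRecord::Point.new(1.0, 2.5)
    type.serialize(ActiveRecord::Point.new(1, 2)) # => "(1,2)"
    type.serialize([1.5, 2.5])                    # => "(1.5,2.5)"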
diff --git a/activerecord/lib/active_record/connection_adapters/postgresql/oid/rails_5_1_point.rb b/activerecord/lib/active_record/connection_adapters/postgresql/oid/rails_5_1_point.rb
deleted file mode 100644
index 7427a25ad5..0000000000
--- a/activerecord/lib/active_record/connection_adapters/postgresql/oid/rails_5_1_point.rb
+++ /dev/null
@@ -1,50 +0,0 @@
-module ActiveRecord
- Point = Struct.new(:x, :y)
-
- module ConnectionAdapters
- module PostgreSQL
- module OID # :nodoc:
- class Rails51Point < Type::Value # :nodoc:
- include Type::Helpers::Mutable
-
- def type
- :point
- end
-
- def cast(value)
- case value
- when ::String
- if value[0] == '(' && value[-1] == ')'
- value = value[1...-1]
- end
- x, y = value.split(",")
- build_point(x, y)
- when ::Array
- build_point(*value)
- else
- value
- end
- end
-
- def serialize(value)
- if value.is_a?(ActiveRecord::Point)
- "(#{number_for_point(value.x)},#{number_for_point(value.y)})"
- else
- super
- end
- end
-
- private
-
- def number_for_point(number)
- number.to_s.gsub(/\.0$/, '')
- end
-
- def build_point(x, y)
- ActiveRecord::Point.new(Float(x), Float(y))
- end
- end
- end
- end
- end
-end
diff --git a/activerecord/lib/active_record/connection_adapters/postgresql/oid/range.rb b/activerecord/lib/active_record/connection_adapters/postgresql/oid/range.rb
index a8d2310035..aa7701e038 100644
--- a/activerecord/lib/active_record/connection_adapters/postgresql/oid/range.rb
+++ b/activerecord/lib/active_record/connection_adapters/postgresql/oid/range.rb
@@ -1,4 +1,4 @@
-require 'active_support/core_ext/string/filters'
+# frozen_string_literal: true
module ActiveRecord
module ConnectionAdapters
@@ -14,11 +14,11 @@ module ActiveRecord
end
def type_cast_for_schema(value)
- value.inspect.gsub('Infinity', '::Float::INFINITY')
+ value.inspect.gsub("Infinity", "::Float::INFINITY")
end
def cast_value(value)
- return if value == 'empty'
+ return if value == "empty"
return value unless value.is_a?(::String)
extracted = extract_bounds(value)
@@ -35,7 +35,7 @@ module ActiveRecord
if value.is_a?(::Range)
from = type_cast_single_for_database(value.begin)
to = type_cast_single_for_database(value.end)
- "[#{from},#{to}#{value.exclude_end? ? ')' : ']'}"
+ ::Range.new(from, to, value.exclude_end?)
else
super
end
@@ -53,39 +53,43 @@ module ActiveRecord
::Range.new(new_begin, new_end, value.exclude_end?)
end
+ def force_equality?(value)
+ value.is_a?(::Range)
+ end
+
private
- def type_cast_single(value)
- infinity?(value) ? value : @subtype.deserialize(value)
- end
+ def type_cast_single(value)
+ infinity?(value) ? value : @subtype.deserialize(value)
+ end
- def type_cast_single_for_database(value)
- infinity?(value) ? '' : @subtype.serialize(value)
- end
+ def type_cast_single_for_database(value)
+ infinity?(value) ? value : @subtype.serialize(@subtype.cast(value))
+ end
- def extract_bounds(value)
- from, to = value[1..-2].split(',')
- {
- from: (value[1] == ',' || from == '-infinity') ? infinity(negative: true) : from,
- to: (value[-2] == ',' || to == 'infinity') ? infinity : to,
- exclude_start: (value[0] == '('),
- exclude_end: (value[-1] == ')')
- }
- end
+ def extract_bounds(value)
+ from, to = value[1..-2].split(",")
+ {
+ from: (value[1] == "," || from == "-infinity") ? infinity(negative: true) : from,
+ to: (value[-2] == "," || to == "infinity") ? infinity : to,
+ exclude_start: (value[0] == "("),
+ exclude_end: (value[-1] == ")")
+ }
+ end
- def infinity(negative: false)
- if subtype.respond_to?(:infinity)
- subtype.infinity(negative: negative)
- elsif negative
- -::Float::INFINITY
- else
- ::Float::INFINITY
+ def infinity(negative: false)
+ if subtype.respond_to?(:infinity)
+ subtype.infinity(negative: negative)
+ elsif negative
+ -::Float::INFINITY
+ else
+ ::Float::INFINITY
+ end
end
- end
- def infinity?(value)
- value.respond_to?(:infinite?) && value.infinite?
- end
+ def infinity?(value)
+ value.respond_to?(:infinite?) && value.infinite?
+ end
end
end
end
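
cast_value above parses a PostgreSQL range literal into a Ruby Range, honoring open bounds and infinities. Illustrative casts, assuming the constructor takes a subtype and a type name as in the surrounding code:

    subtype = ActiveModel::Type::Integer.new
    range   = ActiveRecord::ConnectionAdapters::PostgreSQL::OID::Range.new(subtype, :int4range)
    range.cast("[1,10)")  # => 1...10
    range.cast("empty")   # => nil
    range.cast("[2,]")    # => 2..Float::INFINITY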
diff --git a/activerecord/lib/active_record/connection_adapters/postgresql/oid/specialized_string.rb b/activerecord/lib/active_record/connection_adapters/postgresql/oid/specialized_string.rb
index 2d2fede4e8..4ad1344f05 100644
--- a/activerecord/lib/active_record/connection_adapters/postgresql/oid/specialized_string.rb
+++ b/activerecord/lib/active_record/connection_adapters/postgresql/oid/specialized_string.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
module ActiveRecord
module ConnectionAdapters
module PostgreSQL
@@ -5,8 +7,9 @@ module ActiveRecord
class SpecializedString < Type::String # :nodoc:
attr_reader :type
- def initialize(type)
+ def initialize(type, **options)
@type = type
+ super(options)
end
end
end
diff --git a/activerecord/lib/active_record/connection_adapters/postgresql/oid/type_map_initializer.rb b/activerecord/lib/active_record/connection_adapters/postgresql/oid/type_map_initializer.rb
index 6155e53632..203087bc36 100644
--- a/activerecord/lib/active_record/connection_adapters/postgresql/oid/type_map_initializer.rb
+++ b/activerecord/lib/active_record/connection_adapters/postgresql/oid/type_map_initializer.rb
@@ -1,3 +1,7 @@
+# frozen_string_literal: true
+
+require "active_support/core_ext/array/extract"
+
module ActiveRecord
module ConnectionAdapters
module PostgreSQL
@@ -13,13 +17,13 @@ module ActiveRecord
end
def run(records)
- nodes = records.reject { |row| @store.key? row['oid'].to_i }
- mapped, nodes = nodes.partition { |row| @store.key? row['typname'] }
- ranges, nodes = nodes.partition { |row| row['typtype'] == 'r'.freeze }
- enums, nodes = nodes.partition { |row| row['typtype'] == 'e'.freeze }
- domains, nodes = nodes.partition { |row| row['typtype'] == 'd'.freeze }
- arrays, nodes = nodes.partition { |row| row['typinput'] == 'array_in'.freeze }
- composites, nodes = nodes.partition { |row| row['typelem'].to_i != 0 }
+ nodes = records.reject { |row| @store.key? row["oid"].to_i }
+ mapped = nodes.extract! { |row| @store.key? row["typname"] }
+ ranges = nodes.extract! { |row| row["typtype"] == "r" }
+ enums = nodes.extract! { |row| row["typtype"] == "e" }
+ domains = nodes.extract! { |row| row["typtype"] == "d" }
+ arrays = nodes.extract! { |row| row["typinput"] == "array_in" }
+ composites = nodes.extract! { |row| row["typelem"].to_i != 0 }
mapped.each { |row| register_mapped_type(row) }
enums.each { |row| register_enum_type(row) }
@@ -29,10 +33,10 @@ module ActiveRecord
composites.each { |row| register_composite_type(row) }
end
- def query_conditions_for_initial_load(type_map)
- known_type_names = type_map.keys.map { |n| "'#{n}'" }
+ def query_conditions_for_initial_load
+ known_type_names = @store.keys.map { |n| "'#{n}'" }
known_type_types = %w('r' 'e' 'd')
- <<-SQL % [known_type_names.join(", "), known_type_types.join(", ")]
+ <<~SQL % [known_type_names.join(", "), known_type_types.join(", ")]
WHERE
t.typname IN (%s)
OR t.typtype IN (%s)
@@ -42,66 +46,66 @@ module ActiveRecord
end
private
- def register_mapped_type(row)
- alias_type row['oid'], row['typname']
- end
+ def register_mapped_type(row)
+ alias_type row["oid"], row["typname"]
+ end
- def register_enum_type(row)
- register row['oid'], OID::Enum.new
- end
+ def register_enum_type(row)
+ register row["oid"], OID::Enum.new
+ end
- def register_array_type(row)
- register_with_subtype(row['oid'], row['typelem'].to_i) do |subtype|
- OID::Array.new(subtype, row['typdelim'])
+ def register_array_type(row)
+ register_with_subtype(row["oid"], row["typelem"].to_i) do |subtype|
+ OID::Array.new(subtype, row["typdelim"])
+ end
end
- end
- def register_range_type(row)
- register_with_subtype(row['oid'], row['rngsubtype'].to_i) do |subtype|
- OID::Range.new(subtype, row['typname'].to_sym)
+ def register_range_type(row)
+ register_with_subtype(row["oid"], row["rngsubtype"].to_i) do |subtype|
+ OID::Range.new(subtype, row["typname"].to_sym)
+ end
end
- end
- def register_domain_type(row)
- if base_type = @store.lookup(row["typbasetype"].to_i)
- register row['oid'], base_type
- else
- warn "unknown base type (OID: #{row["typbasetype"]}) for domain #{row["typname"]}."
+ def register_domain_type(row)
+ if base_type = @store.lookup(row["typbasetype"].to_i)
+ register row["oid"], base_type
+ else
+ warn "unknown base type (OID: #{row["typbasetype"]}) for domain #{row["typname"]}."
+ end
end
- end
- def register_composite_type(row)
- if subtype = @store.lookup(row['typelem'].to_i)
- register row['oid'], OID::Vector.new(row['typdelim'], subtype)
+ def register_composite_type(row)
+ if subtype = @store.lookup(row["typelem"].to_i)
+ register row["oid"], OID::Vector.new(row["typdelim"], subtype)
+ end
end
- end
- def register(oid, oid_type = nil, &block)
- oid = assert_valid_registration(oid, oid_type || block)
- if block_given?
- @store.register_type(oid, &block)
- else
- @store.register_type(oid, oid_type)
+ def register(oid, oid_type = nil, &block)
+ oid = assert_valid_registration(oid, oid_type || block)
+ if block_given?
+ @store.register_type(oid, &block)
+ else
+ @store.register_type(oid, oid_type)
+ end
end
- end
- def alias_type(oid, target)
- oid = assert_valid_registration(oid, target)
- @store.alias_type(oid, target)
- end
+ def alias_type(oid, target)
+ oid = assert_valid_registration(oid, target)
+ @store.alias_type(oid, target)
+ end
- def register_with_subtype(oid, target_oid)
- if @store.key?(target_oid)
- register(oid) do |_, *args|
- yield @store.lookup(target_oid, *args)
+ def register_with_subtype(oid, target_oid)
+ if @store.key?(target_oid)
+ register(oid) do |_, *args|
+ yield @store.lookup(target_oid, *args)
+ end
end
end
- end
- def assert_valid_registration(oid, oid_type)
- raise ArgumentError, "can't register nil type for OID #{oid}" if oid_type.nil?
- oid.to_i
- end
+ def assert_valid_registration(oid, oid_type)
+ raise ArgumentError, "can't register nil type for OID #{oid}" if oid_type.nil?
+ oid.to_i
+ end
end
end
end
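
The partition chain was replaced with Array#extract! from Active Support, which destructively removes the matching rows so each record is classified exactly once. A minimal sketch of that behavior:

    require "active_support/core_ext/array/extract"

    nodes  = [{ "typtype" => "r" }, { "typtype" => "e" }, { "typtype" => "b" }]
    ranges = nodes.extract! { |row| row["typtype"] == "r" }
    ranges # => [{ "typtype" => "r" }]
    nodes  # => [{ "typtype" => "e" }, { "typtype" => "b" }]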
diff --git a/activerecord/lib/active_record/connection_adapters/postgresql/oid/uuid.rb b/activerecord/lib/active_record/connection_adapters/postgresql/oid/uuid.rb
index 5e839228e9..28abdbd073 100644
--- a/activerecord/lib/active_record/connection_adapters/postgresql/oid/uuid.rb
+++ b/activerecord/lib/active_record/connection_adapters/postgresql/oid/uuid.rb
@@ -1,9 +1,11 @@
+# frozen_string_literal: true
+
module ActiveRecord
module ConnectionAdapters
module PostgreSQL
module OID # :nodoc:
class Uuid < Type::Value # :nodoc:
- ACCEPTABLE_UUID = %r{\A\{?([a-fA-F0-9]{4}-?){8}\}?\z}x
+ ACCEPTABLE_UUID = %r{\A(\{)?([a-fA-F0-9]{4}-?){8}(?(1)\}|)\z}
alias_method :serialize, :deserialize
@@ -11,9 +13,12 @@ module ActiveRecord
:uuid
end
- def cast(value)
- value.to_s[ACCEPTABLE_UUID, 0]
- end
+ private
+
+ def cast_value(value)
+ casted = value.to_s
+ casted if casted.match?(ACCEPTABLE_UUID)
+ end
end
end
end
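
The conditional group in ACCEPTABLE_UUID requires a closing brace only when an opening brace was matched, and cast_value now returns nil for non-matching input instead of extracting a substring. Illustrative casts:

    type = ActiveRecord::ConnectionAdapters::PostgreSQL::OID::Uuid.new
    type.cast("{a0eebc99-9c0b-4ef8-bb6d-6bb9bd380a11}") # => "{a0eebc99-9c0b-4ef8-bb6d-6bb9bd380a11}"
    type.cast("{a0eebc99-9c0b-4ef8-bb6d-6bb9bd380a11")  # => nil (unbalanced brace)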
diff --git a/activerecord/lib/active_record/connection_adapters/postgresql/oid/vector.rb b/activerecord/lib/active_record/connection_adapters/postgresql/oid/vector.rb
index b26e876b54..88ef626a16 100644
--- a/activerecord/lib/active_record/connection_adapters/postgresql/oid/vector.rb
+++ b/activerecord/lib/active_record/connection_adapters/postgresql/oid/vector.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
module ActiveRecord
module ConnectionAdapters
module PostgreSQL
diff --git a/activerecord/lib/active_record/connection_adapters/postgresql/oid/xml.rb b/activerecord/lib/active_record/connection_adapters/postgresql/oid/xml.rb
index d40d837cee..042f32fdc3 100644
--- a/activerecord/lib/active_record/connection_adapters/postgresql/oid/xml.rb
+++ b/activerecord/lib/active_record/connection_adapters/postgresql/oid/xml.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
module ActiveRecord
module ConnectionAdapters
module PostgreSQL
diff --git a/activerecord/lib/active_record/connection_adapters/postgresql/quoting.rb b/activerecord/lib/active_record/connection_adapters/postgresql/quoting.rb
index 6414459cd1..d40e0ef1f0 100644
--- a/activerecord/lib/active_record/connection_adapters/postgresql/quoting.rb
+++ b/activerecord/lib/active_record/connection_adapters/postgresql/quoting.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
module ActiveRecord
module ConnectionAdapters
module PostgreSQL
@@ -28,12 +30,12 @@ module ActiveRecord
# - "schema.name".table_name
# - "schema.name"."table.name"
def quote_table_name(name) # :nodoc:
- @quoted_table_names[name] ||= Utils.extract_schema_qualified_name(name.to_s).quoted
+ @quoted_table_names[name] ||= Utils.extract_schema_qualified_name(name.to_s).quoted.freeze
end
# Quotes schema names for use in SQL queries.
def quote_schema_name(name)
- PGconn.quote_ident(name)
+ PG::Connection.quote_ident(name)
end
def quote_table_name_for_assignment(table, attr)
@@ -42,7 +44,7 @@ module ActiveRecord
# Quotes column names for use in SQL queries.
def quote_column_name(name) # :nodoc:
- @quoted_column_names[name] ||= PGconn.quote_ident(super)
+ @quoted_column_names[name] ||= PG::Connection.quote_ident(super).freeze
end
# Quote date/time values for use in SQL input.
@@ -55,10 +57,14 @@ module ActiveRecord
end
end
+ def quoted_binary(value) # :nodoc:
+ "'#{escape_bytea(value.to_s)}'"
+ end
+
def quote_default_expression(value, column) # :nodoc:
if value.is_a?(Proc)
value.call
- elsif column.type == :uuid && value =~ /\(\)/
+ elsif column.type == :uuid && value.is_a?(String) && /\(\)/.match?(value)
value # Does not quote function default values for UUID columns
elsif column.respond_to?(:array?)
value = type_cast_from_column(column, value)
@@ -73,43 +79,89 @@ module ActiveRecord
end
private
+ def lookup_cast_type(sql_type)
+ super(query_value("SELECT #{quote(sql_type)}::regtype::oid", "SCHEMA").to_i)
+ end
- def _quote(value)
- case value
- when Type::Binary::Data
- "'#{escape_bytea(value.to_s)}'"
- when OID::Xml::Data
- "xml '#{quote_string(value.to_s)}'"
- when OID::Bit::Data
- if value.binary?
- "B'#{value}'"
- elsif value.hex?
- "X'#{value}'"
+ def _quote(value)
+ case value
+ when OID::Xml::Data
+ "xml '#{quote_string(value.to_s)}'"
+ when OID::Bit::Data
+ if value.binary?
+ "B'#{value}'"
+ elsif value.hex?
+ "X'#{value}'"
+ end
+ when Numeric
+ if value.finite?
+ super
+ else
+ "'#{value}'"
+ end
+ when OID::Array::Data
+ _quote(encode_array(value))
+ when Range
+ _quote(encode_range(value))
+ else
+ super
end
- when Float
- if value.infinite? || value.nan?
- "'#{value}'"
+ end
+
+ def _type_cast(value)
+ case value
+ when Type::Binary::Data
+ # Return a bind param hash with format as binary.
+ # See https://deveiate.org/code/pg/PG/Connection.html#method-i-exec_prepared-doc
+ # for more information
+ { value: value.to_s, format: 1 }
+ when OID::Xml::Data, OID::Bit::Data
+ value.to_s
+ when OID::Array::Data
+ encode_array(value)
+ when Range
+ encode_range(value)
else
super
end
- else
- super
end
- end
- def _type_cast(value)
- case value
- when Type::Binary::Data
- # Return a bind param hash with format as binary.
- # See http://deveiate.org/code/pg/PGconn.html#method-i-exec_prepared-doc
- # for more information
- { value: value.to_s, format: 1 }
- when OID::Xml::Data, OID::Bit::Data
- value.to_s
- else
- super
+ def encode_array(array_data)
+ encoder = array_data.encoder
+ values = type_cast_array(array_data.values)
+
+ result = encoder.encode(values)
+ if encoding = determine_encoding_of_strings_in_array(values)
+ result.force_encoding(encoding)
+ end
+ result
+ end
+
+ def encode_range(range)
+ "[#{type_cast_range_value(range.begin)},#{type_cast_range_value(range.end)}#{range.exclude_end? ? ')' : ']'}"
+ end
+
+ def determine_encoding_of_strings_in_array(value)
+ case value
+ when ::Array then determine_encoding_of_strings_in_array(value.first)
+ when ::String then value.encoding
+ end
+ end
+
+ def type_cast_array(values)
+ case values
+ when ::Array then values.map { |item| type_cast_array(item) }
+ else _type_cast(values)
+ end
+ end
+
+ def type_cast_range_value(value)
+ infinity?(value) ? "" : type_cast(value)
+ end
+
+ def infinity?(value)
+ value.respond_to?(:infinite?) && value.infinite?
end
- end
end
end
end
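
Ranges and array data now flow through dedicated encoders during quoting; unbounded or infinite range endpoints become empty strings, which PostgreSQL reads as unbounded. Rough expectations, assuming conn is a PostgreSQL connection:

    conn.quote(1..10)              # => "'[1,10]'"
    conn.quote(1...10)             # => "'[1,10)'"
    conn.quote(1..Float::INFINITY) # => "'[1,)'"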
diff --git a/activerecord/lib/active_record/connection_adapters/postgresql/referential_integrity.rb b/activerecord/lib/active_record/connection_adapters/postgresql/referential_integrity.rb
index 44a7338bf5..8df91c988b 100644
--- a/activerecord/lib/active_record/connection_adapters/postgresql/referential_integrity.rb
+++ b/activerecord/lib/active_record/connection_adapters/postgresql/referential_integrity.rb
@@ -1,27 +1,24 @@
+# frozen_string_literal: true
+
module ActiveRecord
module ConnectionAdapters
module PostgreSQL
module ReferentialIntegrity # :nodoc:
- def supports_disable_referential_integrity? # :nodoc:
- true
- end
-
def disable_referential_integrity # :nodoc:
- if supports_disable_referential_integrity?
- original_exception = nil
+ original_exception = nil
- begin
- transaction(requires_new: true) do
- execute(tables.collect { |name| "ALTER TABLE #{quote_table_name(name)} DISABLE TRIGGER ALL" }.join(";"))
- end
- rescue ActiveRecord::ActiveRecordError => e
- original_exception = e
+ begin
+ transaction(requires_new: true) do
+ execute(tables.collect { |name| "ALTER TABLE #{quote_table_name(name)} DISABLE TRIGGER ALL" }.join(";"))
end
+ rescue ActiveRecord::ActiveRecordError => e
+ original_exception = e
+ end
- begin
- yield
- rescue ActiveRecord::InvalidForeignKey => e
- warn <<-WARNING
+ begin
+ yield
+ rescue ActiveRecord::InvalidForeignKey => e
+ warn <<-WARNING
WARNING: Rails was not able to disable referential integrity.
This is most likely caused due to missing permissions.
@@ -30,17 +27,14 @@ Rails needs superuser privileges to disable referential integrity.
cause: #{original_exception.try(:message)}
WARNING
- raise e
- end
+ raise e
+ end
- begin
- transaction(requires_new: true) do
- execute(tables.collect { |name| "ALTER TABLE #{quote_table_name(name)} ENABLE TRIGGER ALL" }.join(";"))
- end
- rescue ActiveRecord::ActiveRecordError
+ begin
+ transaction(requires_new: true) do
+ execute(tables.collect { |name| "ALTER TABLE #{quote_table_name(name)} ENABLE TRIGGER ALL" }.join(";"))
end
- else
- yield
+ rescue ActiveRecord::ActiveRecordError
end
end
end
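
disable_referential_integrity disables every trigger around the block and re-enables them afterwards, swallowing permission errors unless a foreign-key violation follows. A typical, illustrative use:

    ActiveRecord::Base.connection.disable_referential_integrity do
      # bulk inserts that may temporarily violate foreign keys,
      # e.g. loading fixtures out of dependency order
    end

Superuser-level privileges are required for the ALTER TABLE ... DISABLE TRIGGER ALL statements to take effect.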
diff --git a/activerecord/lib/active_record/connection_adapters/postgresql/schema_creation.rb b/activerecord/lib/active_record/connection_adapters/postgresql/schema_creation.rb
new file mode 100644
index 0000000000..84dd28907b
--- /dev/null
+++ b/activerecord/lib/active_record/connection_adapters/postgresql/schema_creation.rb
@@ -0,0 +1,76 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ module ConnectionAdapters
+ module PostgreSQL
+ class SchemaCreation < AbstractAdapter::SchemaCreation # :nodoc:
+ private
+ def visit_AlterTable(o)
+ super << o.constraint_validations.map { |fk| visit_ValidateConstraint fk }.join(" ")
+ end
+
+ def visit_AddForeignKey(o)
+ super.dup.tap { |sql| sql << " NOT VALID" unless o.validate? }
+ end
+
+ def visit_ValidateConstraint(name)
+ "VALIDATE CONSTRAINT #{quote_column_name(name)}"
+ end
+
+ def visit_ChangeColumnDefinition(o)
+ column = o.column
+ column.sql_type = type_to_sql(column.type, column.options)
+ quoted_column_name = quote_column_name(o.name)
+
+ change_column_sql = +"ALTER COLUMN #{quoted_column_name} TYPE #{column.sql_type}"
+
+ options = column_options(column)
+
+ if options[:collation]
+ change_column_sql << " COLLATE \"#{options[:collation]}\""
+ end
+
+ if options[:using]
+ change_column_sql << " USING #{options[:using]}"
+ elsif options[:cast_as]
+ cast_as_type = type_to_sql(options[:cast_as], options)
+ change_column_sql << " USING CAST(#{quoted_column_name} AS #{cast_as_type})"
+ end
+
+ if options.key?(:default)
+ if options[:default].nil?
+ change_column_sql << ", ALTER COLUMN #{quoted_column_name} DROP DEFAULT"
+ else
+ quoted_default = quote_default_expression(options[:default], column)
+ change_column_sql << ", ALTER COLUMN #{quoted_column_name} SET DEFAULT #{quoted_default}"
+ end
+ end
+
+ if options.key?(:null)
+ change_column_sql << ", ALTER COLUMN #{quoted_column_name} #{options[:null] ? 'DROP' : 'SET'} NOT NULL"
+ end
+
+ change_column_sql
+ end
+
+ def add_column_options!(sql, options)
+ if options[:collation]
+ sql << " COLLATE \"#{options[:collation]}\""
+ end
+ super
+ end
+
+ # Returns any SQL string to go between CREATE and TABLE. May be nil.
+ def table_modifier_in_create(o)
+ # A table cannot be both TEMPORARY and UNLOGGED, since all TEMPORARY
+ # tables are already UNLOGGED.
+ if o.temporary
+ " TEMPORARY"
+ elsif o.unlogged
+ " UNLOGGED"
+ end
+ end
+ end
+ end
+ end
+end
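
Of the new visitors, `visit_AddForeignKey` appends `NOT VALID` whenever the foreign key was declared with `validate: false`, and `visit_ValidateConstraint` produces the matching `VALIDATE CONSTRAINT` clause consumed by `visit_AlterTable`. A migration-level sketch of how the two paths are typically combined (table names and the migration version are illustrative):

    # Sketch: create the constraint without validating existing rows, then
    # validate it in a separate, short-lock step.
    class AddSupplierFkToAccounts < ActiveRecord::Migration[5.2]
      def up
        add_foreign_key :accounts, :suppliers, validate: false  # ... NOT VALID
        validate_foreign_key :accounts, :suppliers              # ALTER TABLE ... VALIDATE CONSTRAINT ...
      end
    end
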
diff --git a/activerecord/lib/active_record/connection_adapters/postgresql/schema_definitions.rb b/activerecord/lib/active_record/connection_adapters/postgresql/schema_definitions.rb
index 6399bddbee..3bb7c52899 100644
--- a/activerecord/lib/active_record/connection_adapters/postgresql/schema_definitions.rb
+++ b/activerecord/lib/active_record/connection_adapters/postgresql/schema_definitions.rb
@@ -1,7 +1,11 @@
+# frozen_string_literal: true
+
module ActiveRecord
module ConnectionAdapters
module PostgreSQL
module ColumnMethods
+ extend ActiveSupport::Concern
+
# Defines the primary key field.
# Use of the native PostgreSQL UUID type is supported, and can be used
# by defining your tables as such:
@@ -11,11 +15,22 @@ module ActiveRecord
# t.timestamps
# end
#
- # By default, this will use the +uuid_generate_v4()+ function from the
- # +uuid-ossp+ extension, which MUST be enabled on your database. To enable
- # the +uuid-ossp+ extension, you can use the +enable_extension+ method in your
- # migrations. To use a UUID primary key without +uuid-ossp+ enabled, you can
- # set the +:default+ option to +nil+:
+ # By default, this will use the <tt>gen_random_uuid()</tt> function from the
+ # +pgcrypto+ extension. As that extension is only available in
+ # PostgreSQL 9.4+, for earlier versions an explicit default can be set
+ # to use <tt>uuid_generate_v4()</tt> from the +uuid-ossp+ extension instead:
+ #
+ # create_table :stuffs, id: false do |t|
+ # t.primary_key :id, :uuid, default: "uuid_generate_v4()"
+ # t.uuid :foo_id
+ # t.timestamps
+ # end
+ #
+ # To enable the appropriate extension, which is a requirement, use
+ # the +enable_extension+ method in your migrations.
+ #
+ # To use a UUID primary key without any of the extensions, set the
+ # +:default+ option to +nil+:
#
# create_table :stuffs, id: false do |t|
# t.primary_key :id, :uuid, default: nil
@@ -23,158 +38,185 @@ module ActiveRecord
# t.timestamps
# end
#
- # You may also pass a different UUID generation function from +uuid-ossp+
- # or another library.
+ # You may also pass a custom stored procedure that returns a UUID or use a
+ # different UUID generation function from another library.
#
# Note that setting the UUID primary key default value to +nil+ will
# require you to assure that you always provide a UUID value before saving
# a record (as primary keys cannot be +nil+). This might be done via the
# +SecureRandom.uuid+ method and a +before_save+ callback, for instance.
def primary_key(name, type = :primary_key, **options)
- options[:default] = options.fetch(:default, 'uuid_generate_v4()') if type == :uuid
+ if type == :uuid
+ options[:default] = options.fetch(:default, "gen_random_uuid()")
+ end
+
super
end
- def bigserial(*args, **options)
- args.each { |name| column(name, :bigserial, options) }
- end
+ ##
+ # :method: bigserial
+ # :call-seq: bigserial(*names, **options)
- def bit(*args, **options)
- args.each { |name| column(name, :bit, options) }
- end
+ ##
+ # :method: bit
+ # :call-seq: bit(*names, **options)
- def bit_varying(*args, **options)
- args.each { |name| column(name, :bit_varying, options) }
- end
+ ##
+ # :method: bit_varying
+ # :call-seq: bit_varying(*names, **options)
- def cidr(*args, **options)
- args.each { |name| column(name, :cidr, options) }
- end
+ ##
+ # :method: cidr
+ # :call-seq: cidr(*names, **options)
- def citext(*args, **options)
- args.each { |name| column(name, :citext, options) }
- end
+ ##
+ # :method: citext
+ # :call-seq: citext(*names, **options)
- def daterange(*args, **options)
- args.each { |name| column(name, :daterange, options) }
- end
+ ##
+ # :method: daterange
+ # :call-seq: daterange(*names, **options)
- def hstore(*args, **options)
- args.each { |name| column(name, :hstore, options) }
- end
+ ##
+ # :method: hstore
+ # :call-seq: hstore(*names, **options)
- def inet(*args, **options)
- args.each { |name| column(name, :inet, options) }
- end
+ ##
+ # :method: inet
+ # :call-seq: inet(*names, **options)
- def int4range(*args, **options)
- args.each { |name| column(name, :int4range, options) }
- end
+ ##
+ # :method: interval
+ # :call-seq: interval(*names, **options)
- def int8range(*args, **options)
- args.each { |name| column(name, :int8range, options) }
- end
+ ##
+ # :method: int4range
+ # :call-seq: int4range(*names, **options)
- def json(*args, **options)
- args.each { |name| column(name, :json, options) }
- end
+ ##
+ # :method: int8range
+ # :call-seq: int8range(*names, **options)
- def jsonb(*args, **options)
- args.each { |name| column(name, :jsonb, options) }
- end
+ ##
+ # :method: jsonb
+ # :call-seq: jsonb(*names, **options)
- def ltree(*args, **options)
- args.each { |name| column(name, :ltree, options) }
- end
+ ##
+ # :method: ltree
+ # :call-seq: ltree(*names, **options)
- def macaddr(*args, **options)
- args.each { |name| column(name, :macaddr, options) }
- end
+ ##
+ # :method: macaddr
+ # :call-seq: macaddr(*names, **options)
- def money(*args, **options)
- args.each { |name| column(name, :money, options) }
- end
+ ##
+ # :method: money
+ # :call-seq: money(*names, **options)
- def numrange(*args, **options)
- args.each { |name| column(name, :numrange, options) }
- end
+ ##
+ # :method: numrange
+ # :call-seq: numrange(*names, **options)
- def point(*args, **options)
- args.each { |name| column(name, :point, options) }
- end
+ ##
+ # :method: oid
+ # :call-seq: oid(*names, **options)
- def line(*args, **options)
- args.each { |name| column(name, :line, options) }
- end
+ ##
+ # :method: point
+ # :call-seq: point(*names, **options)
- def lseg(*args, **options)
- args.each { |name| column(name, :lseg, options) }
- end
+ ##
+ # :method: line
+ # :call-seq: line(*names, **options)
- def box(*args, **options)
- args.each { |name| column(name, :box, options) }
- end
+ ##
+ # :method: lseg
+ # :call-seq: lseg(*names, **options)
- def path(*args, **options)
- args.each { |name| column(name, :path, options) }
- end
+ ##
+ # :method: box
+ # :call-seq: box(*names, **options)
- def polygon(*args, **options)
- args.each { |name| column(name, :polygon, options) }
- end
+ ##
+ # :method: path
+ # :call-seq: path(*names, **options)
- def circle(*args, **options)
- args.each { |name| column(name, :circle, options) }
- end
+ ##
+ # :method: polygon
+ # :call-seq: polygon(*names, **options)
- def serial(*args, **options)
- args.each { |name| column(name, :serial, options) }
- end
+ ##
+ # :method: circle
+ # :call-seq: circle(*names, **options)
- def tsrange(*args, **options)
- args.each { |name| column(name, :tsrange, options) }
- end
+ ##
+ # :method: serial
+ # :call-seq: serial(*names, **options)
- def tstzrange(*args, **options)
- args.each { |name| column(name, :tstzrange, options) }
- end
+ ##
+ # :method: tsrange
+ # :call-seq: tsrange(*names, **options)
- def tsvector(*args, **options)
- args.each { |name| column(name, :tsvector, options) }
- end
+ ##
+ # :method: tstzrange
+ # :call-seq: tstzrange(*names, **options)
- def uuid(*args, **options)
- args.each { |name| column(name, :uuid, options) }
- end
+ ##
+ # :method: tsvector
+ # :call-seq: tsvector(*names, **options)
- def xml(*args, **options)
- args.each { |name| column(name, :xml, options) }
- end
- end
+ ##
+ # :method: uuid
+ # :call-seq: uuid(*names, **options)
+
+ ##
+ # :method: xml
+ # :call-seq: xml(*names, **options)
- class ColumnDefinition < ActiveRecord::ConnectionAdapters::ColumnDefinition
- attr_accessor :array
+ included do
+ define_column_methods :bigserial, :bit, :bit_varying, :cidr, :citext, :daterange,
+ :hstore, :inet, :interval, :int4range, :int8range, :jsonb, :ltree, :macaddr,
+ :money, :numrange, :oid, :point, :line, :lseg, :box, :path, :polygon, :circle,
+ :serial, :tsrange, :tstzrange, :tsvector, :uuid, :xml
+ end
end
class TableDefinition < ActiveRecord::ConnectionAdapters::TableDefinition
include ColumnMethods
- def new_column_definition(name, type, options) # :nodoc:
- column = super
- column.array = options[:array]
- column
+ attr_reader :unlogged
+
+ def initialize(*)
+ super
+ @unlogged = ActiveRecord::ConnectionAdapters::PostgreSQLAdapter.create_unlogged_tables
end
private
-
- def create_column_definition(name, type)
- PostgreSQL::ColumnDefinition.new name, type
+ def integer_like_primary_key_type(type, options)
+ if type == :bigint || options[:limit] == 8
+ :bigserial
+ else
+ :serial
+ end
end
end
class Table < ActiveRecord::ConnectionAdapters::Table
include ColumnMethods
end
+
+ class AlterTable < ActiveRecord::ConnectionAdapters::AlterTable
+ attr_reader :constraint_validations
+
+ def initialize(td)
+ super
+ @constraint_validations = []
+ end
+
+ def validate_constraint(name)
+ @constraint_validations << name
+ end
+ end
end
end
end
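
The per-type helpers (`hstore`, `citext`, `tstzrange`, and so on) are now generated through `define_column_methods`, and `TableDefinition` picks up the adapter-wide `create_unlogged_tables` flag at construction time. A short sketch of the table-definition DSL these changes serve (column names are made up):

    # Sketch: UUID primary key via pgcrypto plus a few of the generated helpers.
    create_table :measurements, id: :uuid do |t|   # default becomes gen_random_uuid()
      t.citext    :label, null: false
      t.hstore    :tags
      t.tstzrange :observed_during
      t.timestamps
    end
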
diff --git a/activerecord/lib/active_record/connection_adapters/postgresql/schema_dumper.rb b/activerecord/lib/active_record/connection_adapters/postgresql/schema_dumper.rb
index 1047ba8cac..84643d20da 100644
--- a/activerecord/lib/active_record/connection_adapters/postgresql/schema_dumper.rb
+++ b/activerecord/lib/active_record/connection_adapters/postgresql/schema_dumper.rb
@@ -1,46 +1,49 @@
+# frozen_string_literal: true
+
module ActiveRecord
module ConnectionAdapters
module PostgreSQL
- module ColumnDumper
- def column_spec_for_primary_key(column)
- spec = super.except!(:null)
- if schema_type(column) == :uuid
- spec[:default] ||= 'nil'
- end
- spec
- end
+ class SchemaDumper < ConnectionAdapters::SchemaDumper # :nodoc:
+ private
- # Adds +:array+ option to the default set
- def prepare_column_options(column)
- spec = super
- spec[:array] = 'true' if column.array?
- spec
- end
+ def extensions(stream)
+ extensions = @connection.extensions
+ if extensions.any?
+ stream.puts " # These are extensions that must be enabled in order to support this database"
+ extensions.sort.each do |extension|
+ stream.puts " enable_extension #{extension.inspect}"
+ end
+ stream.puts
+ end
+ end
- # Adds +:array+ as a valid migration key
- def migration_keys
- super + [:array]
- end
+ def prepare_column_options(column)
+ spec = super
+ spec[:array] = "true" if column.array?
+ spec
+ end
- private
+ def default_primary_key?(column)
+ schema_type(column) == :bigserial
+ end
- def default_primary_key?(column)
- schema_type(column) == :serial
- end
+ def explicit_primary_key_default?(column)
+ column.type == :uuid || (column.type == :integer && !column.serial?)
+ end
- def schema_type(column)
- return super unless column.serial?
+ def schema_type(column)
+ return super unless column.serial?
- if column.bigint?
- :bigserial
- else
- :serial
+ if column.bigint?
+ :bigserial
+ else
+ :serial
+ end
end
- end
- def schema_expression(column)
- super unless column.serial?
- end
+ def schema_expression(column)
+ super unless column.serial?
+ end
end
end
end
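
The dumper is now a `ConnectionAdapters::SchemaDumper` subclass: it emits `enable_extension` lines for every installed extension, keeps the `array: true` column option, and treats `bigserial` as the implicit primary-key type. The `db/schema.rb` output it is responsible for looks roughly like this (values are illustrative):

    # Sketch of the fragments this dumper writes.
    enable_extension "pgcrypto"
    enable_extension "plpgsql"

    create_table "articles", force: :cascade do |t|  # bigserial pk, so no explicit id option is dumped
      t.string "tag_names", array: true
    end
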
diff --git a/activerecord/lib/active_record/connection_adapters/postgresql/schema_statements.rb b/activerecord/lib/active_record/connection_adapters/postgresql/schema_statements.rb
index ca2a41b136..a38c1325c0 100644
--- a/activerecord/lib/active_record/connection_adapters/postgresql/schema_statements.rb
+++ b/activerecord/lib/active_record/connection_adapters/postgresql/schema_statements.rb
@@ -1,22 +1,8 @@
+# frozen_string_literal: true
+
module ActiveRecord
module ConnectionAdapters
module PostgreSQL
- class SchemaCreation < AbstractAdapter::SchemaCreation
- private
-
- def visit_ColumnDefinition(o)
- o.sql_type = type_to_sql(o.type, o.limit, o.precision, o.scale, o.array)
- super
- end
-
- def add_column_options!(sql, options)
- if options[:collation]
- sql << " COLLATE \"#{options[:collation]}\""
- end
- super
- end
- end
-
module SchemaStatements
# Drops the database specified on the +name+ attribute
# and creates it again using the provided +options+.
@@ -34,26 +20,26 @@ module ActiveRecord
# create_database config[:database], config
# create_database 'foo_development', encoding: 'unicode'
def create_database(name, options = {})
- options = { encoding: 'utf8' }.merge!(options.symbolize_keys)
-
- option_string = options.inject("") do |memo, (key, value)|
- memo += case key
- when :owner
- " OWNER = \"#{value}\""
- when :template
- " TEMPLATE = \"#{value}\""
- when :encoding
- " ENCODING = '#{value}'"
- when :collation
- " LC_COLLATE = '#{value}'"
- when :ctype
- " LC_CTYPE = '#{value}'"
- when :tablespace
- " TABLESPACE = \"#{value}\""
- when :connection_limit
- " CONNECTION LIMIT = #{value}"
- else
- ""
+ options = { encoding: "utf8" }.merge!(options.symbolize_keys)
+
+ option_string = options.each_with_object(+"") do |(key, value), memo|
+ memo << case key
+ when :owner
+ " OWNER = \"#{value}\""
+ when :template
+ " TEMPLATE = \"#{value}\""
+ when :encoding
+ " ENCODING = '#{value}'"
+ when :collation
+ " LC_COLLATE = '#{value}'"
+ when :ctype
+ " LC_CTYPE = '#{value}'"
+ when :tablespace
+ " TABLESPACE = \"#{value}\""
+ when :connection_limit
+ " CONNECTION LIMIT = #{value}"
+ else
+ ""
end
end
@@ -68,119 +54,48 @@ module ActiveRecord
execute "DROP DATABASE IF EXISTS #{quote_table_name(name)}"
end
- # Returns the list of all tables in the schema search path.
- def tables(name = nil)
- if name
- ActiveSupport::Deprecation.warn(<<-MSG.squish)
- Passing arguments to #tables is deprecated without replacement.
- MSG
- end
-
- select_values("SELECT tablename FROM pg_tables WHERE schemaname = ANY(current_schemas(false))", 'SCHEMA')
- end
-
- def data_sources # :nodoc
- select_values(<<-SQL, 'SCHEMA')
- SELECT c.relname
- FROM pg_class c
- LEFT JOIN pg_namespace n ON n.oid = c.relnamespace
- WHERE c.relkind IN ('r', 'v','m') -- (r)elation/table, (v)iew, (m)aterialized view
- AND n.nspname = ANY (current_schemas(false))
- SQL
- end
-
- # Returns true if table exists.
- # If the schema is not specified as part of +name+ then it will only find tables within
- # the current schema search path (regardless of permissions to access tables in other schemas)
- def table_exists?(name)
- ActiveSupport::Deprecation.warn(<<-MSG.squish)
- #table_exists? currently checks both tables and views.
- This behavior is deprecated and will be changed with Rails 5.1 to only check tables.
- Use #data_source_exists? instead.
- MSG
-
- data_source_exists?(name)
- end
-
- def data_source_exists?(name)
- name = Utils.extract_schema_qualified_name(name.to_s)
- return false unless name.identifier
-
- select_value(<<-SQL, 'SCHEMA').to_i > 0
- SELECT COUNT(*)
- FROM pg_class c
- LEFT JOIN pg_namespace n ON n.oid = c.relnamespace
- WHERE c.relkind IN ('r','v','m') -- (r)elation/table, (v)iew, (m)aterialized view
- AND c.relname = '#{name.identifier}'
- AND n.nspname = #{name.schema ? "'#{name.schema}'" : 'ANY (current_schemas(false))'}
- SQL
- end
-
- def views # :nodoc:
- select_values(<<-SQL, 'SCHEMA')
- SELECT c.relname
- FROM pg_class c
- LEFT JOIN pg_namespace n ON n.oid = c.relnamespace
- WHERE c.relkind IN ('v','m') -- (v)iew, (m)aterialized view
- AND n.nspname = ANY (current_schemas(false))
- SQL
- end
-
- def view_exists?(view_name) # :nodoc:
- name = Utils.extract_schema_qualified_name(view_name.to_s)
- return false unless name.identifier
-
- select_values(<<-SQL, 'SCHEMA').any?
- SELECT c.relname
- FROM pg_class c
- LEFT JOIN pg_namespace n ON n.oid = c.relnamespace
- WHERE c.relkind IN ('v','m') -- (v)iew, (m)aterialized view
- AND c.relname = '#{name.identifier}'
- AND n.nspname = #{name.schema ? "'#{name.schema}'" : 'ANY (current_schemas(false))'}
- SQL
- end
-
def drop_table(table_name, options = {}) # :nodoc:
execute "DROP TABLE#{' IF EXISTS' if options[:if_exists]} #{quote_table_name(table_name)}#{' CASCADE' if options[:force] == :cascade}"
end
# Returns true if schema exists.
def schema_exists?(name)
- select_value("SELECT COUNT(*) FROM pg_namespace WHERE nspname = '#{name}'", 'SCHEMA').to_i > 0
+ query_value("SELECT COUNT(*) FROM pg_namespace WHERE nspname = #{quote(name)}", "SCHEMA").to_i > 0
end
# Verifies existence of an index with a given name.
- def index_name_exists?(table_name, index_name, default)
- table = Utils.extract_schema_qualified_name(table_name.to_s)
- index = Utils.extract_schema_qualified_name(index_name.to_s)
+ def index_name_exists?(table_name, index_name)
+ table = quoted_scope(table_name)
+ index = quoted_scope(index_name)
- select_value(<<-SQL, 'SCHEMA').to_i > 0
+ query_value(<<~SQL, "SCHEMA").to_i > 0
SELECT COUNT(*)
FROM pg_class t
INNER JOIN pg_index d ON t.oid = d.indrelid
INNER JOIN pg_class i ON d.indexrelid = i.oid
LEFT JOIN pg_namespace n ON n.oid = i.relnamespace
WHERE i.relkind = 'i'
- AND i.relname = '#{index.identifier}'
- AND t.relname = '#{table.identifier}'
- AND n.nspname = #{index.schema ? "'#{index.schema}'" : 'ANY (current_schemas(false))'}
+ AND i.relname = #{index[:name]}
+ AND t.relname = #{table[:name]}
+ AND n.nspname = #{index[:schema]}
SQL
end
# Returns an array of indexes for the given table.
- def indexes(table_name, name = nil)
- table = Utils.extract_schema_qualified_name(table_name.to_s)
+ def indexes(table_name) # :nodoc:
+ scope = quoted_scope(table_name)
- result = query(<<-SQL, 'SCHEMA')
- SELECT distinct i.relname, d.indisunique, d.indkey, pg_get_indexdef(d.indexrelid), t.oid
+ result = query(<<~SQL, "SCHEMA")
+ SELECT distinct i.relname, d.indisunique, d.indkey, pg_get_indexdef(d.indexrelid), t.oid,
+ pg_catalog.obj_description(i.oid, 'pg_class') AS comment
FROM pg_class t
INNER JOIN pg_index d ON t.oid = d.indrelid
INNER JOIN pg_class i ON d.indexrelid = i.oid
LEFT JOIN pg_namespace n ON n.oid = i.relnamespace
WHERE i.relkind = 'i'
AND d.indisprimary = 'f'
- AND t.relname = '#{table.identifier}'
- AND n.nspname = #{table.schema ? "'#{table.schema}'" : 'ANY (current_schemas(false))'}
+ AND t.relname = #{scope[:name]}
+ AND n.nspname = #{scope[:schema]}
ORDER BY i.relname
SQL
@@ -190,73 +105,98 @@ module ActiveRecord
indkey = row[2].split(" ").map(&:to_i)
inddef = row[3]
oid = row[4]
+ comment = row[5]
- columns = Hash[query(<<-SQL, "SCHEMA")]
- SELECT a.attnum, a.attname
- FROM pg_attribute a
- WHERE a.attrelid = #{oid}
- AND a.attnum IN (#{indkey.join(",")})
- SQL
-
- column_names = columns.values_at(*indkey).compact
+ using, expressions, where = inddef.scan(/ USING (\w+?) \((.+?)\)(?: WHERE (.+))?\z/m).flatten
- unless column_names.empty?
- # add info on sort order for columns (only desc order is explicitly specified, asc is the default)
- desc_order_columns = inddef.scan(/(\w+) DESC/).flatten
- orders = desc_order_columns.any? ? Hash[desc_order_columns.map {|order_column| [order_column, :desc]}] : {}
- where = inddef.scan(/WHERE (.+)$/).flatten[0]
- using = inddef.scan(/USING (.+?) /).flatten[0].to_sym
+ orders = {}
+ opclasses = {}
- IndexDefinition.new(table_name, index_name, unique, column_names, [], orders, where, nil, using)
+ if indkey.include?(0)
+ columns = expressions
+ else
+ columns = Hash[query(<<~SQL, "SCHEMA")].values_at(*indkey).compact
+ SELECT a.attnum, a.attname
+ FROM pg_attribute a
+ WHERE a.attrelid = #{oid}
+ AND a.attnum IN (#{indkey.join(",")})
+ SQL
+
+ # add info on sort order (only desc order is explicitly specified, asc is the default)
+ # and non-default opclasses
+ expressions.scan(/(?<column>\w+)"?\s?(?<opclass>\w+_ops)?\s?(?<desc>DESC)?\s?(?<nulls>NULLS (?:FIRST|LAST))?/).each do |column, opclass, desc, nulls|
+ opclasses[column] = opclass.to_sym if opclass
+ if nulls
+ orders[column] = [desc, nulls].compact.join(" ")
+ else
+ orders[column] = :desc if desc
+ end
+ end
end
- end.compact
+
+ IndexDefinition.new(
+ table_name,
+ index_name,
+ unique,
+ columns,
+ orders: orders,
+ opclasses: opclasses,
+ where: where,
+ using: using.to_sym,
+ comment: comment.presence
+ )
+ end
end
- # Returns the list of all column definitions for a table.
- def columns(table_name) # :nodoc:
- table_name = table_name.to_s
- column_definitions(table_name).map do |column_name, type, default, notnull, oid, fmod, collation|
- oid = oid.to_i
- fmod = fmod.to_i
- type_metadata = fetch_type_metadata(column_name, type, oid, fmod)
- default_value = extract_value_from_default(default)
- default_function = extract_default_function(default_value, default)
- new_column(column_name, default_value, type_metadata, !notnull, table_name, default_function, collation)
+ def table_options(table_name) # :nodoc:
+ if comment = table_comment(table_name)
+ { comment: comment }
end
end
- def new_column(name, default, sql_type_metadata, null, table_name, default_function = nil, collation = nil) # :nodoc:
- PostgreSQLColumn.new(name, default, sql_type_metadata, null, table_name, default_function, collation)
+ # Returns a comment stored in database for given table
+        # Returns the comment stored in the database for the given table
+ scope = quoted_scope(table_name, type: "BASE TABLE")
+ if scope[:name]
+ query_value(<<~SQL, "SCHEMA")
+ SELECT pg_catalog.obj_description(c.oid, 'pg_class')
+ FROM pg_catalog.pg_class c
+ LEFT JOIN pg_namespace n ON n.oid = c.relnamespace
+ WHERE c.relname = #{scope[:name]}
+ AND c.relkind IN (#{scope[:type]})
+ AND n.nspname = #{scope[:schema]}
+ SQL
+ end
end
# Returns the current database name.
def current_database
- select_value('select current_database()', 'SCHEMA')
+ query_value("SELECT current_database()", "SCHEMA")
end
# Returns the current schema name.
def current_schema
- select_value('SELECT current_schema', 'SCHEMA')
+ query_value("SELECT current_schema", "SCHEMA")
end
# Returns the current database encoding format.
def encoding
- select_value("SELECT pg_encoding_to_char(encoding) FROM pg_database WHERE datname LIKE '#{current_database}'", 'SCHEMA')
+ query_value("SELECT pg_encoding_to_char(encoding) FROM pg_database WHERE datname = current_database()", "SCHEMA")
end
# Returns the current database collation.
def collation
- select_value("SELECT datcollate FROM pg_database WHERE datname LIKE '#{current_database}'", 'SCHEMA')
+ query_value("SELECT datcollate FROM pg_database WHERE datname = current_database()", "SCHEMA")
end
# Returns the current database ctype.
def ctype
- select_value("SELECT datctype FROM pg_database WHERE datname LIKE '#{current_database}'", 'SCHEMA')
+ query_value("SELECT datctype FROM pg_database WHERE datname = current_database()", "SCHEMA")
end
# Returns an array of schema names.
def schema_names
- select_values(<<-SQL, 'SCHEMA')
+ query_values(<<~SQL, "SCHEMA")
SELECT nspname
FROM pg_namespace
WHERE nspname !~ '^pg_.*'
@@ -266,7 +206,7 @@ module ActiveRecord
end
# Creates a schema for the given schema name.
- def create_schema schema_name
+ def create_schema(schema_name)
execute "CREATE SCHEMA #{quote_schema_name(schema_name)}"
end
@@ -277,42 +217,42 @@ module ActiveRecord
# Sets the schema search path to a string of comma-separated schema names.
# Names beginning with $ have to be quoted (e.g. $user => '$user').
- # See: http://www.postgresql.org/docs/current/static/ddl-schemas.html
+ # See: https://www.postgresql.org/docs/current/static/ddl-schemas.html
#
      # This should not be called manually but set in database.yml.
def schema_search_path=(schema_csv)
if schema_csv
- execute("SET search_path TO #{schema_csv}", 'SCHEMA')
+ execute("SET search_path TO #{schema_csv}", "SCHEMA")
@schema_search_path = schema_csv
end
end
# Returns the active schema search path.
def schema_search_path
- @schema_search_path ||= select_value('SHOW search_path', 'SCHEMA')
+ @schema_search_path ||= query_value("SHOW search_path", "SCHEMA")
end
# Returns the current client message level.
def client_min_messages
- select_value('SHOW client_min_messages', 'SCHEMA')
+ query_value("SHOW client_min_messages", "SCHEMA")
end
# Set the client message level.
def client_min_messages=(level)
- execute("SET client_min_messages TO '#{level}'", 'SCHEMA')
+ execute("SET client_min_messages TO '#{level}'", "SCHEMA")
end
# Returns the sequence name for a table's primary key or some other specified key.
- def default_sequence_name(table_name, pk = nil) #:nodoc:
- result = serial_sequence(table_name, pk || 'id')
+ def default_sequence_name(table_name, pk = "id") #:nodoc:
+ result = serial_sequence(table_name, pk)
return nil unless result
Utils.extract_schema_qualified_name(result).to_s
rescue ActiveRecord::StatementInvalid
- PostgreSQL::Name.new(nil, "#{table_name}_#{pk || 'id'}_seq").to_s
+ PostgreSQL::Name.new(nil, "#{table_name}_#{pk}_seq").to_s
end
def serial_sequence(table, column)
- select_value("SELECT pg_get_serial_sequence('#{table}', '#{column}')", 'SCHEMA')
+ query_value("SELECT pg_get_serial_sequence(#{quote(table)}, #{quote(column)})", "SCHEMA")
end
# Sets the sequence of a table's primary key to the specified value.
@@ -323,16 +263,16 @@ module ActiveRecord
if sequence
quoted_sequence = quote_table_name(sequence)
- select_value("SELECT setval('#{quoted_sequence}', #{value})", 'SCHEMA')
+ query_value("SELECT setval(#{quote(quoted_sequence)}, #{value})", "SCHEMA")
else
- @logger.warn "#{table} has primary key #{pk} with no default sequence" if @logger
+ @logger.warn "#{table} has primary key #{pk} with no default sequence." if @logger
end
end
end
# Resets the sequence of a table's primary key to the maximum value.
def reset_pk_sequence!(table, pk = nil, sequence = nil) #:nodoc:
- unless pk and sequence
+ unless pk && sequence
default_pk, default_sequence = pk_and_sequence_for(table)
pk ||= default_pk
@@ -340,15 +280,21 @@ module ActiveRecord
end
if @logger && pk && !sequence
- @logger.warn "#{table} has primary key #{pk} with no default sequence"
+ @logger.warn "#{table} has primary key #{pk} with no default sequence."
end
if pk && sequence
quoted_sequence = quote_table_name(sequence)
+ max_pk = query_value("SELECT MAX(#{quote_column_name pk}) FROM #{quote_table_name(table)}", "SCHEMA")
+ if max_pk.nil?
+ if postgresql_version >= 100000
+ minvalue = query_value("SELECT seqmin FROM pg_sequence WHERE seqrelid = #{quote(quoted_sequence)}::regclass", "SCHEMA")
+ else
+ minvalue = query_value("SELECT min_value FROM #{quoted_sequence}", "SCHEMA")
+ end
+ end
- select_value(<<-end_sql, 'SCHEMA')
- SELECT setval('#{quoted_sequence}', (SELECT COALESCE(MAX(#{quote_column_name pk})+(SELECT increment_by FROM #{quoted_sequence}), (SELECT min_value FROM #{quoted_sequence})) FROM #{quote_table_name(table)}), false)
- end_sql
+ query_value("SELECT setval(#{quote(quoted_sequence)}, #{max_pk ? max_pk : minvalue}, #{max_pk ? true : false})", "SCHEMA")
end
end
@@ -356,7 +302,7 @@ module ActiveRecord
def pk_and_sequence_for(table) #:nodoc:
# First try looking for a sequence with a dependency on the
# given table's primary key.
- result = query(<<-end_sql, 'SCHEMA')[0]
+ result = query(<<~SQL, "SCHEMA")[0]
SELECT attr.attname, nsp.nspname, seq.relname
FROM pg_class seq,
pg_attribute attr,
@@ -372,11 +318,11 @@ module ActiveRecord
AND seq.relnamespace = nsp.oid
AND cons.contype = 'p'
AND dep.classid = 'pg_class'::regclass
- AND dep.refobjid = '#{quote_table_name(table)}'::regclass
- end_sql
+ AND dep.refobjid = #{quote(quote_table_name(table))}::regclass
+ SQL
- if result.nil? or result.empty?
- result = query(<<-end_sql, 'SCHEMA')[0]
+ if result.nil? || result.empty?
+ result = query(<<~SQL, "SCHEMA")[0]
SELECT attr.attname, nsp.nspname,
CASE
WHEN pg_get_expr(def.adbin, def.adrelid) !~* 'nextval' THEN NULL
@@ -390,10 +336,10 @@ module ActiveRecord
JOIN pg_attrdef def ON (adrelid = attrelid AND adnum = attnum)
JOIN pg_constraint cons ON (conrelid = adrelid AND adnum = conkey[1])
JOIN pg_namespace nsp ON (t.relnamespace = nsp.oid)
- WHERE t.oid = '#{quote_table_name(table)}'::regclass
+ WHERE t.oid = #{quote(quote_table_name(table))}::regclass
AND cons.contype = 'p'
AND pg_get_expr(def.adbin, def.adrelid) ~* 'nextval|uuid_generate'
- end_sql
+ SQL
end
pk = result.shift
@@ -407,20 +353,46 @@ module ActiveRecord
end
def primary_keys(table_name) # :nodoc:
- select_values(<<-SQL.strip_heredoc, 'SCHEMA')
- WITH pk_constraint AS (
- SELECT conrelid, unnest(conkey) AS connum FROM pg_constraint
- WHERE contype = 'p'
- AND conrelid = '#{quote_table_name(table_name)}'::regclass
- ), cons AS (
- SELECT conrelid, connum, row_number() OVER() AS rownum FROM pk_constraint
- )
- SELECT attr.attname FROM pg_attribute attr
- INNER JOIN cons ON attr.attrelid = cons.conrelid AND attr.attnum = cons.connum
- ORDER BY cons.rownum
+ query_values(<<~SQL, "SCHEMA")
+ SELECT a.attname
+ FROM (
+ SELECT indrelid, indkey, generate_subscripts(indkey, 1) idx
+ FROM pg_index
+ WHERE indrelid = #{quote(quote_table_name(table_name))}::regclass
+ AND indisprimary
+ ) i
+ JOIN pg_attribute a
+ ON a.attrelid = i.indrelid
+ AND a.attnum = i.indkey[i.idx]
+ ORDER BY i.idx
SQL
end
+ def bulk_change_table(table_name, operations)
+ sql_fragments = []
+ non_combinable_operations = []
+
+ operations.each do |command, args|
+ table, arguments = args.shift, args
+ method = :"#{command}_for_alter"
+
+ if respond_to?(method, true)
+ sqls, procs = Array(send(method, table, *arguments)).partition { |v| v.is_a?(String) }
+ sql_fragments << sqls
+ non_combinable_operations.concat(procs)
+ else
+ execute "ALTER TABLE #{quote_table_name(table_name)} #{sql_fragments.join(", ")}" unless sql_fragments.empty?
+ non_combinable_operations.each(&:call)
+ sql_fragments = []
+ non_combinable_operations = []
+ send(command, table, *arguments)
+ end
+ end
+
+ execute "ALTER TABLE #{quote_table_name(table_name)} #{sql_fragments.join(", ")}" unless sql_fragments.empty?
+ non_combinable_operations.each(&:call)
+ end
+
# Renames a table.
# Also renames a table's primary key sequence if the sequence name exists and
# matches the Active Record default.
@@ -431,67 +403,55 @@ module ActiveRecord
clear_cache!
execute "ALTER TABLE #{quote_table_name(table_name)} RENAME TO #{quote_table_name(new_name)}"
pk, seq = pk_and_sequence_for(new_name)
- if seq && seq.identifier == "#{table_name}_#{pk}_seq"
- new_seq = "#{new_name}_#{pk}_seq"
+ if pk
idx = "#{table_name}_pkey"
new_idx = "#{new_name}_pkey"
- execute "ALTER TABLE #{seq.quoted} RENAME TO #{quote_table_name(new_seq)}"
execute "ALTER INDEX #{quote_table_name(idx)} RENAME TO #{quote_table_name(new_idx)}"
+ if seq && seq.identifier == "#{table_name}_#{pk}_seq"
+ new_seq = "#{new_name}_#{pk}_seq"
+ execute "ALTER TABLE #{seq.quoted} RENAME TO #{quote_table_name(new_seq)}"
+ end
end
-
rename_table_indexes(table_name, new_name)
end
def add_column(table_name, column_name, type, options = {}) #:nodoc:
clear_cache!
super
+ change_column_comment(table_name, column_name, options[:comment]) if options.key?(:comment)
end
def change_column(table_name, column_name, type, options = {}) #:nodoc:
clear_cache!
- quoted_table_name = quote_table_name(table_name)
- quoted_column_name = quote_column_name(column_name)
- sql_type = type_to_sql(type, options[:limit], options[:precision], options[:scale], options[:array])
- sql = "ALTER TABLE #{quoted_table_name} ALTER COLUMN #{quoted_column_name} TYPE #{sql_type}"
- if options[:collation]
- sql << " COLLATE \"#{options[:collation]}\""
- end
- if options[:using]
- sql << " USING #{options[:using]}"
- elsif options[:cast_as]
- cast_as_type = type_to_sql(options[:cast_as], options[:limit], options[:precision], options[:scale], options[:array])
- sql << " USING CAST(#{quoted_column_name} AS #{cast_as_type})"
- end
- execute sql
-
- change_column_default(table_name, column_name, options[:default]) if options_include_default?(options)
- change_column_null(table_name, column_name, options[:null], options[:default]) if options.key?(:null)
+ sqls, procs = Array(change_column_for_alter(table_name, column_name, type, options)).partition { |v| v.is_a?(String) }
+ execute "ALTER TABLE #{quote_table_name(table_name)} #{sqls.join(", ")}"
+ procs.each(&:call)
end
# Changes the default value of a table column.
def change_column_default(table_name, column_name, default_or_changes) # :nodoc:
- clear_cache!
- column = column_for(table_name, column_name)
- return unless column
-
- default = extract_new_default_value(default_or_changes)
- alter_column_query = "ALTER TABLE #{quote_table_name(table_name)} ALTER COLUMN #{quote_column_name(column_name)} %s"
- if default.nil?
- # <tt>DEFAULT NULL</tt> results in the same behavior as <tt>DROP DEFAULT</tt>. However, PostgreSQL will
- # cast the default to the columns type, which leaves us with a default like "default NULL::character varying".
- execute alter_column_query % "DROP DEFAULT"
- else
- execute alter_column_query % "SET DEFAULT #{quote_default_expression(default, column)}"
- end
+ execute "ALTER TABLE #{quote_table_name(table_name)} #{change_column_default_for_alter(table_name, column_name, default_or_changes)}"
end
def change_column_null(table_name, column_name, null, default = nil) #:nodoc:
clear_cache!
unless null || default.nil?
column = column_for(table_name, column_name)
- execute("UPDATE #{quote_table_name(table_name)} SET #{quote_column_name(column_name)}=#{quote_default_expression(default, column)} WHERE #{quote_column_name(column_name)} IS NULL") if column
+ execute "UPDATE #{quote_table_name(table_name)} SET #{quote_column_name(column_name)}=#{quote_default_expression(default, column)} WHERE #{quote_column_name(column_name)} IS NULL" if column
end
- execute("ALTER TABLE #{quote_table_name(table_name)} ALTER #{quote_column_name(column_name)} #{null ? 'DROP' : 'SET'} NOT NULL")
+ execute "ALTER TABLE #{quote_table_name(table_name)} #{change_column_null_for_alter(table_name, column_name, null, default)}"
+ end
+
+        # Adds a comment to the given table column, or drops it if +comment+ is +nil+
+ def change_column_comment(table_name, column_name, comment) # :nodoc:
+ clear_cache!
+ execute "COMMENT ON COLUMN #{quote_table_name(table_name)}.#{quote_column_name(column_name)} IS #{quote(comment)}"
+ end
+
+ # Adds comment for given table or drops it if +comment+ is a +nil+
+        # Adds a comment to the given table, or drops it if +comment+ is +nil+
+ clear_cache!
+ execute "COMMENT ON TABLE #{quote_table_name(table_name)} IS #{quote(comment)}"
end
# Renames a column in a table.
@@ -502,8 +462,10 @@ module ActiveRecord
end
def add_index(table_name, column_name, options = {}) #:nodoc:
- index_name, index_type, index_columns, index_options, index_algorithm, index_using = add_index_options(table_name, column_name, options)
- execute "CREATE #{index_type} INDEX #{index_algorithm} #{quote_column_name(index_name)} ON #{quote_table_name(table_name)} #{index_using} (#{index_columns})#{index_options}"
+ index_name, index_type, index_columns_and_opclasses, index_options, index_algorithm, index_using, comment = add_index_options(table_name, column_name, options)
+ execute("CREATE #{index_type} INDEX #{index_algorithm} #{quote_column_name(index_name)} ON #{quote_table_name(table_name)} #{index_using} (#{index_columns_and_opclasses})#{index_options}").tap do
+ execute "COMMENT ON INDEX #{quote_column_name(index_name)} IS #{quote(comment)}" if comment
+ end
end
def remove_index(table_name, options = {}) #:nodoc:
@@ -539,8 +501,9 @@ module ActiveRecord
end
def foreign_keys(table_name)
- fk_info = select_all <<-SQL.strip_heredoc
- SELECT t2.oid::regclass::text AS to_table, a1.attname AS column, a2.attname AS primary_key, c.conname AS name, c.confupdtype AS on_update, c.confdeltype AS on_delete
+ scope = quoted_scope(table_name)
+ fk_info = exec_query(<<~SQL, "SCHEMA")
+ SELECT t2.oid::regclass::text AS to_table, a1.attname AS column, a2.attname AS primary_key, c.conname AS name, c.confupdtype AS on_update, c.confdeltype AS on_delete, c.convalidated AS valid
FROM pg_constraint c
JOIN pg_class t1 ON c.conrelid = t1.oid
JOIN pg_class t2 ON c.confrelid = t2.oid
@@ -548,94 +511,269 @@ module ActiveRecord
JOIN pg_attribute a2 ON a2.attnum = c.confkey[1] AND a2.attrelid = t2.oid
JOIN pg_namespace t3 ON c.connamespace = t3.oid
WHERE c.contype = 'f'
- AND t1.relname = #{quote(table_name)}
- AND t3.nspname = ANY (current_schemas(false))
+ AND t1.relname = #{scope[:name]}
+ AND t3.nspname = #{scope[:schema]}
ORDER BY c.conname
SQL
fk_info.map do |row|
options = {
- column: row['column'],
- name: row['name'],
- primary_key: row['primary_key']
+ column: row["column"],
+ name: row["name"],
+ primary_key: row["primary_key"]
}
- options[:on_delete] = extract_foreign_key_action(row['on_delete'])
- options[:on_update] = extract_foreign_key_action(row['on_update'])
+ options[:on_delete] = extract_foreign_key_action(row["on_delete"])
+ options[:on_update] = extract_foreign_key_action(row["on_update"])
+ options[:validate] = row["valid"]
- ForeignKeyDefinition.new(table_name, row['to_table'], options)
+ ForeignKeyDefinition.new(table_name, row["to_table"], options)
end
end
- def extract_foreign_key_action(specifier) # :nodoc:
- case specifier
- when 'c'; :cascade
- when 'n'; :nullify
- when 'r'; :restrict
- end
+ def foreign_tables
+ query_values(data_source_sql(type: "FOREIGN TABLE"), "SCHEMA")
end
- def index_name_length
- 63
+ def foreign_table_exists?(table_name)
+ query_values(data_source_sql(table_name, type: "FOREIGN TABLE"), "SCHEMA").any? if table_name.present?
end
# Maps logical Rails types to PostgreSQL-specific data types.
- def type_to_sql(type, limit = nil, precision = nil, scale = nil, array = nil)
- sql = case type.to_s
- when 'binary'
- # PostgreSQL doesn't support limits on binary (bytea) columns.
- # The hard limit is 1GB, because of a 32-bit size field, and TOAST.
- case limit
- when nil, 0..0x3fffffff; super(type)
- else raise(ActiveRecordError, "No binary type has byte size #{limit}.")
- end
- when 'text'
- # PostgreSQL doesn't support limits on text columns.
- # The hard limit is 1GB, according to section 8.3 in the manual.
- case limit
- when nil, 0..0x3fffffff; super(type)
- else raise(ActiveRecordError, "The limit on text can be at most 1GB - 1byte.")
- end
- when 'integer'
- case limit
- when 1, 2; 'smallint'
- when nil, 3, 4; 'integer'
- when 5..8; 'bigint'
- else raise(ActiveRecordError, "No integer type has byte size #{limit}. Use a numeric with precision 0 instead.")
+ def type_to_sql(type, limit: nil, precision: nil, scale: nil, array: nil, **) # :nodoc:
+ sql = \
+ case type.to_s
+ when "binary"
+ # PostgreSQL doesn't support limits on binary (bytea) columns.
+ # The hard limit is 1GB, because of a 32-bit size field, and TOAST.
+ case limit
+ when nil, 0..0x3fffffff; super(type)
+ else raise ActiveRecordError, "No binary type has byte size #{limit}. The limit on binary can be at most 1GB - 1byte."
+ end
+ when "text"
+ # PostgreSQL doesn't support limits on text columns.
+ # The hard limit is 1GB, according to section 8.3 in the manual.
+ case limit
+ when nil, 0..0x3fffffff; super(type)
+ else raise ActiveRecordError, "No text type has byte size #{limit}. The limit on text can be at most 1GB - 1byte."
+ end
+ when "integer"
+ case limit
+ when 1, 2; "smallint"
+ when nil, 3, 4; "integer"
+ when 5..8; "bigint"
+ else raise(ActiveRecordError, "No integer type has byte size #{limit}. Use a numeric with scale 0 instead.")
+ end
+ else
+ super
end
- else
- super(type, limit, precision, scale)
- end
- sql << '[]' if array && type != :primary_key
+ sql = "#{sql}[]" if array && type != :primary_key
sql
end
# PostgreSQL requires the ORDER BY columns in the select list for distinct queries, and
# requires that the ORDER BY include the distinct column.
def columns_for_distinct(columns, orders) #:nodoc:
- order_columns = orders.reject(&:blank?).map{ |s|
+ order_columns = orders.reject(&:blank?).map { |s|
# Convert Arel node to string
s = s.to_sql unless s.is_a?(String)
# Remove any ASC/DESC modifiers
- s.gsub(/\s+(?:ASC|DESC)\b/i, '')
- .gsub(/\s+NULLS\s+(?:FIRST|LAST)\b/i, '')
+ s.gsub(/\s+(?:ASC|DESC)\b/i, "")
+ .gsub(/\s+NULLS\s+(?:FIRST|LAST)\b/i, "")
}.reject(&:blank?).map.with_index { |column, i| "#{column} AS alias_#{i}" }
- [super, *order_columns].join(', ')
+ (order_columns << super).join(", ")
end
- def fetch_type_metadata(column_name, sql_type, oid, fmod)
- cast_type = get_oid_type(oid, fmod, column_name, sql_type)
- simple_type = SqlTypeMetadata.new(
- sql_type: sql_type,
- type: cast_type.type,
- limit: cast_type.limit,
- precision: cast_type.precision,
- scale: cast_type.scale,
- )
- PostgreSQLTypeMetadata.new(simple_type, oid: oid, fmod: fmod)
+ def update_table_definition(table_name, base) # :nodoc:
+ PostgreSQL::Table.new(table_name, base)
+ end
+
+ def create_schema_dumper(options) # :nodoc:
+ PostgreSQL::SchemaDumper.create(self, options)
+ end
+
+ # Validates the given constraint.
+ #
+ # Validates the constraint named +constraint_name+ on +accounts+.
+ #
+ # validate_constraint :accounts, :constraint_name
+ def validate_constraint(table_name, constraint_name)
+ return unless supports_validate_constraints?
+
+ at = create_alter_table table_name
+ at.validate_constraint constraint_name
+
+ execute schema_creation.accept(at)
+ end
+
+ # Validates the given foreign key.
+ #
+ # Validates the foreign key on +accounts.branch_id+.
+ #
+ # validate_foreign_key :accounts, :branches
+ #
+ # Validates the foreign key on +accounts.owner_id+.
+ #
+ # validate_foreign_key :accounts, column: :owner_id
+ #
+ # Validates the foreign key named +special_fk_name+ on the +accounts+ table.
+ #
+ # validate_foreign_key :accounts, name: :special_fk_name
+ #
+ # The +options+ hash accepts the same keys as SchemaStatements#add_foreign_key.
+ def validate_foreign_key(from_table, to_table = nil, **options)
+ return unless supports_validate_constraints?
+
+ fk_name_to_validate = foreign_key_for!(from_table, to_table: to_table, **options).name
+
+ validate_constraint from_table, fk_name_to_validate
end
+
+ private
+ def schema_creation
+ PostgreSQL::SchemaCreation.new(self)
+ end
+
+ def create_table_definition(*args)
+ PostgreSQL::TableDefinition.new(self, *args)
+ end
+
+ def create_alter_table(name)
+ PostgreSQL::AlterTable.new create_table_definition(name)
+ end
+
+ def new_column_from_field(table_name, field)
+ column_name, type, default, notnull, oid, fmod, collation, comment = field
+ type_metadata = fetch_type_metadata(column_name, type, oid.to_i, fmod.to_i)
+ default_value = extract_value_from_default(default)
+ default_function = extract_default_function(default_value, default)
+
+ PostgreSQLColumn.new(
+ column_name,
+ default_value,
+ type_metadata,
+ !notnull,
+ table_name,
+ default_function,
+ collation,
+ comment: comment.presence,
+ max_identifier_length: max_identifier_length
+ )
+ end
+
+ def fetch_type_metadata(column_name, sql_type, oid, fmod)
+ cast_type = get_oid_type(oid, fmod, column_name, sql_type)
+ simple_type = SqlTypeMetadata.new(
+ sql_type: sql_type,
+ type: cast_type.type,
+ limit: cast_type.limit,
+ precision: cast_type.precision,
+ scale: cast_type.scale,
+ )
+ PostgreSQLTypeMetadata.new(simple_type, oid: oid, fmod: fmod)
+ end
+
+ def extract_foreign_key_action(specifier)
+ case specifier
+ when "c"; :cascade
+ when "n"; :nullify
+ when "r"; :restrict
+ end
+ end
+
+ def add_column_for_alter(table_name, column_name, type, options = {})
+ return super unless options.key?(:comment)
+ [super, Proc.new { change_column_comment(table_name, column_name, options[:comment]) }]
+ end
+
+ def change_column_for_alter(table_name, column_name, type, options = {})
+ td = create_table_definition(table_name)
+ cd = td.new_column_definition(column_name, type, options)
+ sqls = [schema_creation.accept(ChangeColumnDefinition.new(cd, column_name))]
+ sqls << Proc.new { change_column_comment(table_name, column_name, options[:comment]) } if options.key?(:comment)
+ sqls
+ end
+
+ def change_column_default_for_alter(table_name, column_name, default_or_changes)
+ column = column_for(table_name, column_name)
+ return unless column
+
+ default = extract_new_default_value(default_or_changes)
+ alter_column_query = "ALTER COLUMN #{quote_column_name(column_name)} %s"
+ if default.nil?
+ # <tt>DEFAULT NULL</tt> results in the same behavior as <tt>DROP DEFAULT</tt>. However, PostgreSQL will
+ # cast the default to the columns type, which leaves us with a default like "default NULL::character varying".
+ alter_column_query % "DROP DEFAULT"
+ else
+ alter_column_query % "SET DEFAULT #{quote_default_expression(default, column)}"
+ end
+ end
+
+ def change_column_null_for_alter(table_name, column_name, null, default = nil)
+ "ALTER COLUMN #{quote_column_name(column_name)} #{null ? 'DROP' : 'SET'} NOT NULL"
+ end
+
+ def add_timestamps_for_alter(table_name, options = {})
+ options[:null] = false if options[:null].nil?
+
+ if !options.key?(:precision) && supports_datetime_with_precision?
+ options[:precision] = 6
+ end
+
+ [add_column_for_alter(table_name, :created_at, :datetime, options), add_column_for_alter(table_name, :updated_at, :datetime, options)]
+ end
+
+ def remove_timestamps_for_alter(table_name, options = {})
+ [remove_column_for_alter(table_name, :updated_at), remove_column_for_alter(table_name, :created_at)]
+ end
+
+ def add_index_opclass(quoted_columns, **options)
+ opclasses = options_for_index_columns(options[:opclass])
+ quoted_columns.each do |name, column|
+ column << " #{opclasses[name]}" if opclasses[name].present?
+ end
+ end
+
+ def add_options_for_index_columns(quoted_columns, **options)
+ quoted_columns = add_index_opclass(quoted_columns, options)
+ super
+ end
+
+ def data_source_sql(name = nil, type: nil)
+ scope = quoted_scope(name, type: type)
+ scope[:type] ||= "'r','v','m','p','f'" # (r)elation/table, (v)iew, (m)aterialized view, (p)artitioned table, (f)oreign table
+
+ sql = +"SELECT c.relname FROM pg_class c LEFT JOIN pg_namespace n ON n.oid = c.relnamespace"
+ sql << " WHERE n.nspname = #{scope[:schema]}"
+ sql << " AND c.relname = #{scope[:name]}" if scope[:name]
+ sql << " AND c.relkind IN (#{scope[:type]})"
+ sql
+ end
+
+ def quoted_scope(name = nil, type: nil)
+ schema, name = extract_schema_qualified_name(name)
+ type = \
+ case type
+ when "BASE TABLE"
+ "'r','p'"
+ when "VIEW"
+ "'v','m'"
+ when "FOREIGN TABLE"
+ "'f'"
+ end
+ scope = {}
+ scope[:schema] = schema ? quote(schema) : "ANY (current_schemas(false))"
+ scope[:name] = quote(name) if name
+ scope[:type] = type if type
+ scope
+ end
+
+ def extract_schema_qualified_name(string)
+ name = Utils.extract_schema_qualified_name(string.to_s)
+ [name.schema, name.identifier]
+ end
end
end
end
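
`bulk_change_table` splits each recorded operation into plain SQL fragments and procs: fragments are folded into a single `ALTER TABLE`, while work that cannot be combined (such as `COMMENT ON COLUMN`) runs afterwards as procs. A hedged sketch of the migration call that exercises this path (column names and values are illustrative):

    # Sketch: with bulk: true these changes become one ALTER TABLE statement,
    # followed by a separate COMMENT ON COLUMN for the comment option.
    change_table :accounts, bulk: true do |t|
      t.column         :plan, :string, comment: "billing tier"
      t.change         :balance, :decimal, precision: 12, scale: 2
      t.change_default :plan, "free"
    end
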
diff --git a/activerecord/lib/active_record/connection_adapters/postgresql/type_metadata.rb b/activerecord/lib/active_record/connection_adapters/postgresql/type_metadata.rb
index b2c49989a4..cd69d28139 100644
--- a/activerecord/lib/active_record/connection_adapters/postgresql/type_metadata.rb
+++ b/activerecord/lib/active_record/connection_adapters/postgresql/type_metadata.rb
@@ -1,6 +1,11 @@
+# frozen_string_literal: true
+
module ActiveRecord
+ # :stopdoc:
module ConnectionAdapters
class PostgreSQLTypeMetadata < DelegateClass(SqlTypeMetadata)
+ undef to_yaml if method_defined?(:to_yaml)
+
attr_reader :oid, :fmod, :array
def initialize(type_metadata, oid: nil, fmod: nil)
@@ -8,11 +13,11 @@ module ActiveRecord
@type_metadata = type_metadata
@oid = oid
@fmod = fmod
- @array = /\[\]$/ === type_metadata.sql_type
+ @array = /\[\]$/.match?(type_metadata.sql_type)
end
def sql_type
- super.gsub(/\[\]$/, "".freeze)
+ super.gsub(/\[\]$/, "")
end
def ==(other)
@@ -27,9 +32,9 @@ module ActiveRecord
protected
- def attributes_for_hash
- [self.class, @type_metadata, oid, fmod]
- end
+ def attributes_for_hash
+ [self.class, @type_metadata, oid, fmod]
+ end
end
end
end
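
The only behavioral tweak here is swapping `Regexp#===` for `Regexp#match?` when detecting array types, which yields the same boolean without allocating a `MatchData`:

    # Sketch: both predicates agree, but match? skips the MatchData allocation.
    /\[\]$/ === "integer[]"      # => true (also sets $~ as a side effect)
    /\[\]$/.match?("integer[]")  # => true
    /\[\]$/.match?("integer")    # => false
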
diff --git a/activerecord/lib/active_record/connection_adapters/postgresql/utils.rb b/activerecord/lib/active_record/connection_adapters/postgresql/utils.rb
index 9a0b80d7d3..f2f4701500 100644
--- a/activerecord/lib/active_record/connection_adapters/postgresql/utils.rb
+++ b/activerecord/lib/active_record/connection_adapters/postgresql/utils.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
module ActiveRecord
module ConnectionAdapters
module PostgreSQL
@@ -19,9 +21,9 @@ module ActiveRecord
def quoted
if schema
- PGconn.quote_ident(schema) << SEPARATOR << PGconn.quote_ident(identifier)
+ PG::Connection.quote_ident(schema) << SEPARATOR << PG::Connection.quote_ident(identifier)
else
- PGconn.quote_ident(identifier)
+ PG::Connection.quote_ident(identifier)
end
end
@@ -35,6 +37,12 @@ module ActiveRecord
end
protected
+
+ def parts
+ @parts ||= [@schema, @identifier].compact
+ end
+
+ private
def unquote(part)
if part && part.start_with?('"')
part[1..-2]
@@ -42,10 +50,6 @@ module ActiveRecord
part
end
end
-
- def parts
- @parts ||= [@schema, @identifier].compact
- end
end
module Utils # :nodoc:
@@ -53,7 +57,7 @@ module ActiveRecord
# Returns an instance of <tt>ActiveRecord::ConnectionAdapters::PostgreSQL::Name</tt>
# extracted from +string+.
- # +schema+ is nil if not specified in +string+.
+ # +schema+ is +nil+ if not specified in +string+.
# +schema+ and +identifier+ exclude surrounding quotes (regardless of whether provided in +string+)
# +string+ supports the range of schema/table references understood by PostgreSQL, for example:
#
@@ -64,7 +68,7 @@ module ActiveRecord
# * <tt>"schema_name".table_name</tt>
# * <tt>"schema.name"."table name"</tt>
def extract_schema_qualified_name(string)
- schema, table = string.scan(/[^".\s]+|"[^"]*"/)
+ schema, table = string.scan(/[^".]+|"[^"]*"/)
if table.nil?
table = schema
schema = nil
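
`Utils.extract_schema_qualified_name` turns a possibly quoted `schema.table` string into a `PostgreSQL::Name`; dropping `\s` from the character class means unquoted identifiers containing spaces are no longer cut short. Illustrative calls, following the parsing rules documented above:

    # Sketch: how schema-qualified names are split and unquoted.
    utils = ActiveRecord::ConnectionAdapters::PostgreSQL::Utils

    name = utils.extract_schema_qualified_name('"public"."order items"')
    name.schema      # => "public"
    name.identifier  # => "order items"

    utils.extract_schema_qualified_name("users").schema  # => nil
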
diff --git a/activerecord/lib/active_record/connection_adapters/postgresql_adapter.rb b/activerecord/lib/active_record/connection_adapters/postgresql_adapter.rb
index 6497b1cc31..29f764e8f4 100644
--- a/activerecord/lib/active_record/connection_adapters/postgresql_adapter.rb
+++ b/activerecord/lib/active_record/connection_adapters/postgresql_adapter.rb
@@ -1,22 +1,31 @@
+# frozen_string_literal: true
+
# Make sure we're using pg high enough for type casts and Ruby 2.2+ compatibility
-gem 'pg', '~> 0.18'
-require 'pg'
+gem "pg", ">= 0.18", "< 2.0"
+require "pg"
+
+# Use async_exec instead of exec_params on pg versions before 1.1
+class ::PG::Connection # :nodoc:
+ unless self.public_method_defined?(:async_exec_params)
+ remove_method :exec_params
+ alias exec_params async_exec
+ end
+end
require "active_record/connection_adapters/abstract_adapter"
+require "active_record/connection_adapters/statement_pool"
require "active_record/connection_adapters/postgresql/column"
require "active_record/connection_adapters/postgresql/database_statements"
require "active_record/connection_adapters/postgresql/explain_pretty_printer"
require "active_record/connection_adapters/postgresql/oid"
require "active_record/connection_adapters/postgresql/quoting"
require "active_record/connection_adapters/postgresql/referential_integrity"
+require "active_record/connection_adapters/postgresql/schema_creation"
require "active_record/connection_adapters/postgresql/schema_definitions"
require "active_record/connection_adapters/postgresql/schema_dumper"
require "active_record/connection_adapters/postgresql/schema_statements"
require "active_record/connection_adapters/postgresql/type_metadata"
require "active_record/connection_adapters/postgresql/utils"
-require "active_record/connection_adapters/statement_pool"
-
-require 'ipaddr'
module ActiveRecord
module ConnectionHandling # :nodoc:
@@ -30,13 +39,18 @@ module ActiveRecord
conn_params[:user] = conn_params.delete(:username) if conn_params[:username]
conn_params[:dbname] = conn_params.delete(:database) if conn_params[:database]
- # Forward only valid config params to PGconn.connect.
- valid_conn_param_keys = PGconn.conndefaults_hash.keys + [:requiressl]
+ # Forward only valid config params to PG::Connection.connect.
+ valid_conn_param_keys = PG::Connection.conndefaults_hash.keys + [:requiressl]
conn_params.slice!(*valid_conn_param_keys)
- # The postgres drivers don't allow the creation of an unconnected PGconn object,
- # so just pass a nil connection object for the time being.
- ConnectionAdapters::PostgreSQLAdapter.new(nil, logger, conn_params, config)
+ conn = PG.connect(conn_params)
+ ConnectionAdapters::PostgreSQLAdapter.new(conn, logger, conn_params, config)
+ rescue ::PG::Error => error
+ if error.message.include?("does not exist")
+ raise ActiveRecord::NoDatabaseError
+ else
+ raise
+ end
end
end
@@ -63,19 +77,32 @@ module ActiveRecord
# defaults to true.
#
# Any further options are used as connection parameters to libpq. See
- # http://www.postgresql.org/docs/current/static/libpq-connect.html for the
+ # https://www.postgresql.org/docs/current/static/libpq-connect.html for the
# list of parameters.
#
# In addition, default connection parameters of libpq can be set per environment variables.
- # See http://www.postgresql.org/docs/current/static/libpq-envars.html .
+ # See https://www.postgresql.org/docs/current/static/libpq-envars.html .
class PostgreSQLAdapter < AbstractAdapter
- ADAPTER_NAME = 'PostgreSQL'.freeze
+ ADAPTER_NAME = "PostgreSQL"
+
+ ##
+ # :singleton-method:
+ # PostgreSQL allows the creation of "unlogged" tables, which do not record
+ # data in the PostgreSQL Write-Ahead Log. This can make the tables faster,
+ # but significantly increases the risk of data loss if the database
+ # crashes. As a result, this should not be used in production
+ # environments. If you would like all created tables to be unlogged in
+ # the test environment you can add the following line to your test.rb
+ # file:
+ #
+ # ActiveRecord::ConnectionAdapters::PostgreSQLAdapter.create_unlogged_tables = true
+ class_attribute :create_unlogged_tables, default: false
NATIVE_DATABASE_TYPES = {
- primary_key: "serial primary key",
+ primary_key: "bigserial primary key",
string: { name: "character varying" },
text: { name: "text" },
- integer: { name: "integer" },
+ integer: { name: "integer", limit: 4 },
float: { name: "float" },
decimal: { name: "decimal" },
datetime: { name: "timestamp" },
@@ -110,6 +137,8 @@ module ActiveRecord
bit: { name: "bit" },
bit_varying: { name: "bit varying" },
money: { name: "money" },
+ interval: { name: "interval" },
+ oid: { name: "oid" },
}
OID = PostgreSQL::OID #:nodoc:
@@ -118,16 +147,8 @@ module ActiveRecord
include PostgreSQL::ReferentialIntegrity
include PostgreSQL::SchemaStatements
include PostgreSQL::DatabaseStatements
- include PostgreSQL::ColumnDumper
- include Savepoints
- def schema_creation # :nodoc:
- PostgreSQL::SchemaCreation.new self
- end
-
- # Returns true, since this connection adapter supports prepared statement
- # caching.
- def supports_statement_cache?
+ def supports_bulk_alter?
true
end
@@ -139,6 +160,10 @@ module ActiveRecord
true
end
+ def supports_expression_index?
+ true
+ end
+
def supports_transaction_isolation?
true
end
@@ -147,6 +172,10 @@ module ActiveRecord
true
end
+ def supports_validate_constraints?
+ true
+ end
+
def supports_views?
true
end
@@ -156,14 +185,33 @@ module ActiveRecord
end
def supports_json?
- postgresql_version >= 90200
+ true
+ end
+
+ def supports_comments?
+ true
end
+ def supports_savepoints?
+ true
+ end
+
+ def supports_insert_returning?
+ true
+ end
+
+ def supports_insert_on_conflict?
+ postgresql_version >= 90500
+ end
+ alias supports_insert_on_duplicate_skip? supports_insert_on_conflict?
+ alias supports_insert_on_duplicate_update? supports_insert_on_conflict?
+ alias supports_insert_conflict_target? supports_insert_on_conflict?
+
def index_algorithms
- { concurrently: 'CONCURRENTLY' }
+ { concurrently: "CONCURRENTLY" }
end
- class StatementPool < ConnectionAdapters::StatementPool
+ class StatementPool < ConnectionAdapters::StatementPool # :nodoc:
def initialize(connection, max)
super(max)
@connection = connection
@@ -179,14 +227,14 @@ module ActiveRecord
end
private
-
def dealloc(key)
@connection.query "DEALLOCATE #{key}" if connection_active?
+ rescue PG::Error
end
def connection_active?
- @connection.status == PGconn::CONNECTION_OK
- rescue PGError
+ @connection.status == PG::CONNECTION_OK
+ rescue PG::Error
false
end
end
@@ -195,94 +243,75 @@ module ActiveRecord
def initialize(connection, logger, connection_parameters, config)
super(connection, logger, config)
- @visitor = Arel::Visitors::PostgreSQL.new self
- if self.class.type_cast_config_to_boolean(config.fetch(:prepared_statements) { true })
- @prepared_statements = true
- @visitor.extend(DetermineIfPreparableVisitor)
- else
- @prepared_statements = false
- end
-
@connection_parameters = connection_parameters
# @local_tz is initialized as nil to avoid warnings when connect tries to use it
@local_tz = nil
- @table_alias_length = nil
+ @max_identifier_length = nil
- connect
+ configure_connection
add_pg_encoders
- @statements = StatementPool.new @connection,
- self.class.type_cast_config_to_integer(config.fetch(:statement_limit) { 1000 })
-
- if postgresql_version < 90100
- raise "Your version of PostgreSQL (#{postgresql_version}) is too old. Active Record supports PostgreSQL >= 9.1."
- end
-
add_pg_decoders
@type_map = Type::HashLookupTypeMap.new
- initialize_type_map(type_map)
- @local_tz = execute('SHOW TIME ZONE', 'SCHEMA').first["TimeZone"]
+ initialize_type_map
+ @local_tz = execute("SHOW TIME ZONE", "SCHEMA").first["TimeZone"]
@use_insert_returning = @config.key?(:insert_returning) ? self.class.type_cast_config_to_boolean(@config[:insert_returning]) : true
end
- # Clears the prepared statements cache.
- def clear_cache!
- @statements.clear
- end
-
- def truncate(table_name, name = nil)
- exec_query "TRUNCATE TABLE #{quote_table_name(table_name)}", name, []
- end
-
# Is this connection alive and ready for queries?
def active?
- @connection.query 'SELECT 1'
+ @lock.synchronize do
+ @connection.query "SELECT 1"
+ end
true
- rescue PGError
+ rescue PG::Error
false
end
# Close then reopen the connection.
def reconnect!
- super
- @connection.reset
- configure_connection
+ @lock.synchronize do
+ super
+ @connection.reset
+ configure_connection
+ rescue PG::ConnectionBad
+ connect
+ end
end
def reset!
- clear_cache!
- reset_transaction
- unless @connection.transaction_status == ::PG::PQTRANS_IDLE
- @connection.query 'ROLLBACK'
+ @lock.synchronize do
+ clear_cache!
+ reset_transaction
+ unless @connection.transaction_status == ::PG::PQTRANS_IDLE
+ @connection.query "ROLLBACK"
+ end
+ @connection.query "DISCARD ALL"
+ configure_connection
end
- @connection.query 'DISCARD ALL'
- configure_connection
end
# Disconnects from the database if already connected. Otherwise, this
# method does nothing.
def disconnect!
- super
- @connection.close rescue nil
- end
-
- def native_database_types #:nodoc:
- NATIVE_DATABASE_TYPES
+ @lock.synchronize do
+ super
+ @connection.close rescue nil
+ end
end
- # Returns true, since this connection adapter supports migrations.
- def supports_migrations?
- true
+ def discard! # :nodoc:
+ @connection.socket_io.reopen(IO::NULL) rescue nil
+ @connection = nil
end
- # Does PostgreSQL support finding primary key on non-Active Record tables?
- def supports_primary_key? #:nodoc:
- true
+ def native_database_types #:nodoc:
+ NATIVE_DATABASE_TYPES
end
def set_standard_conforming_strings
- execute('SET standard_conforming_strings = on', 'SCHEMA')
+ execute("SET standard_conforming_strings = on", "SCHEMA")
end
def supports_ddl_transactions?
@@ -301,27 +330,46 @@ module ActiveRecord
true
end
- # Range datatypes weren't introduced until PostgreSQL 9.2
def supports_ranges?
- postgresql_version >= 90200
+ true
end
+ deprecate :supports_ranges?
def supports_materialized_views?
- postgresql_version >= 90300
+ true
+ end
+
+ def supports_foreign_tables?
+ true
+ end
+
+ def supports_pgcrypto_uuid?
+ postgresql_version >= 90400
+ end
+
+ def supports_optimizer_hints?
+ unless defined?(@has_pg_hint_plan)
+ @has_pg_hint_plan = extension_available?("pg_hint_plan")
+ end
+ @has_pg_hint_plan
+ end
+
+ def supports_lazy_transactions?
+ true
end
def get_advisory_lock(lock_id) # :nodoc:
unless lock_id.is_a?(Integer) && lock_id.bit_length <= 63
- raise(ArgumentError, "Postgres requires advisory lock ids to be a signed 64 bit integer")
+ raise(ArgumentError, "PostgreSQL requires advisory lock ids to be a signed 64 bit integer")
end
- select_value("SELECT pg_try_advisory_lock(#{lock_id});")
+ query_value("SELECT pg_try_advisory_lock(#{lock_id})")
end
def release_advisory_lock(lock_id) # :nodoc:
unless lock_id.is_a?(Integer) && lock_id.bit_length <= 63
- raise(ArgumentError, "Postgres requires advisory lock ids to be a signed 64 bit integer")
+ raise(ArgumentError, "PostgreSQL requires advisory lock ids to be a signed 64 bit integer")
end
- select_value("SELECT pg_advisory_unlock(#{lock_id})")
+ query_value("SELECT pg_advisory_unlock(#{lock_id})")
end
def enable_extension(name)
@@ -336,50 +384,35 @@ module ActiveRecord
}
end
+ def extension_available?(name)
+ query_value("SELECT true FROM pg_available_extensions WHERE name = #{quote(name)}", "SCHEMA")
+ end
+
def extension_enabled?(name)
- if supports_extensions?
- res = exec_query "SELECT EXISTS(SELECT * FROM pg_available_extensions WHERE name = '#{name}' AND installed_version IS NOT NULL) as enabled",
- 'SCHEMA'
- res.cast_values.first
- end
+ query_value("SELECT installed_version IS NOT NULL FROM pg_available_extensions WHERE name = #{quote(name)}", "SCHEMA")
end
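# [Editor's note — illustrative sketch, not part of this patch] These helpers
# are usually called from migrations, e.g.:
#
#   enable_extension "hstore" unless extension_enabled?("hstore")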
def extensions
- if supports_extensions?
- exec_query("SELECT extname from pg_extension", "SCHEMA").cast_values
- else
- super
- end
+ exec_query("SELECT extname FROM pg_extension", "SCHEMA").cast_values
end
# Returns the configured maximum identifier length supported by PostgreSQL
- def table_alias_length
- @table_alias_length ||= query('SHOW max_identifier_length', 'SCHEMA')[0][0].to_i
+ def max_identifier_length
+ @max_identifier_length ||= query_value("SHOW max_identifier_length", "SCHEMA").to_i
end
+ alias table_alias_length max_identifier_length
+ alias index_name_length max_identifier_length
# Set the authorized user for this session
def session_auth=(user)
clear_cache!
- exec_query "SET SESSION AUTHORIZATION #{user}"
+ execute("SET SESSION AUTHORIZATION #{user}")
end
def use_insert_returning?
@use_insert_returning
end
- def valid_type?(type)
- !native_database_types[type].nil?
- end
-
- def update_table_definition(table_name, base) #:nodoc:
- PostgreSQL::Table.new(table_name, base)
- end
-
- def lookup_cast_type(sql_type) # :nodoc:
- oid = execute("SELECT #{quote(sql_type)}::regtype::oid", "SCHEMA").first['oid'].to_i
- super(oid)
- end
-
def column_name_for_operation(operation, node) # :nodoc:
OPERATION_ALIASES.fetch(operation) { operation.downcase }
end
@@ -395,86 +428,130 @@ module ActiveRecord
@connection.server_version
end
- protected
+ def default_index_type?(index) # :nodoc:
+ index.using == :btree || super
+ end
+
+ def build_insert_sql(insert) # :nodoc:
+ sql = +"INSERT #{insert.into} #{insert.values_list}"
- # See http://www.postgresql.org/docs/current/static/errcodes-appendix.html
+ if insert.skip_duplicates?
+ sql << " ON CONFLICT #{insert.conflict_target} DO NOTHING"
+ elsif insert.update_duplicates?
+ sql << " ON CONFLICT #{insert.conflict_target} DO UPDATE SET "
+ sql << insert.updatable_columns.map { |column| "#{column}=excluded.#{column}" }.join(",")
+ end
+
+ sql << " RETURNING #{insert.returning}" if insert.returning
+ sql
+ end
+
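# [Editor's note — illustrative sketch, not part of this patch] Depending on
# the InsertAll object passed in, the method above produces SQL roughly of
# the form (table/column names hypothetical):
#
#   INSERT INTO "books" (...) VALUES (...) ON CONFLICT ("isbn") DO NOTHING
#   INSERT INTO "books" (...) VALUES (...) ON CONFLICT ("isbn") DO UPDATE SET title=excluded.title
#
# with a RETURNING clause appended when insert.returning is set.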
+ private
+ def check_version
+ if postgresql_version < 90300
+ raise "Your version of PostgreSQL (#{postgresql_version}) is too old. Active Record supports PostgreSQL >= 9.3."
+ end
+ end
+
+ # See https://www.postgresql.org/docs/current/static/errcodes-appendix.html
+ VALUE_LIMIT_VIOLATION = "22001"
+ NUMERIC_VALUE_OUT_OF_RANGE = "22003"
+ NOT_NULL_VIOLATION = "23502"
FOREIGN_KEY_VIOLATION = "23503"
UNIQUE_VIOLATION = "23505"
+ SERIALIZATION_FAILURE = "40001"
+ DEADLOCK_DETECTED = "40P01"
+ LOCK_NOT_AVAILABLE = "55P03"
+ QUERY_CANCELED = "57014"
- def translate_exception(exception, message)
+ def translate_exception(exception, message:, sql:, binds:)
return exception unless exception.respond_to?(:result)
- case exception.result.try(:error_field, PGresult::PG_DIAG_SQLSTATE)
+ case exception.result.try(:error_field, PG::PG_DIAG_SQLSTATE)
when UNIQUE_VIOLATION
- RecordNotUnique.new(message)
+ RecordNotUnique.new(message, sql: sql, binds: binds)
when FOREIGN_KEY_VIOLATION
- InvalidForeignKey.new(message)
+ InvalidForeignKey.new(message, sql: sql, binds: binds)
+ when VALUE_LIMIT_VIOLATION
+ ValueTooLong.new(message, sql: sql, binds: binds)
+ when NUMERIC_VALUE_OUT_OF_RANGE
+ RangeError.new(message, sql: sql, binds: binds)
+ when NOT_NULL_VIOLATION
+ NotNullViolation.new(message, sql: sql, binds: binds)
+ when SERIALIZATION_FAILURE
+ SerializationFailure.new(message, sql: sql, binds: binds)
+ when DEADLOCK_DETECTED
+ Deadlocked.new(message, sql: sql, binds: binds)
+ when LOCK_NOT_AVAILABLE
+ LockWaitTimeout.new(message, sql: sql, binds: binds)
+ when QUERY_CANCELED
+ QueryCanceled.new(message, sql: sql, binds: binds)
else
super
end
end
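# [Editor's note — illustrative sketch, not part of this patch] The mapping
# above lets application code rescue specific Active Record errors rather
# than inspect PG error strings (Account and transfer! are hypothetical):
#
#   begin
#     Account.transaction { transfer! }
#   rescue ActiveRecord::Deadlocked, ActiveRecord::LockWaitTimeout
#     # back off and retry later
#   end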
- private
-
- def get_oid_type(oid, fmod, column_name, sql_type = '') # :nodoc:
+ def get_oid_type(oid, fmod, column_name, sql_type = "")
if !type_map.key?(oid)
- load_additional_types(type_map, [oid])
+ load_additional_types([oid])
end
type_map.fetch(oid, fmod, sql_type) {
warn "unknown OID #{oid}: failed to recognize type of '#{column_name}'. It will be treated as String."
- Type::Value.new.tap do |cast_type|
+ Type.default_value.tap do |cast_type|
type_map.register_type(oid, cast_type)
end
}
end
- def initialize_type_map(m) # :nodoc:
- register_class_with_limit m, 'int2', Type::Integer
- register_class_with_limit m, 'int4', Type::Integer
- register_class_with_limit m, 'int8', Type::Integer
- m.alias_type 'oid', 'int2'
- m.register_type 'float4', Type::Float.new
- m.alias_type 'float8', 'float4'
- m.register_type 'text', Type::Text.new
- register_class_with_limit m, 'varchar', Type::String
- m.alias_type 'char', 'varchar'
- m.alias_type 'name', 'varchar'
- m.alias_type 'bpchar', 'varchar'
- m.register_type 'bool', Type::Boolean.new
- register_class_with_limit m, 'bit', OID::Bit
- register_class_with_limit m, 'varbit', OID::BitVarying
- m.alias_type 'timestamptz', 'timestamp'
- m.register_type 'date', Type::Date.new
-
- m.register_type 'money', OID::Money.new
- m.register_type 'bytea', OID::Bytea.new
- m.register_type 'point', OID::Point.new
- m.register_type 'hstore', OID::Hstore.new
- m.register_type 'json', OID::Json.new
- m.register_type 'jsonb', OID::Jsonb.new
- m.register_type 'cidr', OID::Cidr.new
- m.register_type 'inet', OID::Inet.new
- m.register_type 'uuid', OID::Uuid.new
- m.register_type 'xml', OID::Xml.new
- m.register_type 'tsvector', OID::SpecializedString.new(:tsvector)
- m.register_type 'macaddr', OID::SpecializedString.new(:macaddr)
- m.register_type 'citext', OID::SpecializedString.new(:citext)
- m.register_type 'ltree', OID::SpecializedString.new(:ltree)
- m.register_type 'line', OID::SpecializedString.new(:line)
- m.register_type 'lseg', OID::SpecializedString.new(:lseg)
- m.register_type 'box', OID::SpecializedString.new(:box)
- m.register_type 'path', OID::SpecializedString.new(:path)
- m.register_type 'polygon', OID::SpecializedString.new(:polygon)
- m.register_type 'circle', OID::SpecializedString.new(:circle)
-
- # FIXME: why are we keeping these types as strings?
- m.alias_type 'interval', 'varchar'
-
- register_class_with_precision m, 'time', Type::Time
- register_class_with_precision m, 'timestamp', OID::DateTime
-
- m.register_type 'numeric' do |_, fmod, sql_type|
+ def initialize_type_map(m = type_map)
+ m.register_type "int2", Type::Integer.new(limit: 2)
+ m.register_type "int4", Type::Integer.new(limit: 4)
+ m.register_type "int8", Type::Integer.new(limit: 8)
+ m.register_type "oid", OID::Oid.new
+ m.register_type "float4", Type::Float.new
+ m.alias_type "float8", "float4"
+ m.register_type "text", Type::Text.new
+ register_class_with_limit m, "varchar", Type::String
+ m.alias_type "char", "varchar"
+ m.alias_type "name", "varchar"
+ m.alias_type "bpchar", "varchar"
+ m.register_type "bool", Type::Boolean.new
+ register_class_with_limit m, "bit", OID::Bit
+ register_class_with_limit m, "varbit", OID::BitVarying
+ m.alias_type "timestamptz", "timestamp"
+ m.register_type "date", OID::Date.new
+
+ m.register_type "money", OID::Money.new
+ m.register_type "bytea", OID::Bytea.new
+ m.register_type "point", OID::Point.new
+ m.register_type "hstore", OID::Hstore.new
+ m.register_type "json", Type::Json.new
+ m.register_type "jsonb", OID::Jsonb.new
+ m.register_type "cidr", OID::Cidr.new
+ m.register_type "inet", OID::Inet.new
+ m.register_type "uuid", OID::Uuid.new
+ m.register_type "xml", OID::Xml.new
+ m.register_type "tsvector", OID::SpecializedString.new(:tsvector)
+ m.register_type "macaddr", OID::SpecializedString.new(:macaddr)
+ m.register_type "citext", OID::SpecializedString.new(:citext)
+ m.register_type "ltree", OID::SpecializedString.new(:ltree)
+ m.register_type "line", OID::SpecializedString.new(:line)
+ m.register_type "lseg", OID::SpecializedString.new(:lseg)
+ m.register_type "box", OID::SpecializedString.new(:box)
+ m.register_type "path", OID::SpecializedString.new(:path)
+ m.register_type "polygon", OID::SpecializedString.new(:polygon)
+ m.register_type "circle", OID::SpecializedString.new(:circle)
+
+ m.register_type "interval" do |_, _, sql_type|
+ precision = extract_precision(sql_type)
+ OID::SpecializedString.new(:interval, precision: precision)
+ end
+
+ register_class_with_precision m, "time", Type::Time
+ register_class_with_precision m, "timestamp", OID::DateTime
+
+ m.register_type "numeric" do |_, fmod, sql_type|
precision = extract_precision(sql_type)
scale = extract_scale(sql_type)
@@ -494,78 +571,60 @@ module ActiveRecord
end
end
- load_additional_types(m)
- end
-
- def extract_limit(sql_type) # :nodoc:
- case sql_type
- when /^bigint/i, /^int8/i
- 8
- when /^smallint/i
- 2
- else
- super
- end
+ load_additional_types
end
# Extracts the value from a PostgreSQL column default definition.
- def extract_value_from_default(default) # :nodoc:
+ def extract_value_from_default(default)
case default
# Quoted types
- when /\A[\(B]?'(.*)'.*::"?([\w. ]+)"?(?:\[\])?\z/m
- # The default 'now'::date is CURRENT_DATE
- if $1 == "now".freeze && $2 == "date".freeze
- nil
- else
- $1.gsub("''".freeze, "'".freeze)
- end
+ when /\A[\(B]?'(.*)'.*::"?([\w. ]+)"?(?:\[\])?\z/m
+ # The default 'now'::date is CURRENT_DATE
+ if $1 == "now" && $2 == "date"
+ nil
+ else
+ $1.gsub("''", "'")
+ end
# Boolean types
- when 'true'.freeze, 'false'.freeze
- default
+ when "true", "false"
+ default
# Numeric types
- when /\A\(?(-?\d+(\.\d*)?)\)?(::bigint)?\z/
- $1
+ when /\A\(?(-?\d+(\.\d*)?)\)?(::bigint)?\z/
+ $1
# Object identifier types
- when /\A-?\d+\z/
- $1
- else
- # Anything else is blank, some user type, or some function
- # and we can't know the value of that, so return nil.
- nil
+ when /\A-?\d+\z/
+ $1
+ else
+ # Anything else is blank, some user type, or some function
+ # and we can't know the value of that, so return nil.
+ nil
end
end
- def extract_default_function(default_value, default) # :nodoc:
+ def extract_default_function(default_value, default)
default if has_default_function?(default_value, default)
end
- def has_default_function?(default_value, default) # :nodoc:
- !default_value && (%r{\w+\(.*\)|\(.*\)::\w+} === default)
+ def has_default_function?(default_value, default)
+ !default_value && %r{\w+\(.*\)|\(.*\)::\w+|CURRENT_DATE|CURRENT_TIMESTAMP}.match?(default)
end
- def load_additional_types(type_map, oids = nil) # :nodoc:
+ def load_additional_types(oids = nil)
initializer = OID::TypeMapInitializer.new(type_map)
- if supports_ranges?
- query = <<-SQL
- SELECT t.oid, t.typname, t.typelem, t.typdelim, t.typinput, r.rngsubtype, t.typtype, t.typbasetype
- FROM pg_type as t
- LEFT JOIN pg_range as r ON oid = rngtypid
- SQL
- else
- query = <<-SQL
- SELECT t.oid, t.typname, t.typelem, t.typdelim, t.typinput, t.typtype, t.typbasetype
- FROM pg_type as t
- SQL
- end
+ query = <<~SQL
+ SELECT t.oid, t.typname, t.typelem, t.typdelim, t.typinput, r.rngsubtype, t.typtype, t.typbasetype
+ FROM pg_type as t
+ LEFT JOIN pg_range as r ON oid = rngtypid
+ SQL
if oids
query += "WHERE t.oid::integer IN (%s)" % oids.join(", ")
else
- query += initializer.query_conditions_for_initial_load(type_map)
+ query += initializer.query_conditions_for_initial_load
end
- execute_and_clear(query, 'SCHEMA', []) do |records|
+ execute_and_clear(query, "SCHEMA", []) do |records|
initializer.run(records)
end
end
@@ -573,6 +632,10 @@ module ActiveRecord
FEATURE_NOT_SUPPORTED = "0A000" #:nodoc:
def execute_and_clear(sql, name, binds, prepare: false)
+ if preventing_writes? && write_query?(sql)
+ raise ActiveRecord::ReadOnlyError, "Write query attempted while in readonly mode: #{sql}"
+ end
+
if without_prepared_statement?(binds)
result = exec_no_cache(sql, name, [])
elsif !prepare
@@ -586,16 +649,31 @@ module ActiveRecord
end
def exec_no_cache(sql, name, binds)
- type_casted_binds = binds.map { |attr| type_cast(attr.value_for_database) }
- log(sql, name, binds) { @connection.async_exec(sql, type_casted_binds) }
+ materialize_transactions
+
+ # make sure we carry over any changes to ActiveRecord::Base.default_timezone that have been
+ # made since we established the connection
+ update_typemap_for_default_timezone
+
+ type_casted_binds = type_casted_binds(binds)
+ log(sql, name, binds, type_casted_binds) do
+ ActiveSupport::Dependencies.interlock.permit_concurrent_loads do
+ @connection.exec_params(sql, type_casted_binds)
+ end
+ end
end
def exec_cache(sql, name, binds)
- stmt_key = prepare_statement(sql)
- type_casted_binds = binds.map { |attr| type_cast(attr.value_for_database) }
+ materialize_transactions
+ update_typemap_for_default_timezone
+
+ stmt_key = prepare_statement(sql, binds)
+ type_casted_binds = type_casted_binds(binds)
- log(sql, name, binds, stmt_key) do
- @connection.exec_prepared(stmt_key, type_casted_binds)
+ log(sql, name, binds, type_casted_binds, stmt_key) do
+ ActiveSupport::Dependencies.interlock.permit_concurrent_loads do
+ @connection.exec_prepared(stmt_key, type_casted_binds)
+ end
end
rescue ActiveRecord::StatementInvalid => e
raise unless is_cached_plan_failure?(e)
@@ -605,8 +683,10 @@ module ActiveRecord
if in_transaction?
raise ActiveRecord::PreparedStatementCacheExpired.new(e.cause.message)
else
- # outside of transactions we can simply flush this query and retry
- @statements.delete sql_key(sql)
+ @lock.synchronize do
+ # outside of transactions we can simply flush this query and retry
+ @statements.delete sql_key(sql)
+ end
retry
end
end
@@ -619,11 +699,11 @@ module ActiveRecord
# ActiveRecord::PreparedStatementCacheExpired
#
# Check here for more details:
- # http://git.postgresql.org/gitweb/?p=postgresql.git;a=blob;f=src/backend/utils/cache/plancache.c#l573
- CACHED_PLAN_HEURISTIC = 'cached plan must not change result type'.freeze
+ # https://git.postgresql.org/gitweb/?p=postgresql.git;a=blob;f=src/backend/utils/cache/plancache.c#l573
+ CACHED_PLAN_HEURISTIC = "cached plan must not change result type"
def is_cached_plan_failure?(e)
pgerror = e.cause
- code = pgerror.result.result_error_field(PGresult::PG_DIAG_SQLSTATE)
+ code = pgerror.result.result_error_field(PG::PG_DIAG_SQLSTATE)
code == FEATURE_NOT_SUPPORTED && pgerror.message.include?(CACHED_PLAN_HEURISTIC)
rescue
false
@@ -641,33 +721,31 @@ module ActiveRecord
# Prepare the statement if it hasn't been prepared and return
# the statement key.
- def prepare_statement(sql)
- sql_key = sql_key(sql)
- unless @statements.key? sql_key
- nextkey = @statements.next_key
- begin
- @connection.prepare nextkey, sql
- rescue => e
- raise translate_exception_class(e, sql)
+ def prepare_statement(sql, binds)
+ @lock.synchronize do
+ sql_key = sql_key(sql)
+ unless @statements.key? sql_key
+ nextkey = @statements.next_key
+ begin
+ @connection.prepare nextkey, sql
+ rescue => e
+ raise translate_exception_class(e, sql, binds)
+ end
+ # Clear the queue
+ @connection.get_last_result
+ @statements[sql_key] = nextkey
end
- # Clear the queue
- @connection.get_last_result
- @statements[sql_key] = nextkey
+ @statements[sql_key]
end
- @statements[sql_key]
end
# Connects to a PostgreSQL server and sets up the adapter depending on the
# connected server's characteristics.
def connect
- @connection = PGconn.connect(@connection_parameters)
+ @connection = PG.connect(@connection_parameters)
configure_connection
- rescue ::PG::Error => error
- if error.message.include?("does not exist")
- raise ActiveRecord::NoDatabaseError
- else
- raise
- end
+ add_pg_encoders
+ add_pg_decoders
end
# Configures the encoding, verbosity, schema search path, and time zone of the connection.
@@ -676,43 +754,40 @@ module ActiveRecord
if @config[:encoding]
@connection.set_client_encoding(@config[:encoding])
end
- self.client_min_messages = @config[:min_messages] || 'warning'
+ self.client_min_messages = @config[:min_messages] || "warning"
self.schema_search_path = @config[:schema_search_path] || @config[:schema_order]
# Use standard-conforming strings so we don't have to do the E'...' dance.
set_standard_conforming_strings
+ variables = @config.fetch(:variables, {}).stringify_keys
+
# If using Active Record's time zone support, configure the connection to return
# TIMESTAMP WITH TIME ZONE types in UTC.
- # (SET TIME ZONE does not use an equals sign like other SET variables)
- if ActiveRecord::Base.default_timezone == :utc
- execute("SET time zone 'UTC'", 'SCHEMA')
- elsif @local_tz
- execute("SET time zone '#{@local_tz}'", 'SCHEMA')
+ unless variables["timezone"]
+ if ActiveRecord::Base.default_timezone == :utc
+ variables["timezone"] = "UTC"
+ elsif @local_tz
+ variables["timezone"] = @local_tz
+ end
end
# SET statements from :variables config hash
- # http://www.postgresql.org/docs/current/static/sql-set.html
- variables = @config[:variables] || {}
+ # https://www.postgresql.org/docs/current/static/sql-set.html
variables.map do |k, v|
- if v == ':default' || v == :default
+ if v == ":default" || v == :default
# Sets the value to the global or compile default
- execute("SET SESSION #{k} TO DEFAULT", 'SCHEMA')
+ execute("SET SESSION #{k} TO DEFAULT", "SCHEMA")
elsif !v.nil?
- execute("SET SESSION #{k} TO #{quote(v)}", 'SCHEMA')
+ execute("SET SESSION #{k} TO #{quote(v)}", "SCHEMA")
end
end
end
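# [Editor's note — illustrative sketch, not part of this patch] The :variables
# hash comes from the database configuration; each entry becomes a
# SET SESSION statement on connect, e.g. in database.yml:
#
#   development:
#     adapter: postgresql
#     database: my_app_development
#     variables:
#       statement_timeout: 5000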
- # Returns the current ID of a table's sequence.
- def last_insert_id_result(sequence_name) # :nodoc:
- exec_query("SELECT currval('#{sequence_name}')", 'SQL')
- end
-
# Returns the list of a table's column names, data types, and default values.
#
# The underlying query is roughly:
- # SELECT column.name, column.type, default.value
+ # SELECT column.name, column.type, default.value, column.comment
# FROM column LEFT JOIN default
# ON column.table_id = default.table_id
# AND column.num = default.column_num
@@ -727,41 +802,50 @@ module ActiveRecord
# Query implementation notes:
# - format_type includes the column size constraint, e.g. varchar(50)
# - ::regclass is a cast that resolves a table name to its object id (oid)
- def column_definitions(table_name) # :nodoc:
- query(<<-end_sql, 'SCHEMA')
+ def column_definitions(table_name)
+ query(<<~SQL, "SCHEMA")
SELECT a.attname, format_type(a.atttypid, a.atttypmod),
pg_get_expr(d.adbin, d.adrelid), a.attnotnull, a.atttypid, a.atttypmod,
- (SELECT c.collname FROM pg_collation c, pg_type t
- WHERE c.oid = a.attcollation AND t.oid = a.atttypid AND a.attcollation <> t.typcollation)
- FROM pg_attribute a LEFT JOIN pg_attrdef d
- ON a.attrelid = d.adrelid AND a.attnum = d.adnum
- WHERE a.attrelid = '#{quote_table_name(table_name)}'::regclass
+ c.collname, col_description(a.attrelid, a.attnum) AS comment
+ FROM pg_attribute a
+ LEFT JOIN pg_attrdef d ON a.attrelid = d.adrelid AND a.attnum = d.adnum
+ LEFT JOIN pg_type t ON a.atttypid = t.oid
+ LEFT JOIN pg_collation c ON a.attcollation = c.oid AND a.attcollation <> t.typcollation
+ WHERE a.attrelid = #{quote(quote_table_name(table_name))}::regclass
AND a.attnum > 0 AND NOT a.attisdropped
ORDER BY a.attnum
- end_sql
+ SQL
end
- def extract_table_ref_from_insert_sql(sql) # :nodoc:
+ def extract_table_ref_from_insert_sql(sql)
sql[/into\s("[A-Za-z0-9_."\[\]\s]+"|[A-Za-z0-9_."\[\]]+)\s*/im]
$1.strip if $1
end
- def create_table_definition(name, temporary = false, options = nil, as = nil) # :nodoc:
- PostgreSQL::TableDefinition.new(name, temporary, options, as)
+ def arel_visitor
+ Arel::Visitors::PostgreSQL.new(self)
+ end
+
+ def build_statement_pool
+ StatementPool.new(@connection, self.class.type_cast_config_to_integer(@config[:statement_limit]))
end
def can_perform_case_insensitive_comparison_for?(column)
@case_insensitive_cache ||= {}
@case_insensitive_cache[column.sql_type] ||= begin
- sql = <<-end_sql
+ sql = <<~SQL
SELECT exists(
SELECT * FROM pg_proc
+ WHERE proname = 'lower'
+ AND proargtypes = ARRAY[#{quote column.sql_type}::regtype]::oidvector
+ ) OR exists(
+ SELECT * FROM pg_proc
INNER JOIN pg_cast
- ON casttarget::text::oidvector = proargtypes
+ ON ARRAY[casttarget]::oidvector = proargtypes
WHERE proname = 'lower'
- AND castsource = '#{column.sql_type}'::regtype::oid
+ AND castsource = #{quote column.sql_type}::regtype
)
- end_sql
+ SQL
execute_and_clear(sql, "SCHEMA", []) do |result|
result.getvalue(0, 0)
end
@@ -773,40 +857,65 @@ module ActiveRecord
map[Integer] = PG::TextEncoder::Integer.new
map[TrueClass] = PG::TextEncoder::Boolean.new
map[FalseClass] = PG::TextEncoder::Boolean.new
- map[Float] = PG::TextEncoder::Float.new
@connection.type_map_for_queries = map
end
+ def update_typemap_for_default_timezone
+ if @default_timezone != ActiveRecord::Base.default_timezone && @timestamp_decoder
+ decoder_class = ActiveRecord::Base.default_timezone == :utc ?
+ PG::TextDecoder::TimestampUtc :
+ PG::TextDecoder::TimestampWithoutTimeZone
+
+ @timestamp_decoder = decoder_class.new(@timestamp_decoder.to_h)
+ @connection.type_map_for_results.add_coder(@timestamp_decoder)
+ @default_timezone = ActiveRecord::Base.default_timezone
+ end
+ end
+
def add_pg_decoders
+ @default_timezone = nil
+ @timestamp_decoder = nil
+
coders_by_name = {
- 'int2' => PG::TextDecoder::Integer,
- 'int4' => PG::TextDecoder::Integer,
- 'int8' => PG::TextDecoder::Integer,
- 'oid' => PG::TextDecoder::Integer,
- 'float4' => PG::TextDecoder::Float,
- 'float8' => PG::TextDecoder::Float,
- 'bool' => PG::TextDecoder::Boolean,
+ "int2" => PG::TextDecoder::Integer,
+ "int4" => PG::TextDecoder::Integer,
+ "int8" => PG::TextDecoder::Integer,
+ "oid" => PG::TextDecoder::Integer,
+ "float4" => PG::TextDecoder::Float,
+ "float8" => PG::TextDecoder::Float,
+ "bool" => PG::TextDecoder::Boolean,
}
+
+ if defined?(PG::TextDecoder::TimestampUtc)
+ # Use the native PG text decoders available since pg-1.1
+ coders_by_name["timestamp"] = PG::TextDecoder::TimestampUtc
+ coders_by_name["timestamptz"] = PG::TextDecoder::TimestampWithTimeZone
+ end
+
known_coder_types = coders_by_name.keys.map { |n| quote(n) }
- query = <<-SQL % known_coder_types.join(", ")
+ query = <<~SQL % known_coder_types.join(", ")
SELECT t.oid, t.typname
FROM pg_type as t
WHERE t.typname IN (%s)
SQL
coders = execute_and_clear(query, "SCHEMA", []) do |result|
result
- .map { |row| construct_coder(row, coders_by_name[row['typname']]) }
+ .map { |row| construct_coder(row, coders_by_name[row["typname"]]) }
.compact
end
map = PG::TypeMapByOid.new
coders.each { |coder| map.add_coder(coder) }
@connection.type_map_for_results = map
+
+ # extract timestamp decoder for use in update_typemap_for_default_timezone
+ @timestamp_decoder = coders.find { |coder| coder.name == "timestamp" }
+ update_typemap_for_default_timezone
end
def construct_coder(row, coder_class)
return unless coder_class
- coder_class.new(oid: row['oid'].to_i, name: row['typname'])
+ coder_class.new(oid: row["oid"].to_i, name: row["typname"])
end
ActiveRecord::Type.add_modifier({ array: true }, OID::Array, adapter: :postgresql)
@@ -815,16 +924,16 @@ module ActiveRecord
ActiveRecord::Type.register(:bit_varying, OID::BitVarying, adapter: :postgresql)
ActiveRecord::Type.register(:binary, OID::Bytea, adapter: :postgresql)
ActiveRecord::Type.register(:cidr, OID::Cidr, adapter: :postgresql)
+ ActiveRecord::Type.register(:date, OID::Date, adapter: :postgresql)
ActiveRecord::Type.register(:datetime, OID::DateTime, adapter: :postgresql)
ActiveRecord::Type.register(:decimal, OID::Decimal, adapter: :postgresql)
ActiveRecord::Type.register(:enum, OID::Enum, adapter: :postgresql)
ActiveRecord::Type.register(:hstore, OID::Hstore, adapter: :postgresql)
ActiveRecord::Type.register(:inet, OID::Inet, adapter: :postgresql)
- ActiveRecord::Type.register(:json, OID::Json, adapter: :postgresql)
ActiveRecord::Type.register(:jsonb, OID::Jsonb, adapter: :postgresql)
ActiveRecord::Type.register(:money, OID::Money, adapter: :postgresql)
- ActiveRecord::Type.register(:point, OID::Rails51Point, adapter: :postgresql)
- ActiveRecord::Type.register(:legacy_point, OID::Point, adapter: :postgresql)
+ ActiveRecord::Type.register(:point, OID::Point, adapter: :postgresql)
+ ActiveRecord::Type.register(:legacy_point, OID::LegacyPoint, adapter: :postgresql)
ActiveRecord::Type.register(:uuid, OID::Uuid, adapter: :postgresql)
ActiveRecord::Type.register(:vector, OID::Vector, adapter: :postgresql)
ActiveRecord::Type.register(:xml, OID::Xml, adapter: :postgresql)
diff --git a/activerecord/lib/active_record/connection_adapters/schema_cache.rb b/activerecord/lib/active_record/connection_adapters/schema_cache.rb
index eee142378c..07453b4403 100644
--- a/activerecord/lib/active_record/connection_adapters/schema_cache.rb
+++ b/activerecord/lib/active_record/connection_adapters/schema_cache.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
module ActiveRecord
module ConnectionAdapters
class SchemaCache
@@ -11,6 +13,7 @@ module ActiveRecord
@columns_hash = {}
@primary_keys = {}
@data_sources = {}
+ @indexes = {}
end
def initialize_dup(other)
@@ -19,6 +22,25 @@ module ActiveRecord
@columns_hash = @columns_hash.dup
@primary_keys = @primary_keys.dup
@data_sources = @data_sources.dup
+ @indexes = @indexes.dup
+ end
+
+ def encode_with(coder)
+ coder["columns"] = @columns
+ coder["columns_hash"] = @columns_hash
+ coder["primary_keys"] = @primary_keys
+ coder["data_sources"] = @data_sources
+ coder["indexes"] = @indexes
+ coder["version"] = connection.migration_context.current_version
+ end
+
+ def init_with(coder)
+ @columns = coder["columns"]
+ @columns_hash = coder["columns_hash"]
+ @primary_keys = coder["primary_keys"]
+ @data_sources = coder["data_sources"]
+ @indexes = coder["indexes"] || {}
+ @version = coder["version"]
end
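# [Editor's note — illustrative sketch, not part of this patch] encode_with /
# init_with let the cache round-trip through YAML; this is the format written
# by the schema cache dump task and loaded back on boot:
#
#   bin/rails db:schema:cache:dump   # writes db/schema_cache.yml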
def primary_keys(table_name)
@@ -32,9 +54,6 @@ module ActiveRecord
@data_sources[name] = connection.data_source_exists?(name)
end
- alias table_exists? data_source_exists?
- deprecate :table_exists? => "use #data_source_exists? instead"
-
# Add internal cache for table with +table_name+.
def add(table_name)
@@ -42,14 +61,13 @@ module ActiveRecord
primary_keys(table_name)
columns(table_name)
columns_hash(table_name)
+ indexes(table_name)
end
end
def data_sources(name)
@data_sources[name]
end
- alias tables data_sources
- deprecate :tables => "use #data_sources instead"
# Get the columns for a table
def columns(table_name)
@@ -64,17 +82,27 @@ module ActiveRecord
}]
end
+ # Checks whether the columns hash is already cached for a table.
+ def columns_hash?(table_name)
+ @columns_hash.key?(table_name)
+ end
+
+ def indexes(table_name)
+ @indexes[table_name] ||= connection.indexes(table_name)
+ end
+
# Clears out internal caches
def clear!
@columns.clear
@columns_hash.clear
@primary_keys.clear
@data_sources.clear
+ @indexes.clear
@version = nil
end
def size
- [@columns, @columns_hash, @primary_keys, @data_sources].map(&:size).inject :+
+ [@columns, @columns_hash, @primary_keys, @data_sources].sum(&:size)
end
# Clear out internal caches for the data source +name+.
@@ -83,22 +111,21 @@ module ActiveRecord
@columns_hash.delete name
@primary_keys.delete name
@data_sources.delete name
+ @indexes.delete name
end
- alias clear_table_cache! clear_data_source_cache!
- deprecate :clear_table_cache! => "use #clear_data_source_cache! instead"
def marshal_dump
# if we get the current version during initialization, a stack overflow occurs.
- @version = ActiveRecord::Migrator.current_version
- [@version, @columns, @columns_hash, @primary_keys, @data_sources]
+ @version = connection.migration_context.current_version
+ [@version, @columns, @columns_hash, @primary_keys, @data_sources, @indexes]
end
def marshal_load(array)
- @version, @columns, @columns_hash, @primary_keys, @data_sources = array
+ @version, @columns, @columns_hash, @primary_keys, @data_sources, @indexes = array
+ @indexes = @indexes || {}
end
private
-
def prepare_data_sources
connection.data_sources.each { |source| @data_sources[source] = true }
end
diff --git a/activerecord/lib/active_record/connection_adapters/sql_type_metadata.rb b/activerecord/lib/active_record/connection_adapters/sql_type_metadata.rb
index ccb7e154ee..8489bcbf1d 100644
--- a/activerecord/lib/active_record/connection_adapters/sql_type_metadata.rb
+++ b/activerecord/lib/active_record/connection_adapters/sql_type_metadata.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
module ActiveRecord
# :stopdoc:
module ConnectionAdapters
@@ -24,9 +26,9 @@ module ActiveRecord
protected
- def attributes_for_hash
- [self.class, sql_type, type, limit, precision, scale]
- end
+ def attributes_for_hash
+ [self.class, sql_type, type, limit, precision, scale]
+ end
end
end
end
diff --git a/activerecord/lib/active_record/connection_adapters/sqlite3/database_statements.rb b/activerecord/lib/active_record/connection_adapters/sqlite3/database_statements.rb
new file mode 100644
index 0000000000..84dcae49b9
--- /dev/null
+++ b/activerecord/lib/active_record/connection_adapters/sqlite3/database_statements.rb
@@ -0,0 +1,38 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ module ConnectionAdapters
+ module SQLite3
+ module DatabaseStatements
+ private
+ def execute_batch(sql, name = nil)
+ if preventing_writes? && write_query?(sql)
+ raise ActiveRecord::ReadOnlyError, "Write query attempted while in readonly mode: #{sql}"
+ end
+
+ materialize_transactions
+
+ log(sql, name) do
+ ActiveSupport::Dependencies.interlock.permit_concurrent_loads do
+ @connection.execute_batch(sql)
+ end
+ end
+ end
+
+ def build_fixture_statements(fixture_set)
+ fixture_set.flat_map do |table_name, fixtures|
+ next if fixtures.empty?
+ fixtures.map { |fixture| build_fixture_sql([fixture], table_name) }
+ end.compact
+ end
+
+ def build_truncate_statements(*table_names)
+ truncate_tables = table_names.map do |table_name|
+ "DELETE FROM #{quote_table_name(table_name)}"
+ end
+ combine_multi_statements(truncate_tables)
+ end
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/connection_adapters/sqlite3/explain_pretty_printer.rb b/activerecord/lib/active_record/connection_adapters/sqlite3/explain_pretty_printer.rb
index a946f5ebd0..832fdfe5c4 100644
--- a/activerecord/lib/active_record/connection_adapters/sqlite3/explain_pretty_printer.rb
+++ b/activerecord/lib/active_record/connection_adapters/sqlite3/explain_pretty_printer.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
module ActiveRecord
module ConnectionAdapters
module SQLite3
@@ -10,7 +12,7 @@ module ActiveRecord
#
def pp(result)
result.rows.map do |row|
- row.join('|')
+ row.join("|")
end.join("\n") + "\n"
end
end
diff --git a/activerecord/lib/active_record/connection_adapters/sqlite3/quoting.rb b/activerecord/lib/active_record/connection_adapters/sqlite3/quoting.rb
index faf2f375dc..cb9d32a577 100644
--- a/activerecord/lib/active_record/connection_adapters/sqlite3/quoting.rb
+++ b/activerecord/lib/active_record/connection_adapters/sqlite3/quoting.rb
@@ -1,35 +1,66 @@
+# frozen_string_literal: true
+
module ActiveRecord
module ConnectionAdapters
module SQLite3
module Quoting # :nodoc:
+ def quote_string(s)
+ @connection.class.quote(s)
+ end
+
+ def quote_table_name_for_assignment(table, attr)
+ quote_column_name(attr)
+ end
+
+ def quote_table_name(name)
+ @quoted_table_names[name] ||= super.gsub(".", "\".\"").freeze
+ end
+
def quote_column_name(name)
@quoted_column_names[name] ||= %Q("#{super.gsub('"', '""')}")
end
- private
+ def quoted_time(value)
+ value = value.change(year: 2000, month: 1, day: 1)
+ quoted_date(value).sub(/\A\d\d\d\d-\d\d-\d\d /, "2000-01-01 ")
+ end
- def _quote(value)
- if value.is_a?(Type::Binary::Data)
- "x'#{value.hex}'"
- else
- super
- end
+ def quoted_binary(value)
+ "x'#{value.hex}'"
end
- def _type_cast(value)
- case value
- when BigDecimal
- value.to_f
- when String
- if value.encoding == Encoding::ASCII_8BIT
- super(value.encode(Encoding::UTF_8))
+ def quoted_true
+ "1"
+ end
+
+ def unquoted_true
+ 1
+ end
+
+ def quoted_false
+ "0"
+ end
+
+ def unquoted_false
+ 0
+ end
+
+ private
+
+ def _type_cast(value)
+ case value
+ when BigDecimal
+ value.to_f
+ when String
+ if value.encoding == Encoding::ASCII_8BIT
+ super(value.encode(Encoding::UTF_8))
+ else
+ super
+ end
else
super
end
- else
- super
end
- end
end
end
end
diff --git a/activerecord/lib/active_record/connection_adapters/sqlite3/schema_creation.rb b/activerecord/lib/active_record/connection_adapters/sqlite3/schema_creation.rb
index fe1dcbd710..b842561317 100644
--- a/activerecord/lib/active_record/connection_adapters/sqlite3/schema_creation.rb
+++ b/activerecord/lib/active_record/connection_adapters/sqlite3/schema_creation.rb
@@ -1,7 +1,9 @@
+# frozen_string_literal: true
+
module ActiveRecord
module ConnectionAdapters
module SQLite3
- class SchemaCreation < AbstractAdapter::SchemaCreation
+ class SchemaCreation < AbstractAdapter::SchemaCreation # :nodoc:
private
def add_column_options!(sql, options)
if options[:collation]
diff --git a/activerecord/lib/active_record/connection_adapters/sqlite3/schema_definitions.rb b/activerecord/lib/active_record/connection_adapters/sqlite3/schema_definitions.rb
new file mode 100644
index 0000000000..c9855019c1
--- /dev/null
+++ b/activerecord/lib/active_record/connection_adapters/sqlite3/schema_definitions.rb
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ module ConnectionAdapters
+ module SQLite3
+ class TableDefinition < ActiveRecord::ConnectionAdapters::TableDefinition
+ def references(*args, **options)
+ super(*args, type: :integer, **options)
+ end
+ alias :belongs_to :references
+
+ private
+ def integer_like_primary_key_type(type, options)
+ :primary_key
+ end
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/connection_adapters/sqlite3/schema_dumper.rb b/activerecord/lib/active_record/connection_adapters/sqlite3/schema_dumper.rb
new file mode 100644
index 0000000000..621678ec65
--- /dev/null
+++ b/activerecord/lib/active_record/connection_adapters/sqlite3/schema_dumper.rb
@@ -0,0 +1,18 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ module ConnectionAdapters
+ module SQLite3
+ class SchemaDumper < ConnectionAdapters::SchemaDumper # :nodoc:
+ private
+ def default_primary_key?(column)
+ schema_type(column) == :integer
+ end
+
+ def explicit_primary_key_default?(column)
+ column.bigint?
+ end
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/connection_adapters/sqlite3/schema_statements.rb b/activerecord/lib/active_record/connection_adapters/sqlite3/schema_statements.rb
new file mode 100644
index 0000000000..e64e995e1a
--- /dev/null
+++ b/activerecord/lib/active_record/connection_adapters/sqlite3/schema_statements.rb
@@ -0,0 +1,137 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ module ConnectionAdapters
+ module SQLite3
+ module SchemaStatements # :nodoc:
+ # Returns an array of indexes for the given table.
+ def indexes(table_name)
+ exec_query("PRAGMA index_list(#{quote_table_name(table_name)})", "SCHEMA").map do |row|
+ # Indexes SQLite creates implicitly for internal use start with "sqlite_".
+ # See https://www.sqlite.org/fileformat2.html#intschema
+ next if row["name"].starts_with?("sqlite_")
+
+ index_sql = query_value(<<~SQL, "SCHEMA")
+ SELECT sql
+ FROM sqlite_master
+ WHERE name = #{quote(row['name'])} AND type = 'index'
+ UNION ALL
+ SELECT sql
+ FROM sqlite_temp_master
+ WHERE name = #{quote(row['name'])} AND type = 'index'
+ SQL
+
+ /\bON\b\s*"?(\w+?)"?\s*\((?<expressions>.+?)\)(?:\s*WHERE\b\s*(?<where>.+))?\z/i =~ index_sql
+
+ columns = exec_query("PRAGMA index_info(#{quote(row['name'])})", "SCHEMA").map do |col|
+ col["name"]
+ end
+
+ orders = {}
+
+ if columns.any?(&:nil?) # index created with an expression
+ columns = expressions
+ else
+ # Add info on sort order for columns (only desc order is explicitly specified,
+ # asc is the default)
+ if index_sql # index_sql can be null in case of primary key indexes
+ index_sql.scan(/"(\w+)" DESC/).flatten.each { |order_column|
+ orders[order_column] = :desc
+ }
+ end
+ end
+
+ IndexDefinition.new(
+ table_name,
+ row["name"],
+ row["unique"] != 0,
+ columns,
+ where: where,
+ orders: orders
+ )
+ end.compact
+ end
+
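# [Editor's note — illustrative sketch, not part of this patch] The parsing
# above turns PRAGMA output plus the stored CREATE INDEX SQL into
# IndexDefinition objects, so callers can do, for example
# (table and indexes hypothetical):
#
#   ActiveRecord::Base.connection.indexes("posts").map(&:columns)
#   # => [["user_id"], ["lower(title)"]]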
+ def add_foreign_key(from_table, to_table, **options)
+ alter_table(from_table) do |definition|
+ to_table = strip_table_name_prefix_and_suffix(to_table)
+ definition.foreign_key(to_table, options)
+ end
+ end
+
+ def remove_foreign_key(from_table, to_table = nil, **options)
+ to_table ||= options[:to_table]
+ options = options.except(:name, :to_table)
+ foreign_keys = foreign_keys(from_table)
+
+ fkey = foreign_keys.detect do |fk|
+ table = to_table || begin
+ table = options[:column].to_s.delete_suffix("_id")
+ Base.pluralize_table_names ? table.pluralize : table
+ end
+ table = strip_table_name_prefix_and_suffix(table)
+ fk_to_table = strip_table_name_prefix_and_suffix(fk.to_table)
+ fk_to_table == table && options.all? { |k, v| fk.options[k].to_s == v.to_s }
+ end || raise(ArgumentError, "Table '#{from_table}' has no foreign key for #{to_table || options}")
+
+ foreign_keys.delete(fkey)
+ alter_table(from_table, foreign_keys)
+ end
+
+ def create_schema_dumper(options)
+ SQLite3::SchemaDumper.create(self, options)
+ end
+
+ private
+ def schema_creation
+ SQLite3::SchemaCreation.new(self)
+ end
+
+ def create_table_definition(*args)
+ SQLite3::TableDefinition.new(self, *args)
+ end
+
+ def new_column_from_field(table_name, field)
+ default = \
+ case field["dflt_value"]
+ when /^null$/i
+ nil
+ when /^'(.*)'$/m
+ $1.gsub("''", "'")
+ when /^"(.*)"$/m
+ $1.gsub('""', '"')
+ else
+ field["dflt_value"]
+ end
+
+ type_metadata = fetch_type_metadata(field["type"])
+ Column.new(field["name"], default, type_metadata, field["notnull"].to_i == 0, table_name, nil, field["collation"])
+ end
+
+ def data_source_sql(name = nil, type: nil)
+ scope = quoted_scope(name, type: type)
+ scope[:type] ||= "'table','view'"
+
+ sql = +"SELECT name FROM sqlite_master WHERE name <> 'sqlite_sequence'"
+ sql << " AND name = #{scope[:name]}" if scope[:name]
+ sql << " AND type IN (#{scope[:type]})"
+ sql
+ end
+
+ def quoted_scope(name = nil, type: nil)
+ type = \
+ case type
+ when "BASE TABLE"
+ "'table'"
+ when "VIEW"
+ "'view'"
+ end
+ scope = {}
+ scope[:name] = quote(name) if name
+ scope[:type] = type if type
+ scope
+ end
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/connection_adapters/sqlite3_adapter.rb b/activerecord/lib/active_record/connection_adapters/sqlite3_adapter.rb
index 5c8e428bef..ff23a525b9 100644
--- a/activerecord/lib/active_record/connection_adapters/sqlite3_adapter.rb
+++ b/activerecord/lib/active_record/connection_adapters/sqlite3_adapter.rb
@@ -1,15 +1,23 @@
-require 'active_record/connection_adapters/abstract_adapter'
-require 'active_record/connection_adapters/statement_pool'
-require 'active_record/connection_adapters/sqlite3/explain_pretty_printer'
-require 'active_record/connection_adapters/sqlite3/quoting'
-require 'active_record/connection_adapters/sqlite3/schema_creation'
+# frozen_string_literal: true
-gem 'sqlite3', '~> 1.3.6'
-require 'sqlite3'
+require "active_record/connection_adapters/abstract_adapter"
+require "active_record/connection_adapters/statement_pool"
+require "active_record/connection_adapters/sqlite3/explain_pretty_printer"
+require "active_record/connection_adapters/sqlite3/quoting"
+require "active_record/connection_adapters/sqlite3/database_statements"
+require "active_record/connection_adapters/sqlite3/schema_creation"
+require "active_record/connection_adapters/sqlite3/schema_definitions"
+require "active_record/connection_adapters/sqlite3/schema_dumper"
+require "active_record/connection_adapters/sqlite3/schema_statements"
+
+gem "sqlite3", "~> 1.3", ">= 1.3.6"
+require "sqlite3"
module ActiveRecord
module ConnectionHandling # :nodoc:
def sqlite3_connection(config)
+ config = config.symbolize_keys
+
# Require database.
unless config[:database]
raise ArgumentError, "No database file specified. Missing argument: database"
@@ -18,7 +26,7 @@ module ActiveRecord
# Allow database path relative to Rails.root, but only if the database
# path is not the special path that tells sqlite to build a database only
# in memory.
- if ':memory:' != config[:database]
+ if ":memory:" != config[:database]
config[:database] = File.expand_path(config[:database], Rails.root) if defined?(Rails.root)
dirname = File.dirname(config[:database])
Dir.mkdir(dirname) unless File.directory?(dirname)
@@ -26,11 +34,9 @@ module ActiveRecord
db = SQLite3::Database.new(
config[:database].to_s,
- :results_as_hash => true
+ config.merge(results_as_hash: true)
)
- db.busy_timeout(ConnectionAdapters::SQLite3Adapter.type_cast_config_to_integer(config[:timeout])) if config[:timeout]
-
ConnectionAdapters::SQLite3Adapter.new(db, logger, nil, config)
rescue Errno::ENOENT => error
if error.message.include?("No such file or directory")
@@ -49,13 +55,14 @@ module ActiveRecord
#
# * <tt>:database</tt> - Path to the database file.
class SQLite3Adapter < AbstractAdapter
- ADAPTER_NAME = 'SQLite'.freeze
+ ADAPTER_NAME = "SQLite"
include SQLite3::Quoting
- include Savepoints
+ include SQLite3::SchemaStatements
+ include SQLite3::DatabaseStatements
NATIVE_DATABASE_TYPES = {
- primary_key: 'INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL',
+ primary_key: "integer PRIMARY KEY AUTOINCREMENT NOT NULL",
string: { name: "varchar" },
text: { name: "text" },
integer: { name: "integer" },
@@ -65,35 +72,30 @@ module ActiveRecord
time: { name: "time" },
date: { name: "date" },
binary: { name: "blob" },
- boolean: { name: "boolean" }
+ boolean: { name: "boolean" },
+ json: { name: "json" },
}
- class StatementPool < ConnectionAdapters::StatementPool
- private
-
- def dealloc(stmt)
- stmt[:stmt].close unless stmt[:stmt].closed?
+ def self.represent_boolean_as_integer=(value) # :nodoc:
+ if value == false
+ raise "`.represent_boolean_as_integer=` is now always true, so make sure your application can work with it and remove this settings."
end
+
+ ActiveSupport::Deprecation.warn(
+ "`.represent_boolean_as_integer=` is now always true, so setting this is deprecated and will be removed in Rails 6.1."
+ )
end
- def schema_creation # :nodoc:
- SQLite3::SchemaCreation.new self
+ class StatementPool < ConnectionAdapters::StatementPool # :nodoc:
+ private
+ def dealloc(stmt)
+ stmt.close unless stmt.closed?
+ end
end
def initialize(connection, logger, connection_options, config)
super(connection, logger, config)
-
- @active = nil
- @statements = StatementPool.new(self.class.type_cast_config_to_integer(config.fetch(:statement_limit) { 1000 }))
-
- @visitor = Arel::Visitors::SQLite.new self
-
- if self.class.type_cast_config_to_boolean(config.fetch(:prepared_statements) { true })
- @prepared_statements = true
- @visitor.extend(DetermineIfPreparableVisitor)
- else
- @prepared_statements = false
- end
+ configure_connection
end
def supports_ddl_transactions?
@@ -105,25 +107,18 @@ module ActiveRecord
end
def supports_partial_index?
- sqlite_version >= '3.8.0'
- end
-
- # Returns true, since this connection adapter supports prepared statement
- # caching.
- def supports_statement_cache?
true
end
- # Returns true, since this connection adapter supports migrations.
- def supports_migrations? #:nodoc:
- true
+ def supports_expression_index?
+ sqlite_version >= "3.9.0"
end
- def supports_primary_key? #:nodoc:
+ def requires_reloading?
true
end
- def requires_reloading?
+ def supports_foreign_keys?
true
end
@@ -135,29 +130,39 @@ module ActiveRecord
true
end
+ def supports_json?
+ true
+ end
+
+ def supports_insert_on_conflict?
+ sqlite_version >= "3.24.0"
+ end
+ alias supports_insert_on_duplicate_skip? supports_insert_on_conflict?
+ alias supports_insert_on_duplicate_update? supports_insert_on_conflict?
+ alias supports_insert_conflict_target? supports_insert_on_conflict?
+
def active?
- @active != false
+ !@connection.closed?
+ end
+
+ def reconnect!
+ super
+ connect if @connection.closed?
end
# Disconnects from the database if already connected. Otherwise, this
# method does nothing.
def disconnect!
super
- @active = false
@connection.close rescue nil
end
- # Clears the prepared statements cache.
- def clear_cache!
- @statements.clear
- end
-
def supports_index_sort_order?
true
end
# Returns 62. SQLite supports index names up to 64
- # characters. The rest is used by rails internally to perform
+ # characters. The rest is used by Rails internally to perform
# temporary rename operations
def allowed_index_name_length
index_name_length - 2
@@ -176,57 +181,79 @@ module ActiveRecord
true
end
- # QUOTING ==================================================
-
- def quote_string(s) #:nodoc:
- @connection.class.quote(s)
+ def supports_lazy_transactions?
+ true
end
- def quote_table_name_for_assignment(table, attr)
- quote_column_name(attr)
+ # REFERENTIAL INTEGRITY ====================================
+
+ def disable_referential_integrity # :nodoc:
+ old_foreign_keys = query_value("PRAGMA foreign_keys")
+ old_defer_foreign_keys = query_value("PRAGMA defer_foreign_keys")
+
+ begin
+ execute("PRAGMA defer_foreign_keys = ON")
+ execute("PRAGMA foreign_keys = OFF")
+ yield
+ ensure
+ execute("PRAGMA defer_foreign_keys = #{old_defer_foreign_keys}")
+ execute("PRAGMA foreign_keys = #{old_foreign_keys}")
+ end
end
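# [Editor's note — illustrative sketch, not part of this patch] This is used
# when rows must be inserted regardless of foreign key ordering, e.g. while
# loading fixtures:
#
#   ActiveRecord::Base.connection.disable_referential_integrity do
#     # bulk-load rows whose foreign keys may not resolve yet
#   end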
#--
# DATABASE STATEMENTS ======================================
#++
+ READ_QUERY = ActiveRecord::ConnectionAdapters::AbstractAdapter.build_read_query_regexp(:begin, :commit, :explain, :select, :pragma, :release, :savepoint, :rollback) # :nodoc:
+ private_constant :READ_QUERY
+
+ def write_query?(sql) # :nodoc:
+ !READ_QUERY.match?(sql)
+ end
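# [Editor's note — illustrative sketch, not part of this patch] Anything that
# does not match READ_QUERY is treated as a write, so while writes are being
# prevented (e.g. via the connection handler's while_preventing_writes block;
# the exact API location varies by Rails version) a SELECT still runs but an
# INSERT raises (Post is hypothetical):
#
#   ActiveRecord::Base.connection_handler.while_preventing_writes do
#     Post.count                  # fine
#     Post.create!(title: "x")    # raises ActiveRecord::ReadOnlyError
#   end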
+
def explain(arel, binds = [])
sql = "EXPLAIN QUERY PLAN #{to_sql(arel, binds)}"
- SQLite3::ExplainPrettyPrinter.new.pp(exec_query(sql, 'EXPLAIN', []))
+ SQLite3::ExplainPrettyPrinter.new.pp(exec_query(sql, "EXPLAIN", []))
end
def exec_query(sql, name = nil, binds = [], prepare: false)
- type_casted_binds = binds.map { |attr| type_cast(attr.value_for_database) }
-
- log(sql, name, binds) do
- # Don't cache statements if they are not prepared
- unless prepare
- stmt = @connection.prepare(sql)
- begin
- cols = stmt.columns
- unless without_prepared_statement?(binds)
- stmt.bind_params(type_casted_binds)
+ if preventing_writes? && write_query?(sql)
+ raise ActiveRecord::ReadOnlyError, "Write query attempted while in readonly mode: #{sql}"
+ end
+
+ materialize_transactions
+
+ type_casted_binds = type_casted_binds(binds)
+
+ log(sql, name, binds, type_casted_binds) do
+ ActiveSupport::Dependencies.interlock.permit_concurrent_loads do
+ # Don't cache statements if they are not prepared
+ unless prepare
+ stmt = @connection.prepare(sql)
+ begin
+ cols = stmt.columns
+ unless without_prepared_statement?(binds)
+ stmt.bind_params(type_casted_binds)
+ end
+ records = stmt.to_a
+ ensure
+ stmt.close
end
+ else
+ stmt = @statements[sql] ||= @connection.prepare(sql)
+ cols = stmt.columns
+ stmt.reset!
+ stmt.bind_params(type_casted_binds)
records = stmt.to_a
- ensure
- stmt.close
end
- stmt = records
- else
- cache = @statements[sql] ||= {
- :stmt => @connection.prepare(sql)
- }
- stmt = cache[:stmt]
- cols = cache[:cols] ||= stmt.columns
- stmt.reset!
- stmt.bind_params(type_casted_binds)
- end
- ActiveRecord::Result.new(cols, stmt.to_a)
+ ActiveRecord::Result.new(cols, records)
+ end
end
end
- def exec_delete(sql, name = 'SQL', binds = [])
+ def exec_delete(sql, name = "SQL", binds = [])
exec_query(sql, name, binds)
@connection.changes
end
@@ -237,127 +264,36 @@ module ActiveRecord
end
def execute(sql, name = nil) #:nodoc:
- log(sql, name) { @connection.execute(sql) }
- end
+ if preventing_writes? && write_query?(sql)
+ raise ActiveRecord::ReadOnlyError, "Write query attempted while in readonly mode: #{sql}"
+ end
- def select_rows(sql, name = nil, binds = [])
- exec_query(sql, name, binds).rows
+ materialize_transactions
+
+ log(sql, name) do
+ ActiveSupport::Dependencies.interlock.permit_concurrent_loads do
+ @connection.execute(sql)
+ end
+ end
end
def begin_db_transaction #:nodoc:
- log('begin transaction',nil) { @connection.transaction }
+ log("begin transaction", nil) { @connection.transaction }
end
def commit_db_transaction #:nodoc:
- log('commit transaction',nil) { @connection.commit }
+ log("commit transaction", nil) { @connection.commit }
end
def exec_rollback_db_transaction #:nodoc:
- log('rollback transaction',nil) { @connection.rollback }
+ log("rollback transaction", nil) { @connection.rollback }
end
# SCHEMA STATEMENTS ========================================
- def tables(name = nil) # :nodoc:
- ActiveSupport::Deprecation.warn(<<-MSG.squish)
- #tables currently returns both tables and views.
- This behavior is deprecated and will be changed with Rails 5.1 to only return tables.
- Use #data_sources instead.
- MSG
-
- if name
- ActiveSupport::Deprecation.warn(<<-MSG.squish)
- Passing arguments to #tables is deprecated without replacement.
- MSG
- end
-
- data_sources
- end
-
- def data_sources
- select_values("SELECT name FROM sqlite_master WHERE type IN ('table','view') AND name <> 'sqlite_sequence'", 'SCHEMA')
- end
-
- def table_exists?(table_name)
- ActiveSupport::Deprecation.warn(<<-MSG.squish)
- #table_exists? currently checks both tables and views.
- This behavior is deprecated and will be changed with Rails 5.1 to only check tables.
- Use #data_source_exists? instead.
- MSG
-
- data_source_exists?(table_name)
- end
-
- def data_source_exists?(table_name)
- return false unless table_name.present?
-
- sql = "SELECT name FROM sqlite_master WHERE type IN ('table','view') AND name <> 'sqlite_sequence'"
- sql << " AND name = #{quote(table_name)}"
-
- select_values(sql, 'SCHEMA').any?
- end
-
- def views # :nodoc:
- select_values("SELECT name FROM sqlite_master WHERE type = 'view' AND name <> 'sqlite_sequence'", 'SCHEMA')
- end
-
- def view_exists?(view_name) # :nodoc:
- return false unless view_name.present?
-
- sql = "SELECT name FROM sqlite_master WHERE type = 'view' AND name <> 'sqlite_sequence'"
- sql << " AND name = #{quote(view_name)}"
-
- select_values(sql, 'SCHEMA').any?
- end
-
- # Returns an array of +Column+ objects for the table specified by +table_name+.
- def columns(table_name) # :nodoc:
- table_name = table_name.to_s
- table_structure(table_name).map do |field|
- case field["dflt_value"]
- when /^null$/i
- field["dflt_value"] = nil
- when /^'(.*)'$/m
- field["dflt_value"] = $1.gsub("''", "'")
- when /^"(.*)"$/m
- field["dflt_value"] = $1.gsub('""', '"')
- end
-
- collation = field['collation']
- sql_type = field['type']
- type_metadata = fetch_type_metadata(sql_type)
- new_column(field['name'], field['dflt_value'], type_metadata, field['notnull'].to_i == 0, table_name, nil, collation)
- end
- end
-
- # Returns an array of indexes for the given table.
- def indexes(table_name, name = nil) #:nodoc:
- exec_query("PRAGMA index_list(#{quote_table_name(table_name)})", 'SCHEMA').map do |row|
- sql = <<-SQL
- SELECT sql
- FROM sqlite_master
- WHERE name=#{quote(row['name'])} AND type='index'
- UNION ALL
- SELECT sql
- FROM sqlite_temp_master
- WHERE name=#{quote(row['name'])} AND type='index'
- SQL
- index_sql = exec_query(sql).first['sql']
- match = /\sWHERE\s+(.+)$/i.match(index_sql)
- where = match[1] if match
- IndexDefinition.new(
- table_name,
- row['name'],
- row['unique'] != 0,
- exec_query("PRAGMA index_info('#{row['name']}')", "SCHEMA").map { |col|
- col['name']
- }, nil, nil, where)
- end
- end
-
def primary_keys(table_name) # :nodoc:
- pks = table_structure(table_name).select { |f| f['pk'] > 0 }
- pks.sort_by { |f| f['pk'] }.map { |f| f['name'] }
+ pks = table_structure(table_name).select { |f| f["pk"] > 0 }
+ pks.sort_by { |f| f["pk"] }.map { |f| f["name"] }
end
def remove_index(table_name, options = {}) #:nodoc:
@@ -374,25 +310,22 @@ module ActiveRecord
rename_table_indexes(table_name, new_name)
end
- # See: http://www.sqlite.org/lang_altertable.html
- # SQLite has an additional restriction on the ALTER TABLE statement
- def valid_alter_table_type?(type)
- type.to_sym != :primary_key
- end
-
def add_column(table_name, column_name, type, options = {}) #:nodoc:
- if valid_alter_table_type?(type)
- super(table_name, column_name, type, options)
- else
+ if invalid_alter_table_type?(type, options)
alter_table(table_name) do |definition|
definition.column(column_name, type, options)
end
+ else
+ super
end
end
def remove_column(table_name, column_name, type = nil, options = {}) #:nodoc:
alter_table(table_name) do |definition|
definition.remove_column column_name
+ definition.foreign_keys.delete_if do |_, fk_options|
+ fk_options[:column] == column_name.to_s
+ end
end
end
@@ -415,14 +348,13 @@ module ActiveRecord
def change_column(table_name, column_name, type, options = {}) #:nodoc:
alter_table(table_name) do |definition|
- include_default = options_include_default?(options)
definition[column_name].instance_eval do
self.type = type
self.limit = options[:limit] if options.include?(:limit)
- self.default = options[:default] if include_default
+ self.default = options[:default] if options.include?(:default)
self.null = options[:null] if options.include?(:null)
self.precision = options[:precision] if options.include?(:precision)
- self.scale = options[:scale] if options.include?(:scale)
+ self.scale = options[:scale] if options.include?(:scale)
self.collation = options[:collation] if options.include?(:collation)
end
end
@@ -430,52 +362,123 @@ module ActiveRecord
def rename_column(table_name, column_name, new_column_name) #:nodoc:
column = column_for(table_name, column_name)
- alter_table(table_name, rename: {column.name => new_column_name.to_s})
+ alter_table(table_name, rename: { column.name => new_column_name.to_s })
rename_column_indexes(table_name, column.name, new_column_name)
end
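As an illustration, a minimal migration sketch (model and column names are hypothetical, assuming a Rails 6-era migration class) that exercises this path; on SQLite the rename is performed by rebuilding and copying the table via alter_table rather than by a native ALTER TABLE ... RENAME COLUMN:

    class RenameHandleToUsername < ActiveRecord::Migration[6.0]
      def change
        # Internally routed through alter_table/copy_table on the SQLite3 adapter.
        rename_column :users, :handle, :username
      end
    end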
- protected
+ def add_reference(table_name, ref_name, **options) # :nodoc:
+ super(table_name, ref_name, type: :integer, **options)
+ end
+ alias :add_belongs_to :add_reference
+
+ def foreign_keys(table_name)
+ fk_info = exec_query("PRAGMA foreign_key_list(#{quote(table_name)})", "SCHEMA")
+ fk_info.map do |row|
+ options = {
+ column: row["from"],
+ primary_key: row["to"],
+ on_delete: extract_foreign_key_action(row["on_delete"]),
+ on_update: extract_foreign_key_action(row["on_update"])
+ }
+ ForeignKeyDefinition.new(table_name, row["table"], options)
+ end
+ end
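Roughly, each PRAGMA row is translated into a ForeignKeyDefinition; the table and column names below are hypothetical and the exact action symbols depend on extract_foreign_key_action:

    # PRAGMA foreign_key_list('comments') might yield a row such as:
    #   { "table" => "posts", "from" => "post_id", "to" => "id",
    #     "on_delete" => "CASCADE", "on_update" => "NO ACTION" }
    # which the method above builds into approximately:
    ForeignKeyDefinition.new("comments", "posts",
      column: "post_id", primary_key: "id",
      on_delete: :cascade, on_update: nil)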
+
+ def build_insert_sql(insert) # :nodoc:
+ sql = +"INSERT #{insert.into} #{insert.values_list}"
+
+ if insert.skip_duplicates?
+ sql << " ON CONFLICT #{insert.conflict_target} DO NOTHING"
+ elsif insert.update_duplicates?
+ sql << " ON CONFLICT #{insert.conflict_target} DO UPDATE SET "
+ sql << insert.updatable_columns.map { |column| "#{column}=excluded.#{column}" }.join(",")
+ end
+
+ sql
+ end
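A hedged sketch of the statements this builder is expected to emit, assuming the Rails 6 insert_all/upsert_all API and a hypothetical Book model:

    Book.insert_all([{ id: 1, title: "Rework" }], unique_by: :id)
    # INSERT INTO "books" ("id","title") VALUES (1,'Rework')
    #   ON CONFLICT ("id") DO NOTHING

    Book.upsert_all([{ id: 1, title: "Rework, 2nd ed." }], unique_by: :id)
    # INSERT INTO "books" ("id","title") VALUES (1,'Rework, 2nd ed.')
    #   ON CONFLICT ("id") DO UPDATE SET title=excluded.title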
+
+ private
+ # See https://www.sqlite.org/limits.html;
+ # the default value is 999 when not configured.
+ def bind_params_length
+ 999
+ end
+
+ def check_version
+ if sqlite_version < "3.8.0"
+ raise "Your version of SQLite (#{sqlite_version}) is too old. Active Record supports SQLite >= 3.8."
+ end
+ end
+
+ def initialize_type_map(m = type_map)
+ super
+ register_class_with_limit m, %r(int)i, SQLite3Integer
+ end
def table_structure(table_name)
- structure = exec_query("PRAGMA table_info(#{quote_table_name(table_name)})", 'SCHEMA')
+ structure = exec_query("PRAGMA table_info(#{quote_table_name(table_name)})", "SCHEMA")
raise(ActiveRecord::StatementInvalid, "Could not find table '#{table_name}'") if structure.empty?
table_structure_with_collation(table_name, structure)
end
+ alias column_definitions table_structure
- def alter_table(table_name, options = {}) #:nodoc:
+ # See: https://www.sqlite.org/lang_altertable.html
+ # SQLite has an additional restriction on the ALTER TABLE statement
+ def invalid_alter_table_type?(type, options)
+ type.to_sym == :primary_key || options[:primary_key]
+ end
+
+ def alter_table(table_name, foreign_keys = foreign_keys(table_name), **options)
altered_table_name = "a#{table_name}"
- caller = lambda {|definition| yield definition if block_given?}
+
+ caller = lambda do |definition|
+ rename = options[:rename] || {}
+ foreign_keys.each do |fk|
+ if column = rename[fk.options[:column]]
+ fk.options[:column] = column
+ end
+ to_table = strip_table_name_prefix_and_suffix(fk.to_table)
+ definition.foreign_key(to_table, fk.options)
+ end
+
+ yield definition if block_given?
+ end
transaction do
- move_table(table_name, altered_table_name,
- options.merge(:temporary => true))
- move_table(altered_table_name, table_name, &caller)
+ disable_referential_integrity do
+ move_table(table_name, altered_table_name, options.merge(temporary: true))
+ move_table(altered_table_name, table_name, &caller)
+ end
end
end
- def move_table(from, to, options = {}, &block) #:nodoc:
+ def move_table(from, to, options = {}, &block)
copy_table(from, to, options, &block)
drop_table(from)
end
- def copy_table(from, to, options = {}) #:nodoc:
+ def copy_table(from, to, options = {})
from_primary_key = primary_key(from)
options[:id] = false
create_table(to, options) do |definition|
@definition = definition
- @definition.primary_key(from_primary_key) if from_primary_key.present?
+ if from_primary_key.is_a?(Array)
+ @definition.primary_keys from_primary_key
+ end
columns(from).each do |column|
column_name = options[:rename] ?
(options[:rename][column.name] ||
options[:rename][column.name.to_sym] ||
column.name) : column.name
- next if column_name == from_primary_key
@definition.column(column_name, column.type,
- :limit => column.limit, :default => column.default,
- :precision => column.precision, :scale => column.scale,
- :null => column.null, collation: column.collation)
+ limit: column.limit, default: column.default,
+ precision: column.precision, scale: column.scale,
+ null: column.null, collation: column.collation,
+ primary_key: column_name == from_primary_key
+ )
end
+
yield @definition if block_given?
end
copy_table_indexes(from, to, options[:rename] || {})
@@ -484,7 +487,7 @@ module ActiveRecord
options[:rename] || {})
end
- def copy_table_indexes(from, to, rename = {}) #:nodoc:
+ def copy_table_indexes(from, to, rename = {})
indexes(from).each do |index|
name = index.name
if to == "a#{from}"
@@ -493,89 +496,132 @@ module ActiveRecord
name = name[1..-1]
end
- to_column_names = columns(to).map(&:name)
- columns = index.columns.map {|c| rename[c] || c }.select do |column|
- to_column_names.include?(column)
+ columns = index.columns
+ if columns.is_a?(Array)
+ to_column_names = columns(to).map(&:name)
+ columns = columns.map { |c| rename[c] || c }.select do |column|
+ to_column_names.include?(column)
+ end
end
unless columns.empty?
# index name can't be the same
opts = { name: name.gsub(/(^|_)(#{from})_/, "\\1#{to}_"), internal: true }
opts[:unique] = true if index.unique
+ opts[:where] = index.where if index.where
add_index(to, columns, opts)
end
end
end
- def copy_table_contents(from, to, columns, rename = {}) #:nodoc:
- column_mappings = Hash[columns.map {|name| [name, name]}]
+ def copy_table_contents(from, to, columns, rename = {})
+ column_mappings = Hash[columns.map { |name| [name, name] }]
rename.each { |a| column_mappings[a.last] = a.first }
from_columns = columns(from).collect(&:name)
- columns = columns.find_all{|col| from_columns.include?(column_mappings[col])}
+ columns = columns.find_all { |col| from_columns.include?(column_mappings[col]) }
from_columns_to_copy = columns.map { |col| column_mappings[col] }
- quoted_columns = columns.map { |col| quote_column_name(col) } * ','
- quoted_from_columns = from_columns_to_copy.map { |col| quote_column_name(col) } * ','
+ quoted_columns = columns.map { |col| quote_column_name(col) } * ","
+ quoted_from_columns = from_columns_to_copy.map { |col| quote_column_name(col) } * ","
exec_query("INSERT INTO #{quote_table_name(to)} (#{quoted_columns})
SELECT #{quoted_from_columns} FROM #{quote_table_name(from)}")
end
def sqlite_version
- @sqlite_version ||= SQLite3Adapter::Version.new(select_value('select sqlite_version(*)'))
+ @sqlite_version ||= SQLite3Adapter::Version.new(query_value("SELECT sqlite_version(*)"))
end
- def translate_exception(exception, message)
+ def translate_exception(exception, message:, sql:, binds:)
case exception.message
# SQLite 3.8.2 returns a newly formatted error message:
# UNIQUE constraint failed: *table_name*.*column_name*
# Older versions of SQLite return:
# column *column_name* is not unique
when /column(s)? .* (is|are) not unique/, /UNIQUE constraint failed: .*/
- RecordNotUnique.new(message)
+ RecordNotUnique.new(message, sql: sql, binds: binds)
+ when /.* may not be NULL/, /NOT NULL constraint failed: .*/
+ NotNullViolation.new(message, sql: sql, binds: binds)
+ when /FOREIGN KEY constraint failed/i
+ InvalidForeignKey.new(message, sql: sql, binds: binds)
else
super
end
end
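For example (assuming a posts.title column with a NOT NULL constraint and no model-level validation), a failing statement now surfaces as the mapped exception class rather than a generic StatementInvalid:

    Post.new.save(validate: false)
    # raises ActiveRecord::NotNullViolation
    # (message roughly: "SQLite3::ConstraintException: NOT NULL constraint failed: posts.title")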
- private
COLLATE_REGEX = /.*\"(\w+)\".*collate\s+\"(\w+)\".*/i.freeze
def table_structure_with_collation(table_name, basic_structure)
collation_hash = {}
- sql = "SELECT sql FROM
- (SELECT * FROM sqlite_master UNION ALL
- SELECT * FROM sqlite_temp_master)
- WHERE type='table' and name='#{ table_name }' \;"
+ sql = <<~SQL
+ SELECT sql FROM
+ (SELECT * FROM sqlite_master UNION ALL
+ SELECT * FROM sqlite_temp_master)
+ WHERE type = 'table' AND name = #{quote(table_name)}
+ SQL
# The result will contain a string similar to:
# CREATE TABLE "users" ("id" INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
# "password_digest" varchar COLLATE "NOCASE");
- result = exec_query(sql, 'SCHEMA').first
+ result = exec_query(sql, "SCHEMA").first
if result
- # Splitting with left parantheses and picking up last will return all
+ # Splitting on the left parenthesis and taking the last part returns all
# columns separated by commas.
- columns_string = result["sql"].split('(').last
+ columns_string = result["sql"].split("(").last
- columns_string.split(',').each do |column_string|
+ columns_string.split(",").each do |column_string|
# This regex will match the column name and collation type and will save
# the value in $1 and $2 respectively.
- collation_hash[$1] = $2 if (COLLATE_REGEX =~ column_string)
+ collation_hash[$1] = $2 if COLLATE_REGEX =~ column_string
end
basic_structure.map! do |column|
- column_name = column['name']
+ column_name = column["name"]
if collation_hash.has_key? column_name
- column['collation'] = collation_hash[column_name]
+ column["collation"] = collation_hash[column_name]
end
column
end
else
- basic_structure.to_hash
+ basic_structure.to_a
end
end
+
+ def arel_visitor
+ Arel::Visitors::SQLite.new(self)
+ end
+
+ def build_statement_pool
+ StatementPool.new(self.class.type_cast_config_to_integer(@config[:statement_limit]))
+ end
+
+ def connect
+ @connection = ::SQLite3::Database.new(
+ @config[:database].to_s,
+ @config.merge(results_as_hash: true)
+ )
+ configure_connection
+ end
+
+ def configure_connection
+ @connection.busy_timeout(self.class.type_cast_config_to_integer(@config[:timeout])) if @config[:timeout]
+
+ execute("PRAGMA foreign_keys = ON", "SCHEMA")
+ end
+
+ class SQLite3Integer < Type::Integer # :nodoc:
+ private
+ def _limit
+ # The INTEGER storage class can store values up to 8 bytes.
+ # See https://www.sqlite.org/datatype3.html#storage_classes_and_datatypes
+ limit || 8
+ end
+ end
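In effect, integer columns without an explicit limit are treated as 8-byte (64-bit) values on SQLite; a sketch with a hypothetical table:

    create_table :counters do |t|
      t.integer :value           # no limit: values up to 8 bytes are accepted
      t.integer :tiny, limit: 2  # serializing a value outside the 2-byte range
    end                          # is still expected to raise a RangeError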
+
+ ActiveRecord::Type.register(:integer, SQLite3Integer, adapter: :sqlite3)
end
+ ActiveSupport.run_load_hooks(:active_record_sqlite3adapter, SQLite3Adapter)
end
end
diff --git a/activerecord/lib/active_record/connection_adapters/statement_pool.rb b/activerecord/lib/active_record/connection_adapters/statement_pool.rb
index 57463dd749..46bd831da7 100644
--- a/activerecord/lib/active_record/connection_adapters/statement_pool.rb
+++ b/activerecord/lib/active_record/connection_adapters/statement_pool.rb
@@ -1,11 +1,15 @@
+# frozen_string_literal: true
+
module ActiveRecord
module ConnectionAdapters
- class StatementPool
+ class StatementPool # :nodoc:
include Enumerable
- def initialize(max = 1000)
- @cache = Hash.new { |h,pid| h[pid] = {} }
- @max = max
+ DEFAULT_STATEMENT_LIMIT = 1000
+
+ def initialize(statement_limit = nil)
+ @cache = Hash.new { |h, pid| h[pid] = {} }
+ @statement_limit = statement_limit || DEFAULT_STATEMENT_LIMIT
end
def each(&block)
@@ -25,7 +29,7 @@ module ActiveRecord
end
def []=(sql, stmt)
- while @max <= cache.size
+ while @statement_limit <= cache.size
dealloc(cache.shift.last)
end
cache[sql] = stmt
@@ -45,13 +49,13 @@ module ActiveRecord
private
- def cache
- @cache[Process.pid]
- end
+ def cache
+ @cache[Process.pid]
+ end
- def dealloc(stmt)
- raise NotImplementedError
- end
+ def dealloc(stmt)
+ raise NotImplementedError
+ end
end
end
end
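A minimal sketch of how an adapter-specific pool is expected to subclass StatementPool (the cached objects here are stand-in lambdas, not real prepared statements); once statement_limit is reached, the oldest cached entry is deallocated first:

    require "active_record"

    class ExampleStatementPool < ActiveRecord::ConnectionAdapters::StatementPool
      private
        def dealloc(stmt)
          stmt.call # pretend "closing" a statement is just invoking a callback
        end
    end

    pool = ExampleStatementPool.new(2)
    pool["SELECT 1"] = -> { puts "closed SELECT 1" }
    pool["SELECT 2"] = -> { puts "closed SELECT 2" }
    pool["SELECT 3"] = -> { puts "closed SELECT 3" } # evicts and deallocs "SELECT 1" first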
diff --git a/activerecord/lib/active_record/connection_handling.rb b/activerecord/lib/active_record/connection_handling.rb
index a8b3d03ba5..53069cd899 100644
--- a/activerecord/lib/active_record/connection_handling.rb
+++ b/activerecord/lib/active_record/connection_handling.rb
@@ -1,6 +1,8 @@
+# frozen_string_literal: true
+
module ActiveRecord
module ConnectionHandling
- RAILS_ENV = -> { (Rails.env if defined?(Rails.env)) || ENV["RAILS_ENV"] || ENV["RACK_ENV"] }
+ RAILS_ENV = -> { (Rails.env if defined?(Rails.env)) || ENV["RAILS_ENV"].presence || ENV["RACK_ENV"].presence }
DEFAULT_ENV = -> { RAILS_ENV.call || "default_env" }
# Establishes the connection to the database. Accepts a hash as input where
@@ -44,40 +46,143 @@ module ActiveRecord
#
# The exceptions AdapterNotSpecified, AdapterNotFound and +ArgumentError+
# may be returned on an error.
- def establish_connection(spec = nil)
- spec ||= DEFAULT_ENV.call.to_sym
- resolver = ConnectionAdapters::ConnectionSpecification::Resolver.new configurations
- spec = resolver.spec(spec)
+ def establish_connection(config_or_env = nil)
+ config_hash = resolve_config_for_connection(config_or_env)
+ connection_handler.establish_connection(config_hash)
+ end
- unless respond_to?(spec.adapter_method)
- raise AdapterNotFound, "database configuration specifies nonexistent #{spec.config[:adapter]} adapter"
+ # Connects a model to the databases specified. The +database+ keyword
+ # takes a hash consisting of a +role+ and a +database_key+.
+ #
+ # This will create a connection handler for switching between connections,
+ # look up the config hash using the +database_key+ and finally
+ # establishes a connection to that config.
+ #
+ # class AnimalsModel < ApplicationRecord
+ # self.abstract_class = true
+ #
+ # connects_to database: { writing: :primary, reading: :primary_replica }
+ # end
+ #
+ # Returns an array of established connections.
+ def connects_to(database: {})
+ connections = []
+
+ database.each do |role, database_key|
+ config_hash = resolve_config_for_connection(database_key)
+ handler = lookup_connection_handler(role.to_sym)
+
+ connections << handler.establish_connection(config_hash)
end
- remove_connection
- connection_handler.establish_connection self, spec
+ connections
end
- class MergeAndResolveDefaultUrlConfig # :nodoc:
- def initialize(raw_configurations)
- @raw_config = raw_configurations.dup
- @env = DEFAULT_ENV.call.to_s
- end
+ # Connects to a database or role (ex writing, reading, or another
+ # custom role) for the duration of the block.
+ #
+ # If a role is passed, Active Record will look up the connection
+ # based on the requested role:
+ #
+ # ActiveRecord::Base.connected_to(role: :writing) do
+ # Dog.create! # creates dog using dog connection
+ # end
+ #
+ # ActiveRecord::Base.connected_to(role: :reading) do
+ # Dog.create! # throws exception because we're on a replica
+ # end
+ #
+ # ActiveRecord::Base.connected_to(role: :unknown_role) do
+ # # raises exception due to non-existent role
+ # end
+ #
+ # For cases where you may want to connect to a database outside of the model,
+ # you can use +connected_to+ with a +database+ argument. The +database+ argument
+ # expects a symbol that corresponds to the database key in your config.
+ #
+ # This will connect to a new database for the queries inside the block.
+ #
+ # ActiveRecord::Base.connected_to(database: :animals_slow_replica) do
+ # Dog.run_a_long_query # runs a long query while connected to the +animals_slow_replica+
+ # end
+ def connected_to(database: nil, role: nil, &blk)
+ if database && role
+ raise ArgumentError, "connected_to can only accept a `database` or a `role` argument, but not both arguments."
+ elsif database
+ if database.is_a?(Hash)
+ role, database = database.first
+ role = role.to_sym
+ else
+ role = database.to_sym
+ end
- # Returns fully resolved connection hashes.
- # Merges connection information from `ENV['DATABASE_URL']` if available.
- def resolve
- ConnectionAdapters::ConnectionSpecification::Resolver.new(config).resolve_all
+ config_hash = resolve_config_for_connection(database)
+ handler = lookup_connection_handler(role)
+
+ with_handler(role) do
+ handler.establish_connection(config_hash)
+ yield
+ end
+ elsif role
+ with_handler(role.to_sym, &blk)
+ else
+ raise ArgumentError, "must provide a `database` or a `role`."
end
+ end
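Putting connects_to and connected_to together, a hedged sketch assuming :primary and :primary_replica entries exist in the database configuration:

    class ApplicationRecord < ActiveRecord::Base
      self.abstract_class = true

      connects_to database: { writing: :primary, reading: :primary_replica }
    end

    ActiveRecord::Base.connected_to(role: :reading) do
      Post.first  # served by the :primary_replica connection
      # Post.create! here would fail, since (per the docs above) the replica is read-only
    end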
+
+ # Returns true if role is the current connected role.
+ #
+ # ActiveRecord::Base.connected_to(role: :writing) do
+ # ActiveRecord::Base.connected_to?(role: :writing) #=> true
+ # ActiveRecord::Base.connected_to?(role: :reading) #=> false
+ # end
+ def connected_to?(role:)
+ current_role == role.to_sym
+ end
+
+ # Returns the symbol representing the current connected role.
+ #
+ # ActiveRecord::Base.connected_to(role: :writing) do
+ # ActiveRecord::Base.current_role #=> :writing
+ # end
+ #
+ # ActiveRecord::Base.connected_to(role: :reading) do
+ # ActiveRecord::Base.current_role #=> :reading
+ # end
+ def current_role
+ connection_handlers.key(connection_handler)
+ end
+
+ def lookup_connection_handler(handler_key) # :nodoc:
+ connection_handlers[handler_key] ||= ActiveRecord::ConnectionAdapters::ConnectionHandler.new
+ end
+
+ def with_handler(handler_key, &blk) # :nodoc:
+ handler = lookup_connection_handler(handler_key)
+ swap_connection_handler(handler, &blk)
+ end
+
+ def resolve_config_for_connection(config_or_env) # :nodoc:
+ raise "Anonymous class is not allowed." unless name
+
+ config_or_env ||= DEFAULT_ENV.call.to_sym
+ pool_name = self == Base ? "primary" : name
+ self.connection_specification_name = pool_name
- private
- def config
- @raw_config.dup.tap do |cfg|
- if url = ENV['DATABASE_URL']
- cfg[@env] ||= {}
- cfg[@env]["url"] ||= url
- end
- end
+ resolver = ConnectionAdapters::ConnectionSpecification::Resolver.new(Base.configurations)
+ config_hash = resolver.resolve(config_or_env, pool_name).symbolize_keys
+ config_hash[:name] = pool_name
+
+ config_hash
+ end
+
+ # Clears the query cache for all connections associated with the current thread.
+ def clear_query_caches_for_current_thread
+ ActiveRecord::Base.connection_handlers.each_value do |handler|
+ handler.connection_pool_list.each do |pool|
+ pool.connection.clear_query_cache if pool.active_connection?
end
+ end
end
# Returns the connection currently associated with the class. This can
@@ -87,12 +192,14 @@ module ActiveRecord
retrieve_connection
end
- def connection_id
- ActiveRecord::RuntimeRegistry.connection_id ||= Thread.current.object_id
- end
+ attr_writer :connection_specification_name
- def connection_id=(connection_id)
- ActiveRecord::RuntimeRegistry.connection_id = connection_id
+ # Returns the specification name from the current class or its parent.
+ def connection_specification_name
+ if !defined?(@connection_specification_name) || @connection_specification_name.nil?
+ return self == Base ? "primary" : superclass.connection_specification_name
+ end
+ @connection_specification_name
end
# Returns the configuration of the associated connection as a hash:
@@ -106,20 +213,28 @@ module ActiveRecord
end
def connection_pool
- connection_handler.retrieve_connection_pool(self) or raise ConnectionNotEstablished
+ connection_handler.retrieve_connection_pool(connection_specification_name) || raise(ConnectionNotEstablished)
end
def retrieve_connection
- connection_handler.retrieve_connection(self)
+ connection_handler.retrieve_connection(connection_specification_name)
end
# Returns +true+ if Active Record is connected.
def connected?
- connection_handler.connected?(self)
+ connection_handler.connected?(connection_specification_name)
end
- def remove_connection(klass = self)
- connection_handler.remove_connection(klass)
+ def remove_connection(name = nil)
+ name ||= @connection_specification_name if defined?(@connection_specification_name)
+ # if removing a connection that has a pool, we reset the
+ # connection_specification_name so it will use the parent
+ # pool.
+ if connection_handler.retrieve_connection_pool(name)
+ self.connection_specification_name = nil
+ end
+
+ connection_handler.remove_connection(name)
end
def clear_cache! # :nodoc:
@@ -127,6 +242,15 @@ module ActiveRecord
end
delegate :clear_active_connections!, :clear_reloadable_connections!,
- :clear_all_connections!, :to => :connection_handler
+ :clear_all_connections!, :flush_idle_connections!, to: :connection_handler
+
+ private
+
+ def swap_connection_handler(handler, &blk) # :nodoc:
+ old_handler, ActiveRecord::Base.connection_handler = ActiveRecord::Base.connection_handler, handler
+ yield
+ ensure
+ ActiveRecord::Base.connection_handler = old_handler
+ end
end
end
diff --git a/activerecord/lib/active_record/core.rb b/activerecord/lib/active_record/core.rb
index c8343dd97f..eb4b48bc37 100644
--- a/activerecord/lib/active_record/core.rb
+++ b/activerecord/lib/active_record/core.rb
@@ -1,7 +1,9 @@
-require 'thread'
-require 'active_support/core_ext/hash/indifferent_access'
-require 'active_support/core_ext/object/duplicable'
-require 'active_support/core_ext/string/filters'
+# frozen_string_literal: true
+
+require "active_support/core_ext/hash/indifferent_access"
+require "active_support/core_ext/string/filters"
+require "active_support/parameter_filter"
+require "concurrent/map"
module ActiveRecord
module Core
@@ -17,8 +19,15 @@ module ActiveRecord
mattr_accessor :logger, instance_writer: false
##
+ # :singleton-method:
+ #
+ # Specifies if the methods calling database queries should be logged below
+ # their relevant queries. Defaults to false.
+ mattr_accessor :verbose_query_logs, instance_writer: false, default: false
+
+ ##
# Contains the database configuration - as is typically stored in config/database.yml -
- # as a Hash.
+ # as an ActiveRecord::DatabaseConfigurations object.
#
# For example, the following database.yml...
#
@@ -32,22 +41,18 @@ module ActiveRecord
#
# ...would result in ActiveRecord::Base.configurations to look like this:
#
- # {
- # 'development' => {
- # 'adapter' => 'sqlite3',
- # 'database' => 'db/development.sqlite3'
- # },
- # 'production' => {
- # 'adapter' => 'sqlite3',
- # 'database' => 'db/production.sqlite3'
- # }
- # }
+ # #<ActiveRecord::DatabaseConfigurations:0x00007fd1acbdf800 @configurations=[
+ # #<ActiveRecord::DatabaseConfigurations::HashConfig:0x00007fd1acbded10 @env_name="development",
+ # @spec_name="primary", @config={"adapter"=>"sqlite3", "database"=>"db/development.sqlite3"}>,
+ # #<ActiveRecord::DatabaseConfigurations::HashConfig:0x00007fd1acbdea90 @env_name="production",
+ # @spec_name="primary", @config={"adapter"=>"mysql2", "database"=>"db/production.sqlite3"}>
+ # ]>
def self.configurations=(config)
- @@configurations = ActiveRecord::ConnectionHandling::MergeAndResolveDefaultUrlConfig.new(config).resolve
+ @@configurations = ActiveRecord::DatabaseConfigurations.new(config)
end
self.configurations = {}
- # Returns fully resolved configurations hash
+ # Returns fully resolved ActiveRecord::DatabaseConfigurations object
def self.configurations
@@configurations
end
@@ -56,8 +61,7 @@ module ActiveRecord
# :singleton-method:
# Determines whether to use Time.utc (using :utc) or Time.local (using :local) when pulling
# dates and times from the database. This is set to :utc by default.
- mattr_accessor :default_timezone, instance_writer: false
- self.default_timezone = :utc
+ mattr_accessor :default_timezone, instance_writer: false, default: :utc
##
# :singleton-method:
@@ -67,32 +71,37 @@ module ActiveRecord
# ActiveRecord::Schema file which can be loaded into any database that
# supports migrations. Use :ruby if you want to have different database
# adapters for, e.g., your development and test environments.
- mattr_accessor :schema_format, instance_writer: false
- self.schema_format = :ruby
+ mattr_accessor :schema_format, instance_writer: false, default: :ruby
##
# :singleton-method:
- # Specifies if an error should be raised on query limit or order being
+ # Specifies if an error should be raised if the query has an order being
# ignored when doing batch queries. Useful in applications where the
- # limit or scope being ignored is error-worthy, rather than a warning.
- mattr_accessor :error_on_ignored_order_or_limit, instance_writer: false
- self.error_on_ignored_order_or_limit = false
+ # scope being ignored is error-worthy, rather than a warning.
+ mattr_accessor :error_on_ignored_order, instance_writer: false, default: false
+
+ # :singleton-method:
+ # Specifies the behavior for unsafe raw query methods. Values are as follows:
+ # deprecated - Warnings are logged when unsafe raw SQL is passed to
+ # query methods.
+ # disabled - Unsafe raw SQL passed to query methods results in an
+ # UnknownAttributeReference exception.
+ mattr_accessor :allow_unsafe_raw_sql, instance_writer: false, default: :deprecated
##
# :singleton-method:
# Specify whether or not to use timestamps for migration versions
- mattr_accessor :timestamped_migrations, instance_writer: false
- self.timestamped_migrations = true
+ mattr_accessor :timestamped_migrations, instance_writer: false, default: true
##
# :singleton-method:
# Specify whether schema dump should happen at the end of the
- # db:migrate rake task. This is true by default, which is useful for the
+ # db:migrate rails command. This is true by default, which is useful for the
# development environment. This should ideally be false in the production
# environment where dumping schema is rarely needed.
- mattr_accessor :dump_schema_after_migration, instance_writer: false
- self.dump_schema_after_migration = true
+ mattr_accessor :dump_schema_after_migration, instance_writer: false, default: true
+ mattr_accessor :database_selector, instance_writer: false
##
# :singleton-method:
# Specifies which database schemas to dump when calling db:structure:dump.
@@ -100,8 +109,7 @@ module ActiveRecord
# schema_search_path are dumped. Use :all to dump all schemas regardless
# of schema_search_path, or a string of comma separated schemas for a
# custom list.
- mattr_accessor :dump_schemas, instance_writer: false
- self.dump_schemas = :schema_search_path
+ mattr_accessor :dump_schemas, instance_writer: false, default: :schema_search_path
##
# :singleton-method:
@@ -110,33 +118,35 @@ module ActiveRecord
# be used to identify queries which load thousands of records and
# potentially cause memory bloat.
mattr_accessor :warn_on_records_fetched_greater_than, instance_writer: false
- self.warn_on_records_fetched_greater_than = nil
mattr_accessor :maintain_test_schema, instance_accessor: false
mattr_accessor :belongs_to_required_by_default, instance_accessor: false
+ mattr_accessor :connection_handlers, instance_accessor: false, default: {}
+
+ mattr_accessor :writing_role, instance_accessor: false, default: :writing
+
+ mattr_accessor :reading_role, instance_accessor: false, default: :reading
+
class_attribute :default_connection_handler, instance_writer: false
+ self.filter_attributes = []
+
def self.connection_handler
- ActiveRecord::RuntimeRegistry.connection_handler || default_connection_handler
+ Thread.current.thread_variable_get("ar_connection_handler") || default_connection_handler
end
def self.connection_handler=(handler)
- ActiveRecord::RuntimeRegistry.connection_handler = handler
+ Thread.current.thread_variable_set("ar_connection_handler", handler)
end
self.default_connection_handler = ConnectionAdapters::ConnectionHandler.new
end
module ClassMethods
- def allocate
- define_attribute_methods
- super
- end
-
def initialize_find_by_cache # :nodoc:
- @find_by_statement_cache = { true => {}.extend(Mutex_m), false => {}.extend(Mutex_m) }
+ @find_by_statement_cache = { true => Concurrent::Map.new, false => Concurrent::Map.new }
end
def inherited(child_class) # :nodoc:
@@ -151,41 +161,34 @@ module ActiveRecord
return super if block_given? ||
primary_key.nil? ||
scope_attributes? ||
- columns_hash.include?(inheritance_column) ||
- ids.first.kind_of?(Array)
-
- id = ids.first
- if ActiveRecord::Base === id
- id = id.id
- ActiveSupport::Deprecation.warn(<<-MSG.squish)
- You are passing an instance of ActiveRecord::Base to `find`.
- Please pass the id of the object by calling `.id`
- MSG
- end
+ columns_hash.key?(inheritance_column) && !base_class?
+
+ id = ids.first
+
+ return super if StatementCache.unsupported_value?(id)
key = primary_key
statement = cached_find_by_statement(key) { |params|
where(key => params.bind).limit(1)
}
- record = statement.execute([id], self, connection).first
+
+ record = statement.execute([id], connection)&.first
unless record
raise RecordNotFound.new("Couldn't find #{name} with '#{primary_key}'=#{id}",
name, primary_key, id)
end
record
- rescue RangeError
- raise RecordNotFound.new("Couldn't find #{name} with an out of range value for '#{primary_key}'",
- name, primary_key)
end
def find_by(*args) # :nodoc:
- return super if scope_attributes? || !(Hash === args.first) || reflect_on_all_aggregations.any?
+ return super if scope_attributes? || reflect_on_all_aggregations.any? ||
+ columns_hash.key?(inheritance_column) && !base_class?
hash = args.first
- return super if hash.values.any? { |v|
- v.nil? || Array === v || Hash === v || Relation === v
+ return super if !(Hash === hash) || hash.values.any? { |v|
+ StatementCache.unsupported_value?(v)
}
# We can't cache Post.find_by(author: david) ...yet
@@ -200,32 +203,44 @@ module ActiveRecord
where(wheres).limit(1)
}
begin
- statement.execute(hash.values, self, connection).first
+ statement.execute(hash.values, connection)&.first
rescue TypeError
raise ActiveRecord::StatementInvalid
- rescue RangeError
- nil
end
end
def find_by!(*args) # :nodoc:
- find_by(*args) or raise RecordNotFound.new("Couldn't find #{name}", name)
+ find_by(*args) || raise(RecordNotFound.new("Couldn't find #{name}", name))
end
def initialize_generated_modules # :nodoc:
generated_association_methods
end
- def generated_association_methods
+ def generated_association_methods # :nodoc:
@generated_association_methods ||= begin
mod = const_set(:GeneratedAssociationMethods, Module.new)
+ private_constant :GeneratedAssociationMethods
include mod
+
mod
end
end
+ # Returns columns which shouldn't be exposed while calling +#inspect+.
+ def filter_attributes
+ if defined?(@filter_attributes)
+ @filter_attributes
+ else
+ superclass.filter_attributes
+ end
+ end
+
+ # Specifies columns which shouldn't be exposed while calling +#inspect+.
+ attr_writer :filter_attributes
+
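A brief sketch of the filtering in action; the model and attribute names are hypothetical:

    ActiveRecord::Base.filter_attributes = [:password_digest]

    User.first.inspect
    # => #<User id: 1, email: "dev@example.com", password_digest: [FILTERED]>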
# Returns a string like 'Post(id:integer, title:string, body:text)'
- def inspect
+ def inspect # :nodoc:
if self == Base
super
elsif abstract_class?
@@ -233,37 +248,27 @@ module ActiveRecord
elsif !connected?
"#{super} (call '#{super}.connection' to establish a connection)"
elsif table_exists?
- attr_list = attribute_types.map { |name, type| "#{name}: #{type.type}" } * ', '
+ attr_list = attribute_types.map { |name, type| "#{name}: #{type.type}" } * ", "
"#{super}(#{attr_list})"
else
"#{super}(Table doesn't exist)"
end
end
- # Overwrite the default class equality method to provide support for association proxies.
- def ===(object)
+ # Overwrite the default class equality method to provide support for decorated models.
+ def ===(object) # :nodoc:
object.is_a?(self)
end
# Returns an instance of <tt>Arel::Table</tt> loaded with the current table name.
#
# class Post < ActiveRecord::Base
- # scope :published_and_commented, -> { published.and(self.arel_table[:comments_count].gt(0)) }
+ # scope :published_and_commented, -> { published.and(arel_table[:comments_count].gt(0)) }
# end
def arel_table # :nodoc:
@arel_table ||= Arel::Table.new(table_name, type_caster: type_caster)
end
- # Returns the Arel engine.
- def arel_engine # :nodoc:
- @arel_engine ||=
- if Base == self || connection_handler.retrieve_connection_pool(self)
- self
- else
- superclass.arel_engine
- end
- end
-
def arel_attribute(name, table = arel_table) # :nodoc:
name = attribute_alias(name) if attribute_alias?(name)
table[name]
@@ -277,28 +282,31 @@ module ActiveRecord
TypeCaster::Map.new(self)
end
+ def _internal? # :nodoc:
+ false
+ end
+
private
- def cached_find_by_statement(key, &block) # :nodoc:
- cache = @find_by_statement_cache[connection.prepared_statements]
- cache[key] || cache.synchronize {
- cache[key] ||= StatementCache.create(connection, &block)
- }
- end
+ def cached_find_by_statement(key, &block)
+ cache = @find_by_statement_cache[connection.prepared_statements]
+ cache.compute_if_absent(key) { StatementCache.create(connection, &block) }
+ end
- def relation # :nodoc:
- relation = Relation.create(self, arel_table, predicate_builder)
+ def relation
+ relation = Relation.create(self)
- if finder_needs_type_condition? && !ignore_default_scope?
- relation.where(type_condition).create_with(inheritance_column.to_sym => sti_name)
- else
- relation
+ if finder_needs_type_condition? && !ignore_default_scope?
+ relation.where!(type_condition)
+ relation.create_with!(inheritance_column.to_s => sti_name)
+ else
+ relation
+ end
end
- end
- def table_metadata # :nodoc:
- TableMetadata.new(self, arel_table)
- end
+ def table_metadata
+ TableMetadata.new(self, arel_table)
+ end
end
# New objects can be instantiated as either empty (pass no construction parameter) or pre-set with
@@ -310,8 +318,8 @@ module ActiveRecord
# # Instantiates a single new object
# User.new(first_name: 'Jamie')
def initialize(attributes = nil)
- @attributes = self.class._default_attributes.deep_dup
self.class.define_attribute_methods
+ @attributes = self.class._default_attributes.deep_dup
init_internals
initialize_internals_callback
@@ -336,16 +344,26 @@ module ActiveRecord
# post = Post.allocate
# post.init_with(coder)
# post.title # => 'hello world'
- def init_with(coder)
+ def init_with(coder, &block)
coder = LegacyYamlAdapter.convert(self.class, coder)
- @attributes = coder['attributes']
+ attributes = self.class.yaml_encoder.decode(coder)
+ init_with_attributes(attributes, coder["new_record"], &block)
+ end
+ ##
+ # Initialize an empty model object from +attributes+.
+ # +attributes+ should be an attributes object, and unlike the
+ # +initialize+ method, no assignment calls are made per attribute.
+ def init_with_attributes(attributes, new_record = false) # :nodoc:
init_internals
- @new_record = coder['new_record']
+ @new_record = new_record
+ @attributes = attributes
self.class.define_attribute_methods
+ yield self if block_given?
+
_run_find_callbacks
_run_initialize_callbacks
@@ -385,8 +403,10 @@ module ActiveRecord
_run_initialize_callbacks
- @new_record = true
- @destroyed = false
+ @new_record = true
+ @destroyed = false
+ @_start_transaction_state = {}
+ @transaction_state = nil
super
end
@@ -404,11 +424,9 @@ module ActiveRecord
# Post.new.encode_with(coder)
# coder # => {"attributes" => {"id" => nil, ... }}
def encode_with(coder)
- # FIXME: Remove this when we better serialize attributes
- coder['raw_attributes'] = attributes_before_type_cast
- coder['attributes'] = @attributes
- coder['new_record'] = new_record?
- coder['active_record_yaml_version'] = 1
+ self.class.yaml_encoder.encode(@attributes, coder)
+ coder["new_record"] = new_record?
+ coder["active_record_yaml_version"] = 2
end
# Returns true if +comparison_object+ is the same exact object, or +comparison_object+
@@ -432,7 +450,7 @@ module ActiveRecord
# [ Person.find(1), Person.find(2), Person.find(3) ] & [ Person.find(1), Person.find(4) ] # => [ Person.find(1) ]
def hash
if id
- id.hash
+ self.class.hash ^ id.hash
else
super
end
@@ -454,12 +472,20 @@ module ActiveRecord
# Allows sort on objects
def <=>(other_object)
if other_object.is_a?(self.class)
- self.to_key <=> other_object.to_key
+ to_key <=> other_object.to_key
else
super
end
end
+ def present? # :nodoc:
+ true
+ end
+
+ def blank? # :nodoc:
+ false
+ end
+
# Returns +true+ if the record is read only. Records loaded through joins with piggy-back
# attributes will be marked as read only since they cannot be saved.
def readonly?
@@ -480,14 +506,22 @@ module ActiveRecord
# We check defined?(@attributes) not to issue warnings if the object is
# allocated but not initialized.
inspection = if defined?(@attributes) && @attributes
- self.class.column_names.collect { |name|
- if has_attribute?(name)
- "#{name}: #{attribute_for_inspect(name)}"
- end
- }.compact.join(", ")
- else
- "not initialized"
- end
+ self.class.attribute_names.collect do |name|
+ if has_attribute?(name)
+ attr = _read_attribute(name)
+ value = if attr.nil?
+ attr.inspect
+ else
+ attr = format_for_inspect(attr)
+ inspection_filter.filter_param(name, attr)
+ end
+ "#{name}: #{value}"
+ end
+ end.compact.join(", ")
+ else
+ "not initialized"
+ end
+
"#<#{self.class} #{inspection}>"
end
@@ -497,65 +531,75 @@ module ActiveRecord
return super if custom_inspect_method_defined?
pp.object_address_group(self) do
if defined?(@attributes) && @attributes
- column_names = self.class.column_names.select { |name| has_attribute?(name) || new_record? }
- pp.seplist(column_names, proc { pp.text ',' }) do |column_name|
- column_value = read_attribute(column_name)
- pp.breakable ' '
+ attr_names = self.class.attribute_names.select { |name| has_attribute?(name) }
+ pp.seplist(attr_names, proc { pp.text "," }) do |attr_name|
+ pp.breakable " "
pp.group(1) do
- pp.text column_name
- pp.text ':'
+ pp.text attr_name
+ pp.text ":"
pp.breakable
- pp.pp column_value
+ value = _read_attribute(attr_name)
+ value = inspection_filter.filter_param(attr_name, value) unless value.nil?
+ pp.pp value
end
end
else
- pp.breakable ' '
- pp.text 'not initialized'
+ pp.breakable " "
+ pp.text "not initialized"
end
end
end
# Returns a hash of the given methods with their names as keys and returned values as values.
def slice(*methods)
- Hash[methods.map! { |method| [method, public_send(method)] }].with_indifferent_access
+ Hash[methods.flatten.map! { |method| [method, public_send(method)] }].with_indifferent_access
end
private
- # Under Ruby 1.9, Array#flatten will call #to_ary (recursively) on each of the elements
- # of the array, and then rescues from the possible NoMethodError. If those elements are
- # ActiveRecord::Base's, then this triggers the various method_missing's that we have,
- # which significantly impacts upon performance.
- #
- # So we can avoid the method_missing hit by explicitly defining #to_ary as nil here.
- #
- # See also http://tenderlovemaking.com/2011/06/28/til-its-ok-to-return-nil-from-to_ary.html
- def to_ary # :nodoc:
- nil
- end
+ # +Array#flatten+ will call +#to_ary+ (recursively) on each of the elements of
+ # the array, and then rescues from the possible +NoMethodError+. If those elements are
+ # +ActiveRecord::Base+'s, then this triggers the various +method_missing+'s that we have,
+ # which significantly impacts upon performance.
+ #
+ # So we can avoid the +method_missing+ hit by explicitly defining +#to_ary+ as +nil+ here.
+ #
+ # See also https://tenderlovemaking.com/2011/06/28/til-its-ok-to-return-nil-from-to_ary.html
+ def to_ary
+ nil
+ end
- def init_internals
- @readonly = false
- @destroyed = false
- @marked_for_destruction = false
- @destroyed_by_association = nil
- @new_record = true
- @txn = nil
- @_start_transaction_state = {}
- @transaction_state = nil
- end
+ def init_internals
+ @readonly = false
+ @destroyed = false
+ @marked_for_destruction = false
+ @destroyed_by_association = nil
+ @new_record = true
+ @_start_transaction_state = {}
+ @transaction_state = nil
+ end
- def initialize_internals_callback
- end
+ def initialize_internals_callback
+ end
- def thaw
- if frozen?
- @attributes = @attributes.dup
+ def thaw
+ if frozen?
+ @attributes = @attributes.dup
+ end
end
- end
- def custom_inspect_method_defined?
- self.class.instance_method(:inspect).owner != ActiveRecord::Base.instance_method(:inspect).owner
- end
+ def custom_inspect_method_defined?
+ self.class.instance_method(:inspect).owner != ActiveRecord::Base.instance_method(:inspect).owner
+ end
+
+ def inspection_filter
+ @inspection_filter ||= begin
+ mask = DelegateClass(::String).new(ActiveSupport::ParameterFilter::FILTERED)
+ def mask.pretty_print(pp)
+ pp.text __getobj__
+ end
+ ActiveSupport::ParameterFilter.new(self.class.filter_attributes, mask: mask)
+ end
+ end
end
end
diff --git a/activerecord/lib/active_record/counter_cache.rb b/activerecord/lib/active_record/counter_cache.rb
index 1b6817554d..27c1b7a311 100644
--- a/activerecord/lib/active_record/counter_cache.rb
+++ b/activerecord/lib/active_record/counter_cache.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
module ActiveRecord
# = Active Record Counter Cache
module CounterCache
@@ -12,13 +14,21 @@ module ActiveRecord
#
# * +id+ - The id of the object you wish to reset a counter on.
# * +counters+ - One or more association counters to reset. Association name or counter name can be given.
+ # * <tt>:touch</tt> - Touch timestamp columns when updating.
+ # Pass +true+ to touch +updated_at+ and/or +updated_on+. Pass a symbol to
+ # touch that column or an array of symbols to touch just those ones.
#
# ==== Examples
#
- # # For Post with id #1 records reset the comments_count
+ # # For the Post with id #1, reset the comments_count
# Post.reset_counters(1, :comments)
- def reset_counters(id, *counters)
+ #
+ # # Like above, but also touch the +updated_at+ and/or +updated_on+
+ # # attributes.
+ # Post.reset_counters(1, :comments, touch: true)
+ def reset_counters(id, *counters, touch: nil)
object = find(id)
+
counters.each do |counter_association|
has_many_association = _reflect_on_association(counter_association)
unless has_many_association
@@ -26,7 +36,7 @@ module ActiveRecord
has_many_association = has_many.find { |association| association.counter_cache_column && association.counter_cache_column.to_sym == counter_association.to_sym }
counter_association = has_many_association.plural_name if has_many_association
end
- raise ArgumentError, "'#{self.name}' has no association called '#{counter_association}'" unless has_many_association
+ raise ArgumentError, "'#{name}' has no association called '#{counter_association}'" unless has_many_association
if has_many_association.is_a? ActiveRecord::Reflection::ThroughReflection
has_many_association = has_many_association.through_reflection
@@ -37,11 +47,17 @@ module ActiveRecord
reflection = child_class._reflections.values.find { |e| e.belongs_to? && e.foreign_key.to_s == foreign_key && e.options[:counter_cache].present? }
counter_name = reflection.counter_cache_column
- unscoped.where(primary_key => object.id).update_all(
- counter_name => object.send(counter_association).count(:all)
- )
+ updates = { counter_name => object.send(counter_association).count(:all) }
+
+ if touch
+ names = touch if touch != true
+ updates.merge!(touch_attributes_with_time(*names))
+ end
+
+ unscoped.where(primary_key => object.id).update_all(updates)
end
- return true
+
+ true
end
# A generic "counter updater" implementation, intended primarily to be
@@ -55,6 +71,9 @@ module ActiveRecord
# * +id+ - The id of the object you wish to update a counter on or an array of ids.
# * +counters+ - A Hash containing the names of the fields
# to update as keys and the amount to update the field by as values.
+ # * <tt>:touch</tt> - Touch timestamp columns when updating.
+ # If attribute names are passed, they are updated along with updated_at/on
+ # attributes.
#
# ==== Examples
#
@@ -73,14 +92,17 @@ module ActiveRecord
# # UPDATE posts
# # SET comment_count = COALESCE(comment_count, 0) + 1
# # WHERE id IN (10, 15)
+ #
+ # # For the Posts with id of 10 and 15, increment the comment_count by 1
+ # # and update the updated_at value for each counter.
+ # Post.update_counters [10, 15], comment_count: 1, touch: true
+ # # Executes the following SQL:
+ # # UPDATE posts
+ # # SET comment_count = COALESCE(comment_count, 0) + 1,
+ # # `updated_at` = '2016-10-13T09:59:23-05:00'
+ # # WHERE id IN (10, 15)
def update_counters(id, counters)
- updates = counters.map do |counter_name, value|
- operator = value < 0 ? '-' : '+'
- quoted_column = connection.quote_column_name(counter_name)
- "#{quoted_column} = COALESCE(#{quoted_column}, 0) #{operator} #{value.abs}"
- end
-
- unscoped.where(primary_key => id).update_all updates.join(', ')
+ unscoped.where!(primary_key => id).update_counters(counters)
end
# Increment a numeric field by one, via a direct SQL update.
@@ -94,13 +116,20 @@ module ActiveRecord
#
# * +counter_name+ - The name of the field that should be incremented.
# * +id+ - The id of the object that should be incremented or an array of ids.
+ # * <tt>:touch</tt> - Touch timestamp columns when updating.
+ # Pass +true+ to touch +updated_at+ and/or +updated_on+. Pass a symbol to
+ # touch that column or an array of symbols to touch just those ones.
#
# ==== Examples
#
# # Increment the posts_count column for the record with an id of 5
# DiscussionBoard.increment_counter(:posts_count, 5)
- def increment_counter(counter_name, id)
- update_counters(id, counter_name => 1)
+ #
+ # # Increment the posts_count column for the record with an id of 5
+ # # and update the updated_at value.
+ # DiscussionBoard.increment_counter(:posts_count, 5, touch: true)
+ def increment_counter(counter_name, id, touch: nil)
+ update_counters(id, counter_name => 1, touch: touch)
end
# Decrement a numeric field by one, via a direct SQL update.
@@ -112,26 +141,29 @@ module ActiveRecord
#
# * +counter_name+ - The name of the field that should be decremented.
# * +id+ - The id of the object that should be decremented or an array of ids.
+ # * <tt>:touch</tt> - Touch timestamp columns when updating.
+ # Pass +true+ to touch +updated_at+ and/or +updated_on+. Pass a symbol to
+ # touch that column or an array of symbols to touch just those ones.
#
# ==== Examples
#
# # Decrement the posts_count column for the record with an id of 5
# DiscussionBoard.decrement_counter(:posts_count, 5)
- def decrement_counter(counter_name, id)
- update_counters(id, counter_name => -1)
+ #
+ # # Decrement the posts_count column for the record with an id of 5
+ # # and update the updated_at value.
+ # DiscussionBoard.decrement_counter(:posts_count, 5, touch: true)
+ def decrement_counter(counter_name, id, touch: nil)
+ update_counters(id, counter_name => -1, touch: touch)
end
end
private
-
- def _create_record(*)
+ def _create_record(attribute_names = self.attribute_names)
id = super
each_counter_cached_associations do |association|
- if send(association.reflection.name)
- association.increment_counters
- @_after_create_counter_called = true
- end
+ association.increment_counters
end
id
@@ -144,9 +176,7 @@ module ActiveRecord
each_counter_cached_associations do |association|
foreign_key = association.reflection.foreign_key.to_sym
unless destroyed_by_association && destroyed_by_association.foreign_key.to_sym == foreign_key
- if send(association.reflection.name)
- association.decrement_counters
- end
+ association.decrement_counters
end
end
end
@@ -159,6 +189,5 @@ module ActiveRecord
yield association(name.to_sym) if reflection.belongs_to? && reflection.counter_cache_column
end
end
-
end
end
diff --git a/activerecord/lib/active_record/database_configurations.rb b/activerecord/lib/active_record/database_configurations.rb
new file mode 100644
index 0000000000..44b5cfc738
--- /dev/null
+++ b/activerecord/lib/active_record/database_configurations.rb
@@ -0,0 +1,204 @@
+# frozen_string_literal: true
+
+require "active_record/database_configurations/database_config"
+require "active_record/database_configurations/hash_config"
+require "active_record/database_configurations/url_config"
+
+module ActiveRecord
+ # ActiveRecord::DatabaseConfigurations returns an array of DatabaseConfig
+ # objects (either a HashConfig or UrlConfig) that are constructed from the
+ # application's database configuration hash or URL string.
+ class DatabaseConfigurations
+ attr_reader :configurations
+ delegate :any?, to: :configurations
+
+ def initialize(configurations = {})
+ @configurations = build_configs(configurations)
+ end
+
+ # Collects the configs for the environment and optionally the specification
+ # name passed in. To include replica configurations pass <tt>include_replicas: true</tt>.
+ #
+ # If a spec name is provided a single DatabaseConfig object will be
+ # returned, otherwise an array of DatabaseConfig objects will be
+ # returned that corresponds with the environment and type requested.
+ #
+ # ==== Options
+ #
+ # * <tt>env_name:</tt> The environment name. Defaults to +nil+ which will collect
+ # configs for all environments.
+ # * <tt>spec_name:</tt> The specification name (i.e. primary, animals, etc.). Defaults
+ # to +nil+.
+ # * <tt>include_replicas:</tt> Determines whether to include replicas in
+ # the returned list. Most of the time we're only iterating over the write
+ # connection (i.e. migrations don't need to run for the write and read connection).
+ # Defaults to +false+.
+ def configs_for(env_name: nil, spec_name: nil, include_replicas: false)
+ configs = env_with_configs(env_name)
+
+ unless include_replicas
+ configs = configs.select do |db_config|
+ !db_config.replica?
+ end
+ end
+
+ if spec_name
+ configs.find do |db_config|
+ db_config.spec_name == spec_name
+ end
+ else
+ configs
+ end
+ end
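A usage sketch, assuming a hypothetical development environment with "primary" and "animals" specs:

    configs = ActiveRecord::Base.configurations

    configs.configs_for(env_name: "development")
    # => [#<HashConfig ... @spec_name="primary">, #<HashConfig ... @spec_name="animals">]

    configs.configs_for(env_name: "development", spec_name: "animals")
    # => #<HashConfig ... @spec_name="animals">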
+
+ # Returns the config hash that corresponds with the environment
+ #
+ # If the application has multiple databases +default_hash+ will
+ # return the first config hash for the environment.
+ #
+ # { database: "my_db", adapter: "mysql2" }
+ def default_hash(env = ActiveRecord::ConnectionHandling::DEFAULT_ENV.call.to_s)
+ default = find_db_config(env)
+ default.config if default
+ end
+ alias :[] :default_hash
+
+ # Returns a single DatabaseConfig object based on the requested environment.
+ #
+ # If the application has multiple databases +find_db_config+ will return
+ # the first DatabaseConfig for the environment.
+ def find_db_config(env)
+ configurations.find do |db_config|
+ db_config.env_name == env.to_s ||
+ (db_config.for_current_env? && db_config.spec_name == env.to_s)
+ end
+ end
+
+ # Returns the DatabaseConfigurations object as a Hash.
+ def to_h
+ configs = configurations.reverse.inject({}) do |memo, db_config|
+ memo.merge(db_config.to_legacy_hash)
+ end
+
+ Hash[configs.to_a.reverse]
+ end
+
+ # Checks if the application's configurations are empty.
+ #
+ # Aliased to blank?
+ def empty?
+ configurations.empty?
+ end
+ alias :blank? :empty?
+
+ private
+ def env_with_configs(env = nil)
+ if env
+ configurations.select { |db_config| db_config.env_name == env }
+ else
+ configurations
+ end
+ end
+
+ def build_configs(configs)
+ return configs.configurations if configs.is_a?(DatabaseConfigurations)
+ return configs if configs.is_a?(Array)
+
+ build_db_config = configs.each_pair.flat_map do |env_name, config|
+ walk_configs(env_name.to_s, "primary", config)
+ end.flatten.compact
+
+ if url = ENV["DATABASE_URL"]
+ build_url_config(url, build_db_config)
+ else
+ build_db_config
+ end
+ end
+
+ def walk_configs(env_name, spec_name, config)
+ case config
+ when String
+ build_db_config_from_string(env_name, spec_name, config)
+ when Hash
+ build_db_config_from_hash(env_name, spec_name, config.stringify_keys)
+ end
+ end
+
+ def build_db_config_from_string(env_name, spec_name, config)
+ url = config
+ uri = URI.parse(url)
+ if uri.try(:scheme)
+ ActiveRecord::DatabaseConfigurations::UrlConfig.new(env_name, spec_name, url)
+ end
+ rescue URI::InvalidURIError
+ ActiveRecord::DatabaseConfigurations::HashConfig.new(env_name, spec_name, config)
+ end
+
+ def build_db_config_from_hash(env_name, spec_name, config)
+ if config.has_key?("url")
+ url = config["url"]
+ config_without_url = config.dup
+ config_without_url.delete "url"
+
+ ActiveRecord::DatabaseConfigurations::UrlConfig.new(env_name, spec_name, url, config_without_url)
+ elsif config["database"] || (config.size == 1 && config.values.all? { |v| v.is_a? String })
+ ActiveRecord::DatabaseConfigurations::HashConfig.new(env_name, spec_name, config)
+ else
+ config.each_pair.map do |sub_spec_name, sub_config|
+ walk_configs(env_name, sub_spec_name, sub_config)
+ end
+ end
+ end
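As an illustration (database names hypothetical), a three-tier hash yields one config object per spec name:

    ActiveRecord::DatabaseConfigurations.new(
      "development" => {
        "primary" => { "adapter" => "sqlite3", "database" => "db/development.sqlite3" },
        "animals" => { "adapter" => "sqlite3", "database" => "db/animals.sqlite3" }
      }
    ).configurations
    # => two HashConfig objects for env_name "development",
    #    with spec names "primary" and "animals"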
+
+ def build_url_config(url, configs)
+ env = ActiveRecord::ConnectionHandling::DEFAULT_ENV.call.to_s
+
+ if original_config = configs.find(&:for_current_env?)
+ if original_config.url_config?
+ configs
+ else
+ configs.map do |config|
+ ActiveRecord::DatabaseConfigurations::UrlConfig.new(config.env_name, config.spec_name, url, config.config)
+ end
+ end
+ else
+ configs + [ActiveRecord::DatabaseConfigurations::UrlConfig.new(env, "primary", url)]
+ end
+ end
+
+ def method_missing(method, *args, &blk)
+ case method
+ when :each, :first
+ throw_getter_deprecation(method)
+ configurations.send(method, *args, &blk)
+ when :fetch
+ throw_getter_deprecation(method)
+ configs_for(env_name: args.first)
+ when :values
+ throw_getter_deprecation(method)
+ configurations.map(&:config)
+ when :[]=
+ throw_setter_deprecation(method)
+
+ env_name = args[0]
+ config = args[1]
+
+ remaining_configs = configurations.reject { |db_config| db_config.env_name == env_name }
+ new_config = build_configs(env_name => config)
+ new_configs = remaining_configs + new_config
+
+ ActiveRecord::Base.configurations = new_configs
+ else
+ raise NotImplementedError, "`ActiveRecord::Base.configurations` in Rails 6 now returns an object instead of a hash. The `#{method}` method is not supported. Please use `configs_for` or consult the documentation for supported methods."
+ end
+ end
+
+ def throw_setter_deprecation(method)
+ ActiveSupport::Deprecation.warn("Setting `ActiveRecord::Base.configurations` with `#{method}` is deprecated. Use `ActiveRecord::Base.configurations=` directly to set the configurations instead.")
+ end
+
+ def throw_getter_deprecation(method)
+ ActiveSupport::Deprecation.warn("`ActiveRecord::Base.configurations` no longer returns a hash. Methods that act on the hash like `#{method}` are deprecated and will be removed in Rails 6.1. Use the `configs_for` method to collect and iterate over the database configurations.")
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/database_configurations/database_config.rb b/activerecord/lib/active_record/database_configurations/database_config.rb
new file mode 100644
index 0000000000..adc37cc439
--- /dev/null
+++ b/activerecord/lib/active_record/database_configurations/database_config.rb
@@ -0,0 +1,37 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ class DatabaseConfigurations
+ # ActiveRecord::Base.configurations will return either a HashConfig or
+ # a UrlConfig. It will never return a DatabaseConfig object,
+ # as this is the parent class for the types of database configuration objects.
+ class DatabaseConfig # :nodoc:
+ attr_reader :env_name, :spec_name
+
+ def initialize(env_name, spec_name)
+ @env_name = env_name
+ @spec_name = spec_name
+ end
+
+ def replica?
+ raise NotImplementedError
+ end
+
+ def migrations_paths
+ raise NotImplementedError
+ end
+
+ def url_config?
+ false
+ end
+
+ def to_legacy_hash
+ { env_name => config }
+ end
+
+ def for_current_env?
+ env_name == ActiveRecord::ConnectionHandling::DEFAULT_ENV.call
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/database_configurations/hash_config.rb b/activerecord/lib/active_record/database_configurations/hash_config.rb
new file mode 100644
index 0000000000..e31ff09391
--- /dev/null
+++ b/activerecord/lib/active_record/database_configurations/hash_config.rb
@@ -0,0 +1,50 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ class DatabaseConfigurations
+ # A HashConfig object is created for each database configuration entry that
+ # is created from a hash.
+ #
+ # A hash config:
+ #
+ # { "development" => { "database" => "db_name" } }
+ #
+ # Becomes:
+ #
+ # #<ActiveRecord::DatabaseConfigurations::HashConfig:0x00007fd1acbded10
+ # @env_name="development", @spec_name="primary", @config={"database"=>"db_name"}>
+ #
+ # ==== Options
+ #
+ # * <tt>:env_name</tt> - The Rails environment, i.e. "development".
+ # * <tt>:spec_name</tt> - The specification name. In a standard two-tier
+ # database configuration this will default to "primary". In a multiple
+ # database three-tier database configuration this corresponds to the name
+ # used in the second tier, for example "primary_readonly".
+ # * <tt>:config</tt> - The config hash. This is the hash that contains the
+ # database adapter, name, and other important information for database
+ # connections.
+ class HashConfig < DatabaseConfig
+ attr_reader :config
+
+ def initialize(env_name, spec_name, config)
+ super(env_name, spec_name)
+ @config = config
+ end
+
+ # Determines whether a database configuration is for a replica / readonly
+ # connection. If the +replica+ key is present in the config, +replica?+ will
+ # return +true+.
+ def replica?
+ config["replica"]
+ end
+
+ # The migrations paths for a database configuration. If the
+ # +migrations_paths+ key is present in the config, +migrations_paths+
+ # will return its value.
+ def migrations_paths
+ config["migrations_paths"]
+ end
+ end
+ end
+end
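A short sketch of how a HashConfig built from a hash like the one above behaves; the replica and migrations_paths values are illustrative:

    db_config = ActiveRecord::DatabaseConfigurations::HashConfig.new(
      "development", "primary",
      { "database" => "db_name", "replica" => true, "migrations_paths" => "db/replica_migrate" }
    )

    db_config.replica?         # => true
    db_config.migrations_paths # => "db/replica_migrate"
    db_config.to_legacy_hash   # => { "development" => { "database" => "db_name", ... } }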
diff --git a/activerecord/lib/active_record/database_configurations/url_config.rb b/activerecord/lib/active_record/database_configurations/url_config.rb
new file mode 100644
index 0000000000..e2d30ae416
--- /dev/null
+++ b/activerecord/lib/active_record/database_configurations/url_config.rb
@@ -0,0 +1,79 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ class DatabaseConfigurations
+ # A UrlConfig object is created for each database configuration
+ # entry that is created from a URL. This can either be a URL string
+ # or a hash with a URL in place of the config hash.
+ #
+ # A URL config:
+ #
+ # postgres://localhost/foo
+ #
+ # Becomes:
+ #
+ # #<ActiveRecord::DatabaseConfigurations::UrlConfig:0x00007fdc3238f340
+ # @env_name="default_env", @spec_name="primary",
+ # @config={"adapter"=>"postgresql", "database"=>"foo", "host"=>"localhost"},
+ # @url="postgres://localhost/foo">
+ #
+ # ==== Options
+ #
+ # * <tt>:env_name</tt> - The Rails environment, i.e. "development".
+ # * <tt>:spec_name</tt> - The specification name. In a standard two-tier
+ # database configuration this will default to "primary". In a multiple
+ # database three-tier database configuration this corresponds to the name
+ # used in the second tier, for example "primary_readonly".
+ # * <tt>:url</tt> - The database URL.
+ # * <tt>:config</tt> - The config hash. This is the hash that contains the
+ # database adapter, name, and other important information for database
+ # connections.
+ class UrlConfig < DatabaseConfig
+ attr_reader :url, :config
+
+ def initialize(env_name, spec_name, url, config = {})
+ super(env_name, spec_name)
+ @config = build_config(config, url)
+ @url = url
+ end
+
+ def url_config? # :nodoc:
+ true
+ end
+
+ # Determines whether a database configuration is for a replica / readonly
+ # connection. If the +replica+ key is present in the config, +replica?+ will
+ # return +true+.
+ def replica?
+ config["replica"]
+ end
+
+ # The migrations paths for a database configuration. If the
+ # +migrations_paths+ key is present in the config, +migrations_paths+
+ # will return its value.
+ def migrations_paths
+ config["migrations_paths"]
+ end
+
+ private
+
+ def build_url_hash(url)
+ if url.nil? || /^jdbc:/.match?(url)
+ { "url" => url }
+ else
+ ActiveRecord::ConnectionAdapters::ConnectionSpecification::ConnectionUrlResolver.new(url).to_hash
+ end
+ end
+
+ def build_config(original_config, url)
+ hash = build_url_hash(url)
+
+ if original_config[env_name]
+ original_config[env_name].merge(hash)
+ else
+ original_config.merge(hash)
+ end
+ end
+ end
+ end
+end
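A sketch of the URL resolution shown in the class comment; the exact keys come from ConnectionUrlResolver, so the resulting hash is indicative only:

    url_config = ActiveRecord::DatabaseConfigurations::UrlConfig.new(
      "default_env", "primary", "postgres://localhost/foo"
    )

    url_config.url_config? # => true
    url_config.url         # => "postgres://localhost/foo"
    url_config.config      # => { "adapter" => "postgresql", "database" => "foo", "host" => "localhost" }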
diff --git a/activerecord/lib/active_record/define_callbacks.rb b/activerecord/lib/active_record/define_callbacks.rb
new file mode 100644
index 0000000000..87ecd7cec5
--- /dev/null
+++ b/activerecord/lib/active_record/define_callbacks.rb
@@ -0,0 +1,22 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ # This module exists because ActiveRecord::AttributeMethods::Dirty needs to
+ # define callbacks while still having its version of +save+ be the super
+ # method of ActiveRecord::Callbacks. It will be removed once the deprecated
+ # code that requires it is removed.
+ module DefineCallbacks
+ extend ActiveSupport::Concern
+
+ module ClassMethods # :nodoc:
+ include ActiveModel::Callbacks
+ end
+
+ included do
+ include ActiveModel::Validations::Callbacks
+
+ define_model_callbacks :initialize, :find, :touch, only: :after
+ define_model_callbacks :save, :create, :update, :destroy
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/dynamic_matchers.rb b/activerecord/lib/active_record/dynamic_matchers.rb
index b6dd6814db..3bb8c6f4e3 100644
--- a/activerecord/lib/active_record/dynamic_matchers.rb
+++ b/activerecord/lib/active_record/dynamic_matchers.rb
@@ -1,121 +1,122 @@
+# frozen_string_literal: true
+
module ActiveRecord
module DynamicMatchers #:nodoc:
- def respond_to?(name, include_private = false)
- if self == Base
- super
- else
+ private
+ def respond_to_missing?(name, _)
+ if self == Base
+ super
+ else
+ match = Method.match(self, name)
+ match && match.valid? || super
+ end
+ end
+
+ def method_missing(name, *arguments, &block)
match = Method.match(self, name)
- match && match.valid? || super
+
+ if match && match.valid?
+ match.define
+ send(name, *arguments, &block)
+ else
+ super
+ end
end
- end
- private
+ class Method
+ @matchers = []
- def method_missing(name, *arguments, &block)
- match = Method.match(self, name)
+ class << self
+ attr_reader :matchers
- if match && match.valid?
- match.define
- send(name, *arguments, &block)
- else
- super
- end
- end
+ def match(model, name)
+ klass = matchers.find { |k| k.pattern.match?(name) }
+ klass.new(model, name) if klass
+ end
- class Method
- @matchers = []
+ def pattern
+ @pattern ||= /\A#{prefix}_([_a-zA-Z]\w*)#{suffix}\Z/
+ end
- class << self
- attr_reader :matchers
+ def prefix
+ raise NotImplementedError
+ end
- def match(model, name)
- klass = matchers.find { |k| name =~ k.pattern }
- klass.new(model, name) if klass
+ def suffix
+ ""
+ end
end
- def pattern
- @pattern ||= /\A#{prefix}_([_a-zA-Z]\w*)#{suffix}\Z/
- end
+ attr_reader :model, :name, :attribute_names
- def prefix
- raise NotImplementedError
+ def initialize(model, name)
+ @model = model
+ @name = name.to_s
+ @attribute_names = @name.match(self.class.pattern)[1].split("_and_")
+ @attribute_names.map! { |n| @model.attribute_aliases[n] || n }
end
- def suffix
- ''
+ def valid?
+ attribute_names.all? { |name| model.columns_hash[name] || model.reflect_on_aggregation(name.to_sym) }
end
- end
-
- attr_reader :model, :name, :attribute_names
- def initialize(model, name)
- @model = model
- @name = name.to_s
- @attribute_names = @name.match(self.class.pattern)[1].split('_and_')
- @attribute_names.map! { |n| @model.attribute_aliases[n] || n }
- end
+ def define
+ model.class_eval <<-CODE, __FILE__, __LINE__ + 1
+ def self.#{name}(#{signature})
+ #{body}
+ end
+ CODE
+ end
- def valid?
- attribute_names.all? { |name| model.columns_hash[name] || model.reflect_on_aggregation(name.to_sym) }
- end
+ private
- def define
- model.class_eval <<-CODE, __FILE__, __LINE__ + 1
- def self.#{name}(#{signature})
- #{body}
+ def body
+ "#{finder}(#{attributes_hash})"
end
- CODE
- end
-
- private
-
- def body
- "#{finder}(#{attributes_hash})"
- end
- # The parameters in the signature may have reserved Ruby words, in order
- # to prevent errors, we start each param name with `_`.
- def signature
- attribute_names.map { |name| "_#{name}" }.join(', ')
- end
+ # The parameters in the signature may have reserved Ruby words; in order
+ # to prevent errors, we start each param name with `_`.
+ def signature
+ attribute_names.map { |name| "_#{name}" }.join(", ")
+ end
- # Given that the parameters starts with `_`, the finder needs to use the
- # same parameter name.
- def attributes_hash
- "{" + attribute_names.map { |name| ":#{name} => _#{name}" }.join(',') + "}"
- end
+ # Given that the parameters start with `_`, the finder needs to use the
+ # same parameter names.
+ def attributes_hash
+ "{" + attribute_names.map { |name| ":#{name} => _#{name}" }.join(",") + "}"
+ end
- def finder
- raise NotImplementedError
+ def finder
+ raise NotImplementedError
+ end
end
- end
- class FindBy < Method
- Method.matchers << self
+ class FindBy < Method
+ Method.matchers << self
- def self.prefix
- "find_by"
- end
+ def self.prefix
+ "find_by"
+ end
- def finder
- "find_by"
+ def finder
+ "find_by"
+ end
end
- end
- class FindByBang < Method
- Method.matchers << self
+ class FindByBang < Method
+ Method.matchers << self
- def self.prefix
- "find_by"
- end
+ def self.prefix
+ "find_by"
+ end
- def self.suffix
- "!"
- end
+ def self.suffix
+ "!"
+ end
- def finder
- "find_by!"
+ def finder
+ "find_by!"
+ end
end
- end
end
end
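To illustrate the refactored matcher above, a hedged sketch with a hypothetical Person model that has name and email columns:

    Person.find_by_name_and_email("Jon", "jon@example.com")
    # method_missing defines roughly the following and then calls it:
    #   def self.find_by_name_and_email(_name, _email)
    #     find_by({ :name => _name, :email => _email })
    #   end

    Person.find_by_name_and_email!("Jon", "jon@example.com") # FindByBang: raises if nothing is found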
diff --git a/activerecord/lib/active_record/enum.rb b/activerecord/lib/active_record/enum.rb
index 7be332fb97..8077630aeb 100644
--- a/activerecord/lib/active_record/enum.rb
+++ b/activerecord/lib/active_record/enum.rb
@@ -1,4 +1,6 @@
-require 'active_support/core_ext/object/deep_dup'
+# frozen_string_literal: true
+
+require "active_support/core_ext/object/deep_dup"
module ActiveRecord
# Declare an enum attribute where the values map to integers in the database,
@@ -29,7 +31,9 @@ module ActiveRecord
# as well. With the above example:
#
# Conversation.active
+ # Conversation.not_active
# Conversation.archived
+ # Conversation.not_archived
#
# Of course, you can also query them directly if the scopes don't fit your
# needs:
@@ -95,8 +99,7 @@ module ActiveRecord
module Enum
def self.extended(base) # :nodoc:
- base.class_attribute(:defined_enums, instance_writer: false)
- base.defined_enums = {}
+ base.class_attribute(:defined_enums, instance_writer: false, default: {})
end
def inherited(base) # :nodoc:
@@ -105,6 +108,8 @@ module ActiveRecord
end
class EnumType < Type::Value # :nodoc:
+ delegate :type, to: :subtype
+
def initialize(name, mapping, subtype)
@name = name
@mapping = mapping
@@ -138,34 +143,37 @@ module ActiveRecord
end
end
- protected
-
- attr_reader :name, :mapping, :subtype
+ private
+ attr_reader :name, :mapping, :subtype
end
def enum(definitions)
klass = self
enum_prefix = definitions.delete(:_prefix)
enum_suffix = definitions.delete(:_suffix)
+ enum_scopes = definitions.delete(:_scopes)
definitions.each do |name, values|
+ assert_valid_enum_definition_values(values)
# statuses = { }
enum_values = ActiveSupport::HashWithIndifferentAccess.new
- name = name.to_sym
+ name = name.to_s
# def self.statuses() statuses end
- detect_enum_conflict!(name, name.to_s.pluralize, true)
- klass.singleton_class.send(:define_method, name.to_s.pluralize) { enum_values }
+ detect_enum_conflict!(name, name.pluralize, true)
+ singleton_class.define_method(name.pluralize) { enum_values }
+ defined_enums[name] = enum_values
detect_enum_conflict!(name, name)
detect_enum_conflict!(name, "#{name}=")
- decorate_attribute_type(name, :enum) do |subtype|
- EnumType.new(name, enum_values, subtype)
+ attr = attribute_alias?(name) ? attribute_alias(name) : name
+ decorate_attribute_type(attr, :enum) do |subtype|
+ EnumType.new(attr, enum_values, subtype)
end
_enum_methods_module.module_eval do
pairs = values.respond_to?(:each_pair) ? values.each_pair : values.each_with_index
- pairs.each do |value, i|
+ pairs.each do |label, value|
if enum_prefix == true
prefix = "#{name}_"
elsif enum_prefix
@@ -177,23 +185,30 @@ module ActiveRecord
suffix = "_#{enum_suffix}"
end
- value_method_name = "#{prefix}#{value}#{suffix}"
- enum_values[value] = i
+ value_method_name = "#{prefix}#{label}#{suffix}"
+ enum_values[label] = value
+ label = label.to_s
- # def active?() status == 0 end
+ # def active?() status == "active" end
klass.send(:detect_enum_conflict!, name, "#{value_method_name}?")
- define_method("#{value_method_name}?") { self[name] == value.to_s }
+ define_method("#{value_method_name}?") { self[attr] == label }
- # def active!() update! status: :active end
+ # def active!() update!(status: 0) end
klass.send(:detect_enum_conflict!, name, "#{value_method_name}!")
- define_method("#{value_method_name}!") { update! name => value }
+ define_method("#{value_method_name}!") { update!(attr => value) }
- # scope :active, -> { where status: 0 }
- klass.send(:detect_enum_conflict!, name, value_method_name, true)
- klass.scope value_method_name, -> { where(name => value) }
+ # scope :active, -> { where(status: 0) }
+ # scope :not_active, -> { where.not(status: 0) }
+ if enum_scopes != false
+ klass.send(:detect_enum_conflict!, name, value_method_name, true)
+ klass.scope value_method_name, -> { where(attr => value) }
+
+ klass.send(:detect_enum_conflict!, name, "not_#{value_method_name}", true)
+ klass.scope "not_#{value_method_name}", -> { where.not(attr => value) }
+ end
end
end
- defined_enums[name.to_s] = enum_values
+ enum_values.freeze
end
end
@@ -206,25 +221,41 @@ module ActiveRecord
end
end
+ def assert_valid_enum_definition_values(values)
+ unless values.is_a?(Hash) || values.all? { |v| v.is_a?(Symbol) } || values.all? { |v| v.is_a?(String) }
+ error_message = <<~MSG
+ Enum values #{values} must be either a hash, an array of symbols, or an array of strings.
+ MSG
+ raise ArgumentError, error_message
+ end
+
+ if values.is_a?(Hash) && values.keys.any?(&:blank?) || values.is_a?(Array) && values.any?(&:blank?)
+ raise ArgumentError, "Enum label name must not be blank."
+ end
+ end
+
ENUM_CONFLICT_MESSAGE = \
"You tried to define an enum named \"%{enum}\" on the model \"%{klass}\", but " \
"this will generate a %{type} method \"%{method}\", which is already defined " \
"by %{source}."
+ private_constant :ENUM_CONFLICT_MESSAGE
def detect_enum_conflict!(enum_name, method_name, klass_method = false)
if klass_method && dangerous_class_method?(method_name)
- raise_conflict_error(enum_name, method_name, type: 'class')
+ raise_conflict_error(enum_name, method_name, type: "class")
+ elsif klass_method && method_defined_within?(method_name, Relation)
+ raise_conflict_error(enum_name, method_name, type: "class", source: Relation.name)
elsif !klass_method && dangerous_attribute_method?(method_name)
raise_conflict_error(enum_name, method_name)
elsif !klass_method && method_defined_within?(method_name, _enum_methods_module, Module)
- raise_conflict_error(enum_name, method_name, source: 'another enum')
+ raise_conflict_error(enum_name, method_name, source: "another enum")
end
end
- def raise_conflict_error(enum_name, method_name, type: 'instance', source: 'Active Record')
+ def raise_conflict_error(enum_name, method_name, type: "instance", source: "Active Record")
raise ArgumentError, ENUM_CONFLICT_MESSAGE % {
enum: enum_name,
- klass: self.name,
+ klass: name,
type: type,
method: method_name,
source: source
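A sketch of the enum options this hunk touches, using a hypothetical Conversation model:

    class Conversation < ActiveRecord::Base
      enum status: { active: 0, archived: 1 }, _prefix: true
    end

    Conversation.status_active      # => where(status: 0)
    Conversation.not_status_active  # negative scope added here => where.not(status: 0)

    # Pass _scopes: false to skip generating both the positive and negative scopes:
    #   enum status: { active: 0, archived: 1 }, _scopes: false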
diff --git a/activerecord/lib/active_record/errors.rb b/activerecord/lib/active_record/errors.rb
index 2ec9bf3d67..60cf9818c1 100644
--- a/activerecord/lib/active_record/errors.rb
+++ b/activerecord/lib/active_record/errors.rb
@@ -1,5 +1,6 @@
-module ActiveRecord
+# frozen_string_literal: true
+module ActiveRecord
# = Active Record Errors
#
# Generic Active Record exception class.
@@ -44,10 +45,14 @@ module ActiveRecord
# Raised when connection to the database could not been established (for example when
# {ActiveRecord::Base.connection=}[rdoc-ref:ConnectionHandling#connection]
- # is given a nil object).
+ # is given a +nil+ object).
class ConnectionNotEstablished < ActiveRecordError
end
+ # Raised when a write to the database is attempted on a read only connection.
+ class ReadOnlyError < ActiveRecordError
+ end
+
# Raised when Active Record cannot find a record by given id or set of ids.
class RecordNotFound < ActiveRecordError
attr_reader :model, :primary_key, :id
@@ -63,7 +68,7 @@ module ActiveRecord
# Raised by {ActiveRecord::Base#save!}[rdoc-ref:Persistence#save!] and
# {ActiveRecord::Base.create!}[rdoc-ref:Persistence::ClassMethods#create!]
- # methods when a record is invalid and can not be saved.
+ # methods when a record is invalid and cannot be saved.
class RecordNotSaved < ActiveRecordError
attr_reader :record
@@ -96,20 +101,13 @@ module ActiveRecord
#
# Wraps the underlying database error as +cause+.
class StatementInvalid < ActiveRecordError
-
- def initialize(message = nil, original_exception = nil)
- if original_exception
- ActiveSupport::Deprecation.warn("Passing #original_exception is deprecated and has no effect. " \
- "Exceptions will automatically capture the original exception.", caller)
- end
-
+ def initialize(message = nil, sql: nil, binds: nil)
super(message || $!.try(:message))
+ @sql = sql
+ @binds = binds
end
- def original_exception
- ActiveSupport::Deprecation.warn("#original_exception is deprecated. Use #cause instead.", caller)
- cause
- end
+ attr_reader :sql, :binds
end
# Defunct wrapper class kept for compatibility.
@@ -117,14 +115,60 @@ module ActiveRecord
class WrappedDatabaseException < StatementInvalid
end
- # Raised when a record cannot be inserted because it would violate a uniqueness constraint.
+ # Raised when a record cannot be inserted or updated because it would violate a uniqueness constraint.
class RecordNotUnique < WrappedDatabaseException
end
- # Raised when a record cannot be inserted or updated because it references a non-existent record.
+ # Raised when a record cannot be inserted or updated because it references a non-existent record,
+ # or when a record cannot be deleted because a parent record references it.
class InvalidForeignKey < WrappedDatabaseException
end
+ # Raised when a foreign key constraint cannot be added because the column type does not match the referenced column type.
+ class MismatchedForeignKey < StatementInvalid
+ def initialize(
+ message: nil,
+ sql: nil,
+ binds: nil,
+ table: nil,
+ foreign_key: nil,
+ target_table: nil,
+ primary_key: nil,
+ primary_key_column: nil
+ )
+ if table
+ type = primary_key_column.bigint? ? :bigint : primary_key_column.type
+ msg = <<~EOM.squish
+ Column `#{foreign_key}` on table `#{table}` does not match column `#{primary_key}` on `#{target_table}`,
+ which has type `#{primary_key_column.sql_type}`.
+ To resolve this issue, change the type of the `#{foreign_key}` column on `#{table}` to be :#{type}.
+ (For example `t.#{type} :#{foreign_key}`).
+ EOM
+ else
+ msg = <<~EOM.squish
+ There is a mismatch between the foreign key and primary key column types.
+ Verify that the foreign key column type and the primary key of the associated table match types.
+ EOM
+ end
+ if message
+ msg << "\nOriginal message: #{message}"
+ end
+ super(msg, sql: sql, binds: binds)
+ end
+ end
+
+ # Raised when a record cannot be inserted or updated because it would violate a not null constraint.
+ class NotNullViolation < StatementInvalid
+ end
+
+ # Raised when a record cannot be inserted or updated because a value is too long for a column type.
+ class ValueTooLong < StatementInvalid
+ end
+
+ # Raised when a value used in a statement is out of range for its column type.
+ class RangeError < StatementInvalid
+ end
+
# Raised when number of bind variables in statement given to +:condition+ key
# (for example, when using {ActiveRecord::Base.find}[rdoc-ref:FinderMethods#find] method)
# does not match number of expected values supplied.
@@ -139,7 +183,7 @@ module ActiveRecord
class NoDatabaseError < StatementInvalid
end
- # Raised when Postgres returns 'cached plan must not change result type' and
+ # Raised when PostgreSQL returns 'cached plan must not change result type' and
# we cannot retry gracefully (e.g. inside a transaction)
class PreparedStatementCacheExpired < StatementInvalid
end
@@ -162,7 +206,6 @@ module ActiveRecord
super("Stale object error.")
end
end
-
end
# Raised when association is being configured improperly or user tries to use
@@ -281,8 +324,65 @@ module ActiveRecord
class TransactionIsolationError < ActiveRecordError
end
+ # TransactionRollbackError will be raised when a transaction is rolled
+ # back by the database due to a serialization failure or a deadlock.
+ #
+ # See the following:
+ #
+ # * https://www.postgresql.org/docs/current/static/transaction-iso.html
+ # * https://dev.mysql.com/doc/refman/5.7/en/error-messages-server.html#error_er_lock_deadlock
+ class TransactionRollbackError < StatementInvalid
+ end
+
+ # SerializationFailure will be raised when a transaction is rolled
+ # back by the database due to a serialization failure.
+ class SerializationFailure < TransactionRollbackError
+ end
+
+ # Deadlocked will be raised when a transaction is rolled
+ # back by the database when a deadlock is encountered.
+ class Deadlocked < TransactionRollbackError
+ end
+
# IrreversibleOrderError is raised when a relation's order is too complex for
# +reverse_order+ to automatically reverse.
class IrreversibleOrderError < ActiveRecordError
end
+
+ # LockWaitTimeout will be raised when a lock wait timeout is exceeded.
+ class LockWaitTimeout < StatementInvalid
+ end
+
+ # StatementTimeout will be raised when a statement timeout is exceeded.
+ class StatementTimeout < StatementInvalid
+ end
+
+ # QueryCanceled will be raised when a statement is canceled due to a user request.
+ class QueryCanceled < StatementInvalid
+ end
+
+ # UnknownAttributeReference is raised when an unknown and potentially unsafe
+ # value is passed to a query method when allow_unsafe_raw_sql is set to
+ # :disabled. For example, passing a value that is not a column name to a
+ # relation's #order method might cause this exception.
+ #
+ # When working around this exception, caution should be taken to avoid SQL
+ # injection vulnerabilities when passing user-provided values to query
+ # methods. Known-safe values can be passed to query methods by wrapping them
+ # in Arel.sql.
+ #
+ # For example, with allow_unsafe_raw_sql set to :disabled, the following
+ # code would raise this exception:
+ #
+ # Post.order("length(title)").first
+ #
+ # The desired result can be accomplished by wrapping the known-safe string
+ # in Arel.sql:
+ #
+ # Post.order(Arel.sql("length(title)")).first
+ #
+ # Again, such a workaround should *not* be used when passing user-provided
+ # values, such as request parameters or model attributes to query methods.
+ class UnknownAttributeReference < ActiveRecordError
+ end
end
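Not prescribed by this patch, but a common way to handle the new TransactionRollbackError subclasses is a bounded retry; the Account model here is hypothetical:

    attempts = 0
    begin
      Account.transaction do
        # conflicting updates happen here
      end
    rescue ActiveRecord::Deadlocked, ActiveRecord::SerializationFailure
      attempts += 1
      retry if attempts < 3
      raise
    end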
diff --git a/activerecord/lib/active_record/explain.rb b/activerecord/lib/active_record/explain.rb
index 727a9befc1..919e96cd7a 100644
--- a/activerecord/lib/active_record/explain.rb
+++ b/activerecord/lib/active_record/explain.rb
@@ -1,5 +1,6 @@
-require 'active_support/lazy_load_hooks'
-require 'active_record/explain_registry'
+# frozen_string_literal: true
+
+require "active_record/explain_registry"
module ActiveRecord
module Explain
@@ -16,15 +17,14 @@ module ActiveRecord
# Makes the adapter execute EXPLAIN for the tuples of queries and bindings.
# Returns a formatted string ready to be logged.
def exec_explain(queries) # :nodoc:
- str = queries.map do |sql, bind|
- [].tap do |msg|
- msg << "EXPLAIN for: #{sql}"
- unless bind.empty?
- bind_msg = bind.map {|col, val| [col.name, val]}.inspect
- msg.last << " #{bind_msg}"
- end
- msg << connection.explain(sql, bind)
- end.join("\n")
+ str = queries.map do |sql, binds|
+ msg = +"EXPLAIN for: #{sql}"
+ unless binds.empty?
+ msg << " "
+ msg << binds.map { |attr| render_bind(attr) }.inspect
+ end
+ msg << "\n"
+ msg << connection.explain(sql, binds)
end.join("\n")
# Overriding inspect to be more human readable, especially in the console.
@@ -34,5 +34,17 @@ module ActiveRecord
str
end
+
+ private
+
+ def render_bind(attr)
+ value = if attr.type.binary? && attr.value
+ "<#{attr.value_for_database.to_s.bytesize} bytes of binary data>"
+ else
+ connection.type_cast(attr.value_for_database)
+ end
+
+ [attr.name, value]
+ end
end
end
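exec_explain backs the public Relation#explain; a minimal sketch with a hypothetical User model (output abbreviated and adapter dependent):

    puts User.where(id: 1).joins(:posts).explain
    # EXPLAIN for: SELECT "users".* FROM "users" INNER JOIN "posts" ... [["id", 1]]
    # ...adapter-specific EXPLAIN output follows...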
diff --git a/activerecord/lib/active_record/explain_registry.rb b/activerecord/lib/active_record/explain_registry.rb
index b652932f9c..7fd078941a 100644
--- a/activerecord/lib/active_record/explain_registry.rb
+++ b/activerecord/lib/active_record/explain_registry.rb
@@ -1,4 +1,6 @@
-require 'active_support/per_thread_registry'
+# frozen_string_literal: true
+
+require "active_support/per_thread_registry"
module ActiveRecord
# This is a thread locals registry for EXPLAIN. For example
diff --git a/activerecord/lib/active_record/explain_subscriber.rb b/activerecord/lib/active_record/explain_subscriber.rb
index 90bcf5a205..a86217abc0 100644
--- a/activerecord/lib/active_record/explain_subscriber.rb
+++ b/activerecord/lib/active_record/explain_subscriber.rb
@@ -1,5 +1,7 @@
-require 'active_support/notifications'
-require 'active_record/explain_registry'
+# frozen_string_literal: true
+
+require "active_support/notifications"
+require "active_record/explain_registry"
module ActiveRecord
class ExplainSubscriber # :nodoc:
@@ -18,10 +20,13 @@ module ActiveRecord
#
# On the other hand, we want to monitor the performance of our real database
# queries, not the performance of the access to the query cache.
- IGNORED_PAYLOADS = %w(SCHEMA EXPLAIN CACHE)
+ IGNORED_PAYLOADS = %w(SCHEMA EXPLAIN)
EXPLAINED_SQLS = /\A\s*(with|select|update|delete|insert)\b/i
def ignore_payload?(payload)
- payload[:exception] || IGNORED_PAYLOADS.include?(payload[:name]) || payload[:sql] !~ EXPLAINED_SQLS
+ payload[:exception] ||
+ payload[:cached] ||
+ IGNORED_PAYLOADS.include?(payload[:name]) ||
+ payload[:sql] !~ EXPLAINED_SQLS
end
ActiveSupport::Notifications.subscribe("sql.active_record", new)
diff --git a/activerecord/lib/active_record/fixture_set/file.rb b/activerecord/lib/active_record/fixture_set/file.rb
index e4a44244e2..f1ea0e022f 100644
--- a/activerecord/lib/active_record/fixture_set/file.rb
+++ b/activerecord/lib/active_record/fixture_set/file.rb
@@ -1,5 +1,7 @@
-require 'erb'
-require 'yaml'
+# frozen_string_literal: true
+
+require "erb"
+require "yaml"
module ActiveRecord
class FixtureSet
@@ -24,21 +26,21 @@ module ActiveRecord
end
def model_class
- config_row['model_class']
+ config_row["model_class"]
end
private
def rows
- @rows ||= raw_rows.reject { |fixture_name, _| fixture_name == '_fixture' }
+ @rows ||= raw_rows.reject { |fixture_name, _| fixture_name == "_fixture" }
end
def config_row
@config_row ||= begin
- row = raw_rows.find { |fixture_name, _| fixture_name == '_fixture' }
+ row = raw_rows.find { |fixture_name, _| fixture_name == "_fixture" }
if row
row.last
else
- {'model_class': nil}
+ { 'model_class': nil }
end
end
end
@@ -66,10 +68,13 @@ module ActiveRecord
# Validate our unmarshalled data.
def validate(data)
unless Hash === data || YAML::Omap === data
- raise Fixture::FormatError, 'fixture is not a hash'
+ raise Fixture::FormatError, "fixture is not a hash: #{@file}"
end
- raise Fixture::FormatError unless data.all? { |name, row| Hash === row }
+ invalid = data.reject { |_, row| Hash === row }
+ if invalid.any?
+ raise Fixture::FormatError, "fixture key is not a hash: #{@file}, keys: #{invalid.keys.inspect}"
+ end
data
end
end
diff --git a/activerecord/lib/active_record/fixture_set/model_metadata.rb b/activerecord/lib/active_record/fixture_set/model_metadata.rb
new file mode 100644
index 0000000000..fb23df6f45
--- /dev/null
+++ b/activerecord/lib/active_record/fixture_set/model_metadata.rb
@@ -0,0 +1,33 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ class FixtureSet
+ class ModelMetadata # :nodoc:
+ def initialize(model_class)
+ @model_class = model_class
+ end
+
+ def primary_key_name
+ @primary_key_name ||= @model_class && @model_class.primary_key
+ end
+
+ def primary_key_type
+ @primary_key_type ||= @model_class && @model_class.type_for_attribute(@model_class.primary_key).type
+ end
+
+ def has_primary_key_column?
+ @has_primary_key_column ||= primary_key_name &&
+ @model_class.columns.any? { |col| col.name == primary_key_name }
+ end
+
+ def timestamp_column_names
+ @timestamp_column_names ||=
+ %w(created_at created_on updated_at updated_on) & @model_class.column_names
+ end
+
+ def inheritance_column_name
+ @inheritance_column_name ||= @model_class && @model_class.inheritance_column
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/fixture_set/render_context.rb b/activerecord/lib/active_record/fixture_set/render_context.rb
new file mode 100644
index 0000000000..c90b5343dc
--- /dev/null
+++ b/activerecord/lib/active_record/fixture_set/render_context.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+# NOTE: This class has to be defined in compact style in
+# order for rendering context subclassing to work correctly.
+class ActiveRecord::FixtureSet::RenderContext # :nodoc:
+ def self.create_subclass
+ Class.new(ActiveRecord::FixtureSet.context_class) do
+ def get_binding
+ binding()
+ end
+
+ def binary(path)
+ %(!!binary "#{Base64.strict_encode64(File.read(path))}")
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/fixture_set/table_row.rb b/activerecord/lib/active_record/fixture_set/table_row.rb
new file mode 100644
index 0000000000..cb4726f1ee
--- /dev/null
+++ b/activerecord/lib/active_record/fixture_set/table_row.rb
@@ -0,0 +1,153 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ class FixtureSet
+ class TableRow # :nodoc:
+ class ReflectionProxy # :nodoc:
+ def initialize(association)
+ @association = association
+ end
+
+ def join_table
+ @association.join_table
+ end
+
+ def name
+ @association.name
+ end
+
+ def primary_key_type
+ @association.klass.type_for_attribute(@association.klass.primary_key).type
+ end
+ end
+
+ class HasManyThroughProxy < ReflectionProxy # :nodoc:
+ def rhs_key
+ @association.foreign_key
+ end
+
+ def lhs_key
+ @association.through_reflection.foreign_key
+ end
+
+ def join_table
+ @association.through_reflection.table_name
+ end
+ end
+
+ def initialize(fixture, table_rows:, label:, now:)
+ @table_rows = table_rows
+ @label = label
+ @now = now
+ @row = fixture.to_hash
+ fill_row_model_attributes
+ end
+
+ def to_hash
+ @row
+ end
+
+ private
+
+ def model_metadata
+ @table_rows.model_metadata
+ end
+
+ def model_class
+ @table_rows.model_class
+ end
+
+ def fill_row_model_attributes
+ return unless model_class
+ fill_timestamps
+ interpolate_label
+ generate_primary_key
+ resolve_enums
+ resolve_sti_reflections
+ end
+
+ def reflection_class
+ @reflection_class ||= if @row.include?(model_metadata.inheritance_column_name)
+ @row[model_metadata.inheritance_column_name].constantize rescue model_class
+ else
+ model_class
+ end
+ end
+
+ def fill_timestamps
+ # fill in timestamp columns if they aren't specified and the model is set to record_timestamps
+ if model_class.record_timestamps
+ model_metadata.timestamp_column_names.each do |c_name|
+ @row[c_name] = @now unless @row.key?(c_name)
+ end
+ end
+ end
+
+ def interpolate_label
+ # interpolate the fixture label
+ @row.each do |key, value|
+ @row[key] = value.gsub("$LABEL", @label.to_s) if value.is_a?(String)
+ end
+ end
+
+ def generate_primary_key
+ # generate a primary key if necessary
+ if model_metadata.has_primary_key_column? && !@row.include?(model_metadata.primary_key_name)
+ @row[model_metadata.primary_key_name] = ActiveRecord::FixtureSet.identify(
+ @label, model_metadata.primary_key_type
+ )
+ end
+ end
+
+ def resolve_enums
+ model_class.defined_enums.each do |name, values|
+ if @row.include?(name)
+ @row[name] = values.fetch(@row[name], @row[name])
+ end
+ end
+ end
+
+ def resolve_sti_reflections
+ # If STI is used, find the correct subclass for association reflection
+ reflection_class._reflections.each_value do |association|
+ case association.macro
+ when :belongs_to
+ # Do not replace association name with association foreign key if they are named the same
+ fk_name = (association.options[:foreign_key] || "#{association.name}_id").to_s
+
+ if association.name.to_s != fk_name && value = @row.delete(association.name.to_s)
+ if association.polymorphic? && value.sub!(/\s*\(([^\)]*)\)\s*$/, "")
+ # support polymorphic belongs_to as "label (Type)"
+ @row[association.foreign_type] = $1
+ end
+
+ fk_type = reflection_class.type_for_attribute(fk_name).type
+ @row[fk_name] = ActiveRecord::FixtureSet.identify(value, fk_type)
+ end
+ when :has_many
+ if association.options[:through]
+ add_join_records(HasManyThroughProxy.new(association))
+ end
+ end
+ end
+ end
+
+ def add_join_records(association)
+ # This is the case when the join table has no fixtures file
+ if (targets = @row.delete(association.name.to_s))
+ table_name = association.join_table
+ column_type = association.primary_key_type
+ lhs_key = association.lhs_key
+ rhs_key = association.rhs_key
+
+ targets = targets.is_a?(Array) ? targets : targets.split(/\s*,\s*/)
+ joins = targets.map do |target|
+ { lhs_key => @row[model_metadata.primary_key_name],
+ rhs_key => ActiveRecord::FixtureSet.identify(target, column_type) }
+ end
+ @table_rows.tables[table_name].concat(joins)
+ end
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/fixture_set/table_rows.rb b/activerecord/lib/active_record/fixture_set/table_rows.rb
new file mode 100644
index 0000000000..23814b6cb5
--- /dev/null
+++ b/activerecord/lib/active_record/fixture_set/table_rows.rb
@@ -0,0 +1,47 @@
+# frozen_string_literal: true
+
+require "active_record/fixture_set/table_row"
+require "active_record/fixture_set/model_metadata"
+
+module ActiveRecord
+ class FixtureSet
+ class TableRows # :nodoc:
+ def initialize(table_name, model_class:, fixtures:, config:)
+ @model_class = model_class
+
+ # track any join tables we need to insert later
+ @tables = Hash.new { |h, table| h[table] = [] }
+
+ # ensure this table is loaded before any HABTM associations
+ @tables[table_name] = nil
+
+ build_table_rows_from(table_name, fixtures, config)
+ end
+
+ attr_reader :tables, :model_class
+
+ def to_hash
+ @tables.transform_values { |rows| rows.map(&:to_hash) }
+ end
+
+ def model_metadata
+ @model_metadata ||= ModelMetadata.new(model_class)
+ end
+
+ private
+
+ def build_table_rows_from(table_name, fixtures, config)
+ now = config.default_timezone == :utc ? Time.now.utc : Time.now
+
+ @tables[table_name] = fixtures.map do |label, fixture|
+ TableRow.new(
+ fixture,
+ table_rows: self,
+ label: label,
+ now: now,
+ )
+ end
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/fixtures.rb b/activerecord/lib/active_record/fixtures.rb
index ed1bbf5dcd..327121a2a2 100644
--- a/activerecord/lib/active_record/fixtures.rb
+++ b/activerecord/lib/active_record/fixtures.rb
@@ -1,11 +1,16 @@
-require 'erb'
-require 'yaml'
-require 'zlib'
-require 'set'
-require 'active_support/dependencies'
-require 'active_support/core_ext/digest/uuid'
-require 'active_record/fixture_set/file'
-require 'active_record/errors'
+# frozen_string_literal: true
+
+require "erb"
+require "yaml"
+require "zlib"
+require "set"
+require "active_support/dependencies"
+require "active_support/core_ext/digest/uuid"
+require "active_record/fixture_set/file"
+require "active_record/fixture_set/render_context"
+require "active_record/fixture_set/table_rows"
+require "active_record/test_fixtures"
+require "active_record/errors"
module ActiveRecord
class FixtureClassNotFound < ActiveRecord::ActiveRecordError #:nodoc:
@@ -66,17 +71,36 @@ module ActiveRecord
# By default, +test_helper.rb+ will load all of your fixtures into your test
# database, so this test will succeed.
#
- # The testing environment will automatically load the all fixtures into the database before each
+ # The testing environment will automatically load all the fixtures into the database before each
# test. To ensure consistent data, the environment deletes the fixtures before running the load.
#
# In addition to being available in the database, the fixture's data may also be accessed by
- # using a special dynamic method, which has the same name as the model, and accepts the
- # name of the fixture to instantiate:
+ # using a special dynamic method, which has the same name as the model.
#
- # test "find" do
+ # Passing in a fixture name to this dynamic method returns the fixture matching this name:
+ #
+ # test "find one" do
# assert_equal "Ruby on Rails", web_sites(:rubyonrails).name
# end
#
+ # Passing in multiple fixture names returns all fixtures matching these names:
+ #
+ # test "find all by name" do
+ # assert_equal 2, web_sites(:rubyonrails, :google).length
+ # end
+ #
+ # Passing in no arguments returns all fixtures:
+ #
+ # test "find all" do
+ # assert_equal 2, web_sites.length
+ # end
+ #
+ # Passing in any fixture name that does not exist will raise <tt>StandardError</tt>:
+ #
+ # test "find by name that does not exist" do
+ # assert_raise(StandardError) { web_sites(:reddit) }
+ # end
+ #
# Alternatively, you may enable auto-instantiation of the fixture data. For instance, take the
# following tests:
#
@@ -88,7 +112,7 @@ module ActiveRecord
# assert_equal "Ruby on Rails", @rubyonrails.name
# end
#
- # In order to use these methods to access fixtured data within your testcases, you must specify one of the
+ # In order to use these methods to access fixtured data within your test cases, you must specify one of the
# following in your ActiveSupport::TestCase-derived class:
#
# - to fully enable instantiated fixtures (enable alternate methods #1 and #2 above)
@@ -103,7 +127,7 @@ module ActiveRecord
#
# = Dynamic fixtures with ERB
#
- # Some times you don't care about the content of the fixtures as much as you care about the volume.
+ # Sometimes you don't care about the content of the fixtures as much as you care about the volume.
# In these cases, you can mix ERB in with your YAML fixtures to create a bunch of fixtures for load
# testing, like:
#
@@ -126,7 +150,7 @@ module ActiveRecord
# unwanted inter-test dependencies. Methods used by multiple fixtures should be defined in a module
# that is included in ActiveRecord::FixtureSet.context_class.
#
- # - define a helper method in `test_helper.rb`
+ # - define a helper method in <tt>test_helper.rb</tt>
# module FixtureFileHelpers
# def file_sha(path)
# Digest::SHA2.hexdigest(File.read(Rails.root.join('test/fixtures', path)))
@@ -148,18 +172,18 @@ module ActiveRecord
# self.use_transactional_tests = true
#
# test "godzilla" do
- # assert !Foo.all.empty?
+ # assert_not_empty Foo.all
# Foo.destroy_all
- # assert Foo.all.empty?
+ # assert_empty Foo.all
# end
#
# test "godzilla aftermath" do
- # assert !Foo.all.empty?
+ # assert_not_empty Foo.all
# end
# end
#
- # If you preload your test database with all fixture data (probably in the rake task) and use
- # transactional tests, then you may omit all fixtures declarations in your test cases since
+ # If you preload your test database with all fixture data (probably by running <tt>rails db:fixtures:load</tt>)
+ # and use transactional tests, then you may omit all fixtures declarations in your test cases since
# all the data's already there and every case rolls back its changes.
#
# In order to use instantiated fixtures with preloaded data, set +self.pre_loaded_fixtures+ to
@@ -415,180 +439,186 @@ module ActiveRecord
# possibly in a folder with the same name.
#++
- MAX_ID = 2 ** 30 - 1
-
- @@all_cached_fixtures = Hash.new { |h,k| h[k] = {} }
+ MAX_ID = 2**30 - 1
- def self.default_fixture_model_name(fixture_set_name, config = ActiveRecord::Base) # :nodoc:
- config.pluralize_table_names ?
- fixture_set_name.singularize.camelize :
- fixture_set_name.camelize
- end
-
- def self.default_fixture_table_name(fixture_set_name, config = ActiveRecord::Base) # :nodoc:
- "#{ config.table_name_prefix }"\
- "#{ fixture_set_name.tr('/', '_') }"\
- "#{ config.table_name_suffix }".to_sym
- end
+ @@all_cached_fixtures = Hash.new { |h, k| h[k] = {} }
- def self.reset_cache
- @@all_cached_fixtures.clear
- end
+ cattr_accessor :all_loaded_fixtures, default: {}
- def self.cache_for_connection(connection)
- @@all_cached_fixtures[connection]
- end
+ class ClassCache
+ def initialize(class_names, config)
+ @class_names = class_names.stringify_keys
+ @config = config
- def self.fixture_is_cached?(connection, table_name)
- cache_for_connection(connection)[table_name]
- end
+ # Remove string values that aren't constants or subclasses of AR
+ @class_names.delete_if do |klass_name, klass|
+ !insert_class(@class_names, klass_name, klass)
+ end
+ end
- def self.cached_fixtures(connection, keys_to_fetch = nil)
- if keys_to_fetch
- cache_for_connection(connection).values_at(*keys_to_fetch)
- else
- cache_for_connection(connection).values
+ def [](fs_name)
+ @class_names.fetch(fs_name) do
+ klass = default_fixture_model(fs_name, @config).safe_constantize
+ insert_class(@class_names, fs_name, klass)
+ end
end
- end
- def self.cache_fixtures(connection, fixtures_map)
- cache_for_connection(connection).update(fixtures_map)
- end
+ private
- def self.instantiate_fixtures(object, fixture_set, load_instances = true)
- if load_instances
- fixture_set.each do |fixture_name, fixture|
- begin
- object.instance_variable_set "@#{fixture_name}", fixture.find
- rescue FixtureClassNotFound
- nil
+ def insert_class(class_names, name, klass)
+ # We only want to deal with AR objects.
+ if klass && klass < ActiveRecord::Base
+ class_names[name] = klass
+ else
+ class_names[name] = nil
end
end
- end
- end
- def self.instantiate_all_loaded_fixtures(object, load_instances = true)
- all_loaded_fixtures.each_value do |fixture_set|
- instantiate_fixtures(object, fixture_set, load_instances)
- end
+ def default_fixture_model(fs_name, config)
+ ActiveRecord::FixtureSet.default_fixture_model_name(fs_name, config)
+ end
end
- cattr_accessor :all_loaded_fixtures
- self.all_loaded_fixtures = {}
+ class << self
+ def default_fixture_model_name(fixture_set_name, config = ActiveRecord::Base) # :nodoc:
+ config.pluralize_table_names ?
+ fixture_set_name.singularize.camelize :
+ fixture_set_name.camelize
+ end
- class ClassCache
- def initialize(class_names, config)
- @class_names = class_names.stringify_keys
- @config = config
+ def default_fixture_table_name(fixture_set_name, config = ActiveRecord::Base) # :nodoc:
+ "#{ config.table_name_prefix }"\
+ "#{ fixture_set_name.tr('/', '_') }"\
+ "#{ config.table_name_suffix }".to_sym
+ end
- # Remove string values that aren't constants or subclasses of AR
- @class_names.delete_if { |klass_name, klass| !insert_class(@class_names, klass_name, klass) }
+ def reset_cache
+ @@all_cached_fixtures.clear
end
- def [](fs_name)
- @class_names.fetch(fs_name) {
- klass = default_fixture_model(fs_name, @config).safe_constantize
- insert_class(@class_names, fs_name, klass)
- }
+ def cache_for_connection(connection)
+ @@all_cached_fixtures[connection]
end
- private
+ def fixture_is_cached?(connection, table_name)
+ cache_for_connection(connection)[table_name]
+ end
- def insert_class(class_names, name, klass)
- # We only want to deal with AR objects.
- if klass && klass < ActiveRecord::Base
- class_names[name] = klass
+ def cached_fixtures(connection, keys_to_fetch = nil)
+ if keys_to_fetch
+ cache_for_connection(connection).values_at(*keys_to_fetch)
else
- class_names[name] = nil
+ cache_for_connection(connection).values
end
end
- def default_fixture_model(fs_name, config)
- ActiveRecord::FixtureSet.default_fixture_model_name(fs_name, config)
+ def cache_fixtures(connection, fixtures_map)
+ cache_for_connection(connection).update(fixtures_map)
+ end
+
+ def instantiate_fixtures(object, fixture_set, load_instances = true)
+ return unless load_instances
+ fixture_set.each do |fixture_name, fixture|
+ object.instance_variable_set "@#{fixture_name}", fixture.find
+ rescue FixtureClassNotFound
+ nil
+ end
+ end
+
+ def instantiate_all_loaded_fixtures(object, load_instances = true)
+ all_loaded_fixtures.each_value do |fixture_set|
+ instantiate_fixtures(object, fixture_set, load_instances)
+ end
end
- end
- def self.create_fixtures(fixtures_directory, fixture_set_names, class_names = {}, config = ActiveRecord::Base)
- fixture_set_names = Array(fixture_set_names).map(&:to_s)
- class_names = ClassCache.new class_names, config
+ def create_fixtures(fixtures_directory, fixture_set_names, class_names = {}, config = ActiveRecord::Base)
+ fixture_set_names = Array(fixture_set_names).map(&:to_s)
+ class_names = ClassCache.new class_names, config
- # FIXME: Apparently JK uses this.
- connection = block_given? ? yield : ActiveRecord::Base.connection
+ # FIXME: Apparently JK uses this.
+ connection = block_given? ? yield : ActiveRecord::Base.connection
- files_to_read = fixture_set_names.reject { |fs_name|
- fixture_is_cached?(connection, fs_name)
- }
+ fixture_files_to_read = fixture_set_names.reject do |fs_name|
+ fixture_is_cached?(connection, fs_name)
+ end
- unless files_to_read.empty?
- connection.disable_referential_integrity do
- fixtures_map = {}
+ if fixture_files_to_read.any?
+ fixtures_map = read_and_insert(
+ fixtures_directory,
+ fixture_files_to_read,
+ class_names,
+ connection,
+ )
+ cache_fixtures(connection, fixtures_map)
+ end
+ cached_fixtures(connection, fixture_set_names)
+ end
+
+ # Returns a consistent, platform-independent identifier for +label+.
+ # Integer identifiers are values less than 2^30. UUIDs are RFC 4122 version 5 SHA-1 hashes.
+ def identify(label, column_type = :integer)
+ if column_type == :uuid
+ Digest::UUID.uuid_v5(Digest::UUID::OID_NAMESPACE, label.to_s)
+ else
+ Zlib.crc32(label.to_s) % MAX_ID
+ end
+ end
+
+ # Superclass for the evaluation contexts used by ERB fixtures.
+ def context_class
+ @context_class ||= Class.new
+ end
+
+ private
- fixture_sets = files_to_read.map do |fs_name|
- klass = class_names[fs_name]
- conn = klass ? klass.connection : connection
- fixtures_map[fs_name] = new( # ActiveRecord::FixtureSet.new
- conn,
- fs_name,
+ def read_and_insert(fixtures_directory, fixture_files, class_names, connection) # :nodoc:
+ fixtures_map = {}
+ fixture_sets = fixture_files.map do |fixture_set_name|
+ klass = class_names[fixture_set_name]
+ fixtures_map[fixture_set_name] = new( # ActiveRecord::FixtureSet.new
+ nil,
+ fixture_set_name,
klass,
- ::File.join(fixtures_directory, fs_name))
+ ::File.join(fixtures_directory, fixture_set_name)
+ )
end
+ update_all_loaded_fixtures(fixtures_map)
- update_all_loaded_fixtures fixtures_map
+ insert(fixture_sets, connection)
- connection.transaction(:requires_new => true) do
- deleted_tables = Set.new
- fixture_sets.each do |fs|
- conn = fs.model_class.respond_to?(:connection) ? fs.model_class.connection : connection
- table_rows = fs.table_rows
+ fixtures_map
+ end
- table_rows.each_key do |table|
- unless deleted_tables.include? table
- conn.delete "DELETE FROM #{conn.quote_table_name(table)}", 'Fixture Delete'
- end
- deleted_tables << table
- end
+ def insert(fixture_sets, connection) # :nodoc:
+ fixture_sets_by_connection = fixture_sets.group_by do |fixture_set|
+ fixture_set.model_class&.connection || connection
+ end
- table_rows.each do |fixture_set_name, rows|
- rows.each do |row|
- conn.insert_fixture(row, fixture_set_name)
- end
- end
+ fixture_sets_by_connection.each do |conn, set|
+ table_rows_for_connection = Hash.new { |h, k| h[k] = [] }
- # Cap primary key sequences to max(pk).
- if conn.respond_to?(:reset_pk_sequence!)
- conn.reset_pk_sequence!(fs.table_name)
+ set.each do |fixture_set|
+ fixture_set.table_rows.each do |table, rows|
+ table_rows_for_connection[table].unshift(*rows)
end
end
- end
+ conn.insert_fixtures_set(table_rows_for_connection, table_rows_for_connection.keys)
- cache_fixtures(connection, fixtures_map)
+ # Cap primary key sequences to max(pk).
+ if conn.respond_to?(:reset_pk_sequence!)
+ set.each { |fs| conn.reset_pk_sequence!(fs.table_name) }
+ end
+ end
end
- end
- cached_fixtures(connection, fixture_set_names)
- end
- # Returns a consistent, platform-independent identifier for +label+.
- # Integer identifiers are values less than 2^30. UUIDs are RFC 4122 version 5 SHA-1 hashes.
- def self.identify(label, column_type = :integer)
- if column_type == :uuid
- Digest::UUID.uuid_v5(Digest::UUID::OID_NAMESPACE, label.to_s)
- else
- Zlib.crc32(label.to_s) % MAX_ID
- end
- end
-
- # Superclass for the evaluation contexts used by ERB fixtures.
- def self.context_class
- @context_class ||= Class.new
- end
-
- def self.update_all_loaded_fixtures(fixtures_map) # :nodoc:
- all_loaded_fixtures.update(fixtures_map)
+ def update_all_loaded_fixtures(fixtures_map) # :nodoc:
+ all_loaded_fixtures.update(fixtures_map)
+ end
end
attr_reader :table_name, :name, :fixtures, :model_class, :config
- def initialize(connection, name, class_name, path, config = ActiveRecord::Base)
+ def initialize(_, name, class_name, path, config = ActiveRecord::Base)
@name = name
@path = path
@config = config
@@ -597,18 +627,14 @@ module ActiveRecord
@fixtures = read_fixture_files(path)
- @connection = connection
-
- @table_name = ( model_class.respond_to?(:table_name) ?
- model_class.table_name :
- self.class.default_fixture_table_name(name, config) )
+ @table_name = model_class&.table_name || self.class.default_fixture_table_name(name, config)
end
def [](x)
fixtures[x]
end
- def []=(k,v)
+ def []=(k, v)
fixtures[k] = v
end
@@ -623,152 +649,18 @@ module ActiveRecord
# Returns a hash of rows to be inserted. The key is the table, the value is
# a list of rows to insert to that table.
def table_rows
- now = config.default_timezone == :utc ? Time.now.utc : Time.now
-
# allow a standard key to be used for doing defaults in YAML
- fixtures.delete('DEFAULTS')
-
- # track any join tables we need to insert later
- rows = Hash.new { |h,table| h[table] = [] }
-
- rows[table_name] = fixtures.map do |label, fixture|
- row = fixture.to_hash
-
- if model_class
- # fill in timestamp columns if they aren't specified and the model is set to record_timestamps
- if model_class.record_timestamps
- timestamp_column_names.each do |c_name|
- row[c_name] = now unless row.key?(c_name)
- end
- end
-
- # interpolate the fixture label
- row.each do |key, value|
- row[key] = value.gsub("$LABEL", label.to_s) if value.is_a?(String)
- end
-
- # generate a primary key if necessary
- if has_primary_key_column? && !row.include?(primary_key_name)
- row[primary_key_name] = ActiveRecord::FixtureSet.identify(label, primary_key_type)
- end
-
- # Resolve enums
- model_class.defined_enums.each do |name, values|
- if row.include?(name)
- row[name] = values.fetch(row[name], row[name])
- end
- end
-
- # If STI is used, find the correct subclass for association reflection
- reflection_class =
- if row.include?(inheritance_column_name)
- row[inheritance_column_name].constantize rescue model_class
- else
- model_class
- end
-
- reflection_class._reflections.each_value do |association|
- case association.macro
- when :belongs_to
- # Do not replace association name with association foreign key if they are named the same
- fk_name = (association.options[:foreign_key] || "#{association.name}_id").to_s
-
- if association.name.to_s != fk_name && value = row.delete(association.name.to_s)
- if association.polymorphic? && value.sub!(/\s*\(([^\)]*)\)\s*$/, "")
- # support polymorphic belongs_to as "label (Type)"
- row[association.foreign_type] = $1
- end
-
- fk_type = reflection_class.type_for_attribute(fk_name).type
- row[fk_name] = ActiveRecord::FixtureSet.identify(value, fk_type)
- end
- when :has_many
- if association.options[:through]
- add_join_records(rows, row, HasManyThroughProxy.new(association))
- end
- end
- end
- end
-
- row
- end
- rows
- end
-
- class ReflectionProxy # :nodoc:
- def initialize(association)
- @association = association
- end
-
- def join_table
- @association.join_table
- end
-
- def name
- @association.name
- end
-
- def primary_key_type
- @association.klass.type_for_attribute(@association.klass.primary_key).type
- end
- end
-
- class HasManyThroughProxy < ReflectionProxy # :nodoc:
- def rhs_key
- @association.foreign_key
- end
-
- def lhs_key
- @association.through_reflection.foreign_key
- end
+ fixtures.delete("DEFAULTS")
- def join_table
- @association.through_reflection.table_name
- end
+ TableRows.new(
+ table_name,
+ model_class: model_class,
+ fixtures: fixtures,
+ config: config,
+ ).to_hash
end
private
- def primary_key_name
- @primary_key_name ||= model_class && model_class.primary_key
- end
-
- def primary_key_type
- @primary_key_type ||= model_class && model_class.type_for_attribute(model_class.primary_key).type
- end
-
- def add_join_records(rows, row, association)
- # This is the case when the join table has no fixtures file
- if (targets = row.delete(association.name.to_s))
- table_name = association.join_table
- column_type = association.primary_key_type
- lhs_key = association.lhs_key
- rhs_key = association.rhs_key
-
- targets = targets.is_a?(Array) ? targets : targets.split(/\s*,\s*/)
- rows[table_name].concat targets.map { |target|
- { lhs_key => row[primary_key_name],
- rhs_key => ActiveRecord::FixtureSet.identify(target, column_type) }
- }
- end
- end
-
- def has_primary_key_column?
- @has_primary_key_column ||= primary_key_name &&
- model_class.columns.any? { |c| c.name == primary_key_name }
- end
-
- def timestamp_column_names
- @timestamp_column_names ||=
- %w(created_at created_on updated_at updated_on) & column_names
- end
-
- def inheritance_column_name
- @inheritance_column_name ||= model_class && model_class.inheritance_column
- end
-
- def column_names
- @column_names ||= @connection.columns(@table_name).collect(&:name)
- end
def model_class=(class_name)
if class_name.is_a?(Class) # TODO: Should be an AR::Base type class, or any?
@@ -799,7 +691,6 @@ module ActiveRecord
def yaml_file_path(path)
"#{path}.yml"
end
-
end
class Fixture #:nodoc:
@@ -833,212 +724,9 @@ module ActiveRecord
alias :to_hash :fixture
def find
- if model_class
- model_class.unscoped do
- model_class.find(fixture[model_class.primary_key])
- end
- else
- raise FixtureClassNotFound, "No class attached to find."
- end
- end
- end
-end
-
-module ActiveRecord
- module TestFixtures
- extend ActiveSupport::Concern
-
- def before_setup # :nodoc:
- setup_fixtures
- super
- end
-
- def after_teardown # :nodoc:
- super
- teardown_fixtures
- end
-
- included do
- class_attribute :fixture_path, :instance_writer => false
- class_attribute :fixture_table_names
- class_attribute :fixture_class_names
- class_attribute :use_transactional_tests
- class_attribute :use_transactional_fixtures
- class_attribute :use_instantiated_fixtures # true, false, or :no_instances
- class_attribute :pre_loaded_fixtures
- class_attribute :config
-
- singleton_class.deprecate 'use_transactional_fixtures=' => 'use use_transactional_tests= instead'
-
- self.fixture_table_names = []
- self.use_instantiated_fixtures = false
- self.pre_loaded_fixtures = false
- self.config = ActiveRecord::Base
-
- self.fixture_class_names = {}
-
- silence_warnings do
- define_singleton_method :use_transactional_tests do
- if use_transactional_fixtures.nil?
- true
- else
- use_transactional_fixtures
- end
- end
- end
- end
-
- module ClassMethods
- # Sets the model class for a fixture when the class name cannot be inferred from the fixture name.
- #
- # Examples:
- #
- # set_fixture_class some_fixture: SomeModel,
- # 'namespaced/fixture' => Another::Model
- #
- # The keys must be the fixture names, that coincide with the short paths to the fixture files.
- def set_fixture_class(class_names = {})
- self.fixture_class_names = self.fixture_class_names.merge(class_names.stringify_keys)
- end
-
- def fixtures(*fixture_set_names)
- if fixture_set_names.first == :all
- fixture_set_names = Dir["#{fixture_path}/{**,*}/*.{yml}"]
- fixture_set_names.map! { |f| f[(fixture_path.to_s.size + 1)..-5] }
- else
- fixture_set_names = fixture_set_names.flatten.map(&:to_s)
- end
-
- self.fixture_table_names |= fixture_set_names
- setup_fixture_accessors(fixture_set_names)
- end
-
- def setup_fixture_accessors(fixture_set_names = nil)
- fixture_set_names = Array(fixture_set_names || fixture_table_names)
- methods = Module.new do
- fixture_set_names.each do |fs_name|
- fs_name = fs_name.to_s
- accessor_name = fs_name.tr('/', '_').to_sym
-
- define_method(accessor_name) do |*fixture_names|
- force_reload = fixture_names.pop if fixture_names.last == true || fixture_names.last == :reload
-
- @fixture_cache[fs_name] ||= {}
-
- instances = fixture_names.map do |f_name|
- f_name = f_name.to_s if f_name.is_a?(Symbol)
- @fixture_cache[fs_name].delete(f_name) if force_reload
-
- if @loaded_fixtures[fs_name][f_name]
- @fixture_cache[fs_name][f_name] ||= @loaded_fixtures[fs_name][f_name].find
- else
- raise StandardError, "No fixture named '#{f_name}' found for fixture set '#{fs_name}'"
- end
- end
-
- instances.size == 1 ? instances.first : instances
- end
- private accessor_name
- end
- end
- include methods
- end
-
- def uses_transaction(*methods)
- @uses_transaction = [] unless defined?(@uses_transaction)
- @uses_transaction.concat methods.map(&:to_s)
- end
-
- def uses_transaction?(method)
- @uses_transaction = [] unless defined?(@uses_transaction)
- @uses_transaction.include?(method.to_s)
- end
- end
-
- def run_in_transaction?
- use_transactional_tests &&
- !self.class.uses_transaction?(method_name)
- end
-
- def setup_fixtures(config = ActiveRecord::Base)
- if pre_loaded_fixtures && !use_transactional_tests
- raise RuntimeError, 'pre_loaded_fixtures requires use_transactional_tests'
- end
-
- @fixture_cache = {}
- @fixture_connections = []
- @@already_loaded_fixtures ||= {}
-
- # Load fixtures once and begin transaction.
- if run_in_transaction?
- if @@already_loaded_fixtures[self.class]
- @loaded_fixtures = @@already_loaded_fixtures[self.class]
- else
- @loaded_fixtures = load_fixtures(config)
- @@already_loaded_fixtures[self.class] = @loaded_fixtures
- end
- @fixture_connections = enlist_fixture_connections
- @fixture_connections.each do |connection|
- connection.begin_transaction joinable: false
- end
- # Load fixtures for every test.
- else
- ActiveRecord::FixtureSet.reset_cache
- @@already_loaded_fixtures[self.class] = nil
- @loaded_fixtures = load_fixtures(config)
- end
-
- # Instantiate fixtures for every test if requested.
- instantiate_fixtures if use_instantiated_fixtures
- end
-
- def teardown_fixtures
- # Rollback changes if a transaction is active.
- if run_in_transaction?
- @fixture_connections.each do |connection|
- connection.rollback_transaction if connection.transaction_open?
- end
- @fixture_connections.clear
- else
- ActiveRecord::FixtureSet.reset_cache
- end
-
- ActiveRecord::Base.clear_active_connections!
- end
-
- def enlist_fixture_connections
- ActiveRecord::Base.connection_handler.connection_pool_list.map(&:connection)
- end
-
- private
- def load_fixtures(config)
- fixtures = ActiveRecord::FixtureSet.create_fixtures(fixture_path, fixture_table_names, fixture_class_names, config)
- Hash[fixtures.map { |f| [f.name, f] }]
- end
-
- def instantiate_fixtures
- if pre_loaded_fixtures
- raise RuntimeError, 'Load fixtures before instantiating them.' if ActiveRecord::FixtureSet.all_loaded_fixtures.empty?
- ActiveRecord::FixtureSet.instantiate_all_loaded_fixtures(self, load_instances?)
- else
- raise RuntimeError, 'Load fixtures before instantiating them.' if @loaded_fixtures.nil?
- @loaded_fixtures.each_value do |fixture_set|
- ActiveRecord::FixtureSet.instantiate_fixtures(self, fixture_set, load_instances?)
- end
- end
- end
-
- def load_instances?
- use_instantiated_fixtures != :no_instances
- end
- end
-end
-
-class ActiveRecord::FixtureSet::RenderContext # :nodoc:
- def self.create_subclass
- Class.new ActiveRecord::FixtureSet.context_class do
- def get_binding
- binding()
+ raise FixtureClassNotFound, "No class attached to find." unless model_class
+ model_class.unscoped do
+ model_class.find(fixture[model_class.primary_key])
end
end
end
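The fixture accessors generated above return a single record for one fixture name and an array for several, and :reload busts the per-test cache. A minimal usage sketch, assuming a Rails test case with a hypothetical users.yml fixture file containing :david and :jamis entries:

    class UserTest < ActiveSupport::TestCase
      fixtures :users  # defines a private #users accessor for this test case

      test "fixture accessors map names to records" do
        assert_kind_of User, users(:david)          # one name   => one record
        assert_equal 2, users(:david, :jamis).size  # many names => array of records
        users(:david, :reload)                      # :reload forces a fresh find, bypassing the fixture cache
      end
    end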
diff --git a/activerecord/lib/active_record/gem_version.rb b/activerecord/lib/active_record/gem_version.rb
index 73be4cb271..f77bc2e3c1 100644
--- a/activerecord/lib/active_record/gem_version.rb
+++ b/activerecord/lib/active_record/gem_version.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
module ActiveRecord
# Returns the version of the currently loaded Active Record as a <tt>Gem::Version</tt>
def self.gem_version
@@ -5,7 +7,7 @@ module ActiveRecord
end
module VERSION
- MAJOR = 5
+ MAJOR = 6
MINOR = 0
TINY = 0
PRE = "beta3"
diff --git a/activerecord/lib/active_record/inheritance.rb b/activerecord/lib/active_record/inheritance.rb
index 899683ee4f..9570bc6f86 100644
--- a/activerecord/lib/active_record/inheritance.rb
+++ b/activerecord/lib/active_record/inheritance.rb
@@ -1,4 +1,6 @@
-require 'active_support/core_ext/hash/indifferent_access'
+# frozen_string_literal: true
+
+require "active_support/core_ext/hash/indifferent_access"
module ActiveRecord
# == Single table inheritance
@@ -19,7 +21,7 @@ module ActiveRecord
# Be aware that because the type column is an attribute on the record every new
# subclass will instantly be marked as dirty and the type column will be included
# in the list of changed attributes on the record. This is different from non
- # STI classes:
+ # Single Table Inheritance (STI) classes:
#
# Company.new.changed? # => false
# Firm.new.changed? # => true
@@ -30,37 +32,40 @@ module ActiveRecord
# for differentiating between them or reloading the right type with find.
#
# Note, all the attributes for all the cases are kept in the same table. Read more:
- # http://www.martinfowler.com/eaaCatalog/singleTableInheritance.html
+ # https://www.martinfowler.com/eaaCatalog/singleTableInheritance.html
#
module Inheritance
extend ActiveSupport::Concern
included do
# Determines whether to store the full constant name including namespace when using STI.
- class_attribute :store_full_sti_class, instance_writer: false
- self.store_full_sti_class = true
+ # This is true, by default.
+ class_attribute :store_full_sti_class, instance_writer: false, default: true
end
module ClassMethods
# Determines if one of the attributes passed in is the inheritance column,
# and if the inheritance column is attr accessible, it initializes an
# instance of the given subclass instead of the base class.
- def new(*args, &block)
+ def new(attributes = nil, &block)
if abstract_class? || self == Base
raise NotImplementedError, "#{self} is an abstract class and cannot be instantiated."
end
- attrs = args.first
if has_attribute?(inheritance_column)
- subclass = subclass_from_attributes(attrs)
+ subclass = subclass_from_attributes(attributes)
+
+ if subclass.nil? && scope_attributes = current_scope&.scope_for_create
+ subclass = subclass_from_attributes(scope_attributes)
+ end
- if subclass.nil? && base_class == self
+ if subclass.nil? && base_class?
subclass = subclass_from_attributes(column_defaults)
end
end
if subclass && subclass != self
- subclass.new(*args, &block)
+ subclass.new(attributes, &block)
else
super
end
@@ -103,21 +108,53 @@ module ActiveRecord
end
end
- # Set this to true if this is an abstract class (see <tt>abstract_class?</tt>).
- # If you are using inheritance with ActiveRecord and don't want child classes
- # to utilize the implied STI table name of the parent class, this will need to be true.
- # For example, given the following:
+ # Returns whether the class is a base class.
+ # See #base_class for more information.
+ def base_class?
+ base_class == self
+ end
+
+ # Set this to +true+ if this is an abstract class (see
+ # <tt>abstract_class?</tt>).
+ # If you are using inheritance with Active Record and don't want a class
+ # to be considered as part of the STI hierarchy, you must set this to
+ # true.
+ # +ApplicationRecord+, for example, is generated as an abstract class.
+ #
+ # Consider the following default behaviour:
#
- # class SuperClass < ActiveRecord::Base
+ # Shape = Class.new(ActiveRecord::Base)
+ # Polygon = Class.new(Shape)
+ # Square = Class.new(Polygon)
+ #
+ # Shape.table_name # => "shapes"
+ # Polygon.table_name # => "shapes"
+ # Square.table_name # => "shapes"
+ # Shape.create! # => #<Shape id: 1, type: nil>
+ # Polygon.create! # => #<Polygon id: 2, type: "Polygon">
+ # Square.create! # => #<Square id: 3, type: "Square">
+ #
+ # However, when using <tt>abstract_class</tt>, +Shape+ is omitted from
+ # the hierarchy:
+ #
+ # class Shape < ActiveRecord::Base
# self.abstract_class = true
# end
- # class Child < SuperClass
- # self.table_name = 'the_table_i_really_want'
- # end
- #
+ # Polygon = Class.new(Shape)
+ # Square = Class.new(Polygon)
#
- # <tt>self.abstract_class = true</tt> is required to make <tt>Child<.find,.create, or any Arel method></tt> use <tt>the_table_i_really_want</tt> instead of a table called <tt>super_classes</tt>
+ # Shape.table_name # => nil
+ # Polygon.table_name # => "polygons"
+ # Square.table_name # => "polygons"
+ # Shape.create! # => NotImplementedError: Shape is an abstract class and cannot be instantiated.
+ # Polygon.create! # => #<Polygon id: 1, type: nil>
+ # Square.create! # => #<Square id: 2, type: "Square">
#
+ # Note that in the above example, to disallow the creation of a plain
+ # +Polygon+, you should use <tt>validates :type, presence: true</tt>,
+ # instead of setting it as an abstract class. This way, +Polygon+ will
+ # stay in the hierarchy, and Active Record will continue to correctly
+ # derive the table name.
attr_accessor :abstract_class
# Returns whether this class is an abstract class or not.
@@ -129,87 +166,104 @@ module ActiveRecord
store_full_sti_class ? name : name.demodulize
end
+ def polymorphic_name
+ base_class.name
+ end
+
+ def inherited(subclass)
+ subclass.instance_variable_set(:@_type_candidates_cache, Concurrent::Map.new)
+ super
+ end
+
protected
- # Returns the class type of the record using the current module as a prefix. So descendants of
- # MyApp::Business::Account would appear as MyApp::Business::AccountSubclass.
- def compute_type(type_name)
- if type_name.match(/^::/)
- # If the type is prefixed with a scope operator then we assume that
- # the type_name is an absolute reference.
- ActiveSupport::Dependencies.constantize(type_name)
- else
- # Build a list of candidates to search for
- candidates = []
- name.scan(/::|$/) { candidates.unshift "#{$`}::#{type_name}" }
- candidates << type_name
-
- candidates.each do |candidate|
- constant = ActiveSupport::Dependencies.safe_constantize(candidate)
- return constant if candidate == constant.to_s
- end
+ # Returns the class type of the record using the current module as a prefix. So descendants of
+ # MyApp::Business::Account would appear as MyApp::Business::AccountSubclass.
+ def compute_type(type_name)
+ if type_name.start_with?("::")
+ # If the type is prefixed with a scope operator then we assume that
+ # the type_name is an absolute reference.
+ ActiveSupport::Dependencies.constantize(type_name)
+ else
+ type_candidate = @_type_candidates_cache[type_name]
+ if type_candidate && type_constant = ActiveSupport::Dependencies.safe_constantize(type_candidate)
+ return type_constant
+ end
- raise NameError.new("uninitialized constant #{candidates.first}", candidates.first)
- end
- end
+ # Build a list of candidates to search for
+ candidates = []
+ name.scan(/::|$/) { candidates.unshift "#{$`}::#{type_name}" }
+ candidates << type_name
- private
+ candidates.each do |candidate|
+ constant = ActiveSupport::Dependencies.safe_constantize(candidate)
+ if candidate == constant.to_s
+ @_type_candidates_cache[type_name] = candidate
+ return constant
+ end
+ end
- # Called by +instantiate+ to decide which class to use for a new
- # record instance. For single-table inheritance, we check the record
- # for a +type+ column and return the corresponding class.
- def discriminate_class_for_record(record)
- if using_single_table_inheritance?(record)
- find_sti_class(record[inheritance_column])
- else
- super
+ raise NameError.new("uninitialized constant #{candidates.first}", candidates.first)
+ end
end
- end
- def using_single_table_inheritance?(record)
- record[inheritance_column].present? && has_attribute?(inheritance_column)
- end
+ private
- def find_sti_class(type_name)
- type_name = base_class.type_for_attribute(inheritance_column).cast(type_name)
- subclass = begin
- if store_full_sti_class
- ActiveSupport::Dependencies.constantize(type_name)
+ # Called by +instantiate+ to decide which class to use for a new
+ # record instance. For single-table inheritance, we check the record
+ # for a +type+ column and return the corresponding class.
+ def discriminate_class_for_record(record)
+ if using_single_table_inheritance?(record)
+ find_sti_class(record[inheritance_column])
else
- compute_type(type_name)
+ super
end
- rescue NameError
- raise SubclassNotFound,
- "The single-table inheritance mechanism failed to locate the subclass: '#{type_name}'. " \
- "This error is raised because the column '#{inheritance_column}' is reserved for storing the class in case of inheritance. " \
- "Please rename this column if you didn't intend it to be used for storing the inheritance class " \
- "or overwrite #{name}.inheritance_column to use another column for that information."
end
- unless subclass == self || descendants.include?(subclass)
- raise SubclassNotFound, "Invalid single-table inheritance type: #{subclass.name} is not a subclass of #{name}"
+
+ def using_single_table_inheritance?(record)
+ record[inheritance_column].present? && has_attribute?(inheritance_column)
+ end
+
+ def find_sti_class(type_name)
+ type_name = base_class.type_for_attribute(inheritance_column).cast(type_name)
+ subclass = begin
+ if store_full_sti_class
+ ActiveSupport::Dependencies.constantize(type_name)
+ else
+ compute_type(type_name)
+ end
+ rescue NameError
+ raise SubclassNotFound,
+ "The single-table inheritance mechanism failed to locate the subclass: '#{type_name}'. " \
+ "This error is raised because the column '#{inheritance_column}' is reserved for storing the class in case of inheritance. " \
+ "Please rename this column if you didn't intend it to be used for storing the inheritance class " \
+ "or overwrite #{name}.inheritance_column to use another column for that information."
+ end
+ unless subclass == self || descendants.include?(subclass)
+ raise SubclassNotFound, "Invalid single-table inheritance type: #{subclass.name} is not a subclass of #{name}"
+ end
+ subclass
end
- subclass
- end
- def type_condition(table = arel_table)
- sti_column = arel_attribute(inheritance_column, table)
- sti_names = ([self] + descendants).map(&:sti_name)
+ def type_condition(table = arel_table)
+ sti_column = arel_attribute(inheritance_column, table)
+ sti_names = ([self] + descendants).map(&:sti_name)
- sti_column.in(sti_names)
- end
+ predicate_builder.build(sti_column, sti_names)
+ end
- # Detect the subclass from the inheritance column of attrs. If the inheritance column value
- # is not self or a valid subclass, raises ActiveRecord::SubclassNotFound
- def subclass_from_attributes(attrs)
- attrs = attrs.to_h if attrs.respond_to?(:permitted?)
- if attrs.is_a?(Hash)
- subclass_name = attrs.with_indifferent_access[inheritance_column]
+ # Detect the subclass from the inheritance column of attrs. If the inheritance column value
+ # is not self or a valid subclass, raises ActiveRecord::SubclassNotFound
+ def subclass_from_attributes(attrs)
+ attrs = attrs.to_h if attrs.respond_to?(:permitted?)
+ if attrs.is_a?(Hash)
+ subclass_name = attrs[inheritance_column] || attrs[inheritance_column.to_sym]
- if subclass_name.present?
- find_sti_class(subclass_name)
+ if subclass_name.present?
+ find_sti_class(subclass_name)
+ end
end
end
- end
end
def initialize_dup(other)
@@ -219,21 +273,21 @@ module ActiveRecord
private
- def initialize_internals_callback
- super
- ensure_proper_type
- end
+ def initialize_internals_callback
+ super
+ ensure_proper_type
+ end
- # Sets the attribute used for single table inheritance to this class name if this is not the
- # ActiveRecord::Base descendant.
- # Considering the hierarchy Reply < Message < ActiveRecord::Base, this makes it possible to
- # do Reply.new without having to set <tt>Reply[Reply.inheritance_column] = "Reply"</tt> yourself.
- # No such attribute would be set for objects of the Message class in that example.
- def ensure_proper_type
- klass = self.class
- if klass.finder_needs_type_condition?
- write_attribute(klass.inheritance_column, klass.sti_name)
+ # Sets the attribute used for single table inheritance to this class name if this is not the
+ # ActiveRecord::Base descendant.
+ # Considering the hierarchy Reply < Message < ActiveRecord::Base, this makes it possible to
+ # do Reply.new without having to set <tt>Reply[Reply.inheritance_column] = "Reply"</tt> yourself.
+ # No such attribute would be set for objects of the Message class in that example.
+ def ensure_proper_type
+ klass = self.class
+ if klass.finder_needs_type_condition?
+ _write_attribute(klass.inheritance_column, klass.sti_name)
+ end
end
- end
end
end
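An illustrative sketch of the subclass detection implemented in new/subclass_from_attributes and find_sti_class above, reusing the Shape/Polygon/Square models from the abstract_class documentation (without abstract_class set); this is an assumption-laden example, not part of the patch:

    Shape.new(type: "Square")   # => #<Square ...>  (the subclass named in the inheritance column is instantiated)
    Shape.new                   # => #<Shape ...>   (no type given; falls back to column defaults on the base class)
    Shape.new(type: "Circle")   # raises ActiveRecord::SubclassNotFound (no such constant/descendant)
    Polygon.new(type: "Shape")  # raises ActiveRecord::SubclassNotFound (Shape is not a subclass of Polygon)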
diff --git a/activerecord/lib/active_record/insert_all.rb b/activerecord/lib/active_record/insert_all.rb
new file mode 100644
index 0000000000..4b02d40aa0
--- /dev/null
+++ b/activerecord/lib/active_record/insert_all.rb
@@ -0,0 +1,178 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ class InsertAll # :nodoc:
+ attr_reader :model, :connection, :inserts, :keys
+ attr_reader :on_duplicate, :returning, :unique_by
+
+ def initialize(model, inserts, on_duplicate:, returning: nil, unique_by: nil)
+ raise ArgumentError, "Empty list of attributes passed" if inserts.blank?
+
+ @model, @connection, @inserts, @keys = model, model.connection, inserts, inserts.first.keys.map(&:to_s).to_set
+ @on_duplicate, @returning, @unique_by = on_duplicate, returning, unique_by
+
+ @returning = (connection.supports_insert_returning? ? primary_keys : false) if @returning.nil?
+ @returning = false if @returning == []
+
+ @unique_by = find_unique_index_for(unique_by) if unique_by
+ @on_duplicate = :skip if @on_duplicate == :update && updatable_columns.empty?
+
+ ensure_valid_options_for_connection!
+ end
+
+ def execute
+ connection.exec_query to_sql, "Bulk Insert"
+ end
+
+ def updatable_columns
+ keys - readonly_columns - unique_by_columns
+ end
+
+ def primary_keys
+ Array(model.primary_key)
+ end
+
+
+ def skip_duplicates?
+ on_duplicate == :skip
+ end
+
+ def update_duplicates?
+ on_duplicate == :update
+ end
+
+ def map_key_with_value
+ inserts.map do |attributes|
+ attributes = attributes.stringify_keys
+ verify_attributes(attributes)
+
+ keys.map do |key|
+ yield key, attributes[key]
+ end
+ end
+ end
+
+ private
+ def find_unique_index_for(unique_by)
+ match = Array(unique_by).map(&:to_s)
+
+ if index = unique_indexes.find { |i| match.include?(i.name) || i.columns == match }
+ index
+ else
+ raise ArgumentError, "No unique index found for #{unique_by}"
+ end
+ end
+
+ def unique_indexes
+ connection.schema_cache.indexes(model.table_name).select(&:unique)
+ end
+
+
+ def ensure_valid_options_for_connection!
+ if returning && !connection.supports_insert_returning?
+ raise ArgumentError, "#{connection.class} does not support :returning"
+ end
+
+ if skip_duplicates? && !connection.supports_insert_on_duplicate_skip?
+ raise ArgumentError, "#{connection.class} does not support skipping duplicates"
+ end
+
+ if update_duplicates? && !connection.supports_insert_on_duplicate_update?
+ raise ArgumentError, "#{connection.class} does not support upsert"
+ end
+
+ if unique_by && !connection.supports_insert_conflict_target?
+ raise ArgumentError, "#{connection.class} does not support :unique_by"
+ end
+ end
+
+
+ def to_sql
+ connection.build_insert_sql(ActiveRecord::InsertAll::Builder.new(self))
+ end
+
+
+ def readonly_columns
+ primary_keys + model.readonly_attributes.to_a
+ end
+
+ def unique_by_columns
+ Array(unique_by&.columns)
+ end
+
+
+ def verify_attributes(attributes)
+ if keys != attributes.keys.to_set
+ raise ArgumentError, "All objects being inserted must have the same keys"
+ end
+ end
+
+
+ class Builder
+ attr_reader :model
+
+ delegate :skip_duplicates?, :update_duplicates?, to: :insert_all
+
+ def initialize(insert_all)
+ @insert_all, @model, @connection = insert_all, insert_all.model, insert_all.connection
+ end
+
+ def into
+ "INTO #{model.quoted_table_name}(#{columns_list})"
+ end
+
+ def values_list
+ types = extract_types_from_columns_on(model.table_name, keys: insert_all.keys)
+
+ values_list = insert_all.map_key_with_value do |key, value|
+ bind = Relation::QueryAttribute.new(key, value, types[key])
+ connection.with_yaml_fallback(bind.value_for_database)
+ end
+
+ Arel::InsertManager.new.create_values_list(values_list).to_sql
+ end
+
+ def returning
+ format_columns(insert_all.returning) if insert_all.returning
+ end
+
+ def conflict_target
+ if index = insert_all.unique_by
+ sql = +"(#{format_columns(index.columns)})"
+ sql << " WHERE #{index.where}" if index.where
+ sql
+ elsif update_duplicates?
+ "(#{format_columns(insert_all.primary_keys)})"
+ end
+ end
+
+ def updatable_columns
+ quote_columns(insert_all.updatable_columns)
+ end
+
+ private
+ attr_reader :connection, :insert_all
+
+ def columns_list
+ format_columns(insert_all.keys)
+ end
+
+ def extract_types_from_columns_on(table_name, keys:)
+ columns = connection.schema_cache.columns_hash(table_name)
+
+ unknown_column = (keys - columns.keys).first
+ raise UnknownAttributeError.new(model.new, unknown_column) if unknown_column
+
+ keys.map { |key| [ key, connection.lookup_cast_type_from_column(columns[key]) ] }.to_h
+ end
+
+ def format_columns(columns)
+ quote_columns(columns).join(",")
+ end
+
+ def quote_columns(columns)
+ columns.map(&connection.method(:quote_column_name))
+ end
+ end
+ end
+end
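InsertAll is the :nodoc: backend behind the bulk-insert API added in Rails 6 (Model.insert_all / upsert_all, not shown in this diff). A hedged usage sketch, assuming a hypothetical Book model whose books table has a unique index on isbn:

    # insert_all compiles one multi-row INSERT; duplicates that hit a unique index are skipped (on_duplicate: :skip)
    Book.insert_all([
      { isbn: "1", title: "Rework" },
      { isbn: "2", title: "Eloquent Ruby" }
    ])

    # upsert_all updates the non-unique, non-readonly columns instead (on_duplicate: :update);
    # unique_by must name an existing unique index or its columns, and needs an adapter
    # with supports_insert_conflict_target? (e.g. PostgreSQL)
    Book.upsert_all([{ isbn: "1", title: "Rework, 2nd printing" }], unique_by: :isbn)

    # returning is only honoured on adapters with supports_insert_returning?
    Book.insert_all([{ isbn: "3", title: "POODR" }], returning: %w[ id isbn ])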
diff --git a/activerecord/lib/active_record/integration.rb b/activerecord/lib/active_record/integration.rb
index 466c8509a4..fa6f0d36ec 100644
--- a/activerecord/lib/active_record/integration.rb
+++ b/activerecord/lib/active_record/integration.rb
@@ -1,4 +1,6 @@
-require 'active_support/core_ext/string/filters'
+# frozen_string_literal: true
+
+require "active_support/core_ext/string/filters"
module ActiveRecord
module Integration
@@ -7,17 +9,24 @@ module ActiveRecord
included do
##
# :singleton-method:
- # Indicates the format used to generate the timestamp in the cache key.
- # Accepts any of the symbols in <tt>Time::DATE_FORMATS</tt>.
+ # Indicates the format used to generate the timestamp in the cache key, if
+ # versioning is off. Accepts any of the symbols in <tt>Time::DATE_FORMATS</tt>.
#
# This is +:usec+, by default.
- class_attribute :cache_timestamp_format, :instance_writer => false
- self.cache_timestamp_format = :usec
+ class_attribute :cache_timestamp_format, instance_writer: false, default: :usec
+
+ ##
+ # :singleton-method:
+ # Indicates whether to use a stable #cache_key method that is accompanied
+ # by a changing version in the #cache_version method.
+ #
+ # This is +true+, by default on Rails 5.2 and above.
+ class_attribute :cache_versioning, instance_writer: false, default: false
end
- # Returns a String, which Action Pack uses for constructing a URL to this
- # object. The default implementation returns this record's id as a String,
- # or nil if this record's unsaved.
+ # Returns a +String+, which Action Pack uses for constructing a URL to this
+ # object. The default implementation returns this record's id as a +String+,
+ # or +nil+ if this record is unsaved.
#
# For example, suppose that you have a User model, and that you have a
# <tt>resources :users</tt> route. Normally, +user_path+ will
@@ -42,29 +51,64 @@ module ActiveRecord
id && id.to_s # Be sure to stringify the id for routes
end
- # Returns a cache key that can be used to identify this record.
+ # Returns a stable cache key that can be used to identify this record.
#
# Product.new.cache_key # => "products/new"
- # Product.find(5).cache_key # => "products/5" (updated_at not available)
- # Person.find(5).cache_key # => "people/5-20071224150000" (updated_at available)
+ # Product.find(5).cache_key # => "products/5"
#
- # You can also pass a list of named timestamps, and the newest in the list will be
- # used to generate the key:
+ # If ActiveRecord::Base.cache_versioning is turned off, as it was in Rails 5.1 and earlier,
+ # the cache key will also include a version.
#
- # Person.find(5).cache_key(:updated_at, :last_reviewed_at)
- def cache_key(*timestamp_names)
- case
- when new_record?
+ # Product.cache_versioning = false
+ # Product.find(5).cache_key # => "products/5-20071224150000" (updated_at available)
+ def cache_key
+ if new_record?
"#{model_name.cache_key}/new"
- when timestamp_names.any?
- timestamp = max_updated_column_timestamp(timestamp_names)
- timestamp = timestamp.utc.to_s(cache_timestamp_format)
- "#{model_name.cache_key}/#{id}-#{timestamp}"
- when timestamp = max_updated_column_timestamp
- timestamp = timestamp.utc.to_s(cache_timestamp_format)
- "#{model_name.cache_key}/#{id}-#{timestamp}"
else
- "#{model_name.cache_key}/#{id}"
+ if cache_version
+ "#{model_name.cache_key}/#{id}"
+ else
+ timestamp = max_updated_column_timestamp
+
+ if timestamp
+ timestamp = timestamp.utc.to_s(cache_timestamp_format)
+ "#{model_name.cache_key}/#{id}-#{timestamp}"
+ else
+ "#{model_name.cache_key}/#{id}"
+ end
+ end
+ end
+ end
+
+ # Returns a cache version that can be used together with the cache key to form
+ # a recyclable caching scheme. By default, the #updated_at column is used for the
+ # cache_version, but this method can be overwritten to return something else.
+ #
+ # Note, this method will return nil if ActiveRecord::Base.cache_versioning is set to
+ # +false+ (which it is by default until Rails 6.0).
+ def cache_version
+ return unless cache_versioning
+
+ if has_attribute?("updated_at")
+ timestamp = updated_at_before_type_cast
+ if can_use_fast_cache_version?(timestamp)
+ raw_timestamp_to_cache_version(timestamp)
+ elsif timestamp = updated_at
+ timestamp.utc.to_s(cache_timestamp_format)
+ end
+ else
+ if self.class.has_attribute?("updated_at")
+ raise ActiveModel::MissingAttributeError, "missing attribute: updated_at"
+ end
+ end
+ end
+
+ # Returns a cache key along with the version.
+ def cache_key_with_version
+ if version = cache_version
+ "#{cache_key}-#{version}"
+ else
+ cache_key
end
end
@@ -86,7 +130,7 @@ module ActiveRecord
#
# user = User.find_by(name: 'David Heinemeier Hansson')
# user.id # => 125
- # user_path(user) # => "/users/125-david"
+ # user_path(user) # => "/users/125-david-heinemeier"
#
# Because the generated param begins with the record's +id+, it is
# suitable for passing to +find+. In a controller, for example:
@@ -100,7 +144,7 @@ module ActiveRecord
define_method :to_param do
if (default = super()) &&
(result = send(method_name).to_s).present? &&
- (param = result.squish.truncate(20, separator: /\s/, omission: nil).parameterize).present?
+ (param = result.squish.parameterize.truncate(20, separator: /-/, omission: "")).present?
"#{default}-#{param}"
else
default
@@ -109,5 +153,43 @@ module ActiveRecord
end
end
end
+
+ private
+ # Detects if the value before type cast
+ # can be used to generate a cache_version.
+ #
+ # The fast cache version only works with a
+ # string value directly from the database.
+ #
+ # We must also check that the timestamp format has not been changed
+ # and that the timezone is UTC; otherwise we cannot apply our
+ # transformations correctly.
+ def can_use_fast_cache_version?(timestamp)
+ timestamp.is_a?(String) &&
+ cache_timestamp_format == :usec &&
+ default_timezone == :utc &&
+ !updated_at_came_from_user?
+ end
+
+ # Converts a raw database string to `:usec`
+ # format.
+ #
+ # Example:
+ #
+ # timestamp = "2018-10-15 20:02:15.266505"
+ # raw_timestamp_to_cache_version(timestamp)
+ # # => "20181015200215266505"
+ #
+ # Postgres truncates trailing zeros
+ # (https://github.com/postgres/postgres/commit/3e1beda2cde3495f41290e1ece5d544525810214),
+ # so we pad the output with zeros to account for this.
+ def raw_timestamp_to_cache_version(timestamp)
+ key = timestamp.delete("- :.")
+ if key.length < 20
+ key.ljust(20, "0")
+ else
+ key
+ end
+ end
end
end
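A sketch of the stable-key/changing-version split implemented above, assuming a hypothetical Product model with an updated_at column; the digit strings are illustrative values in the :usec format:

    Product.cache_versioning = true

    product = Product.find(5)
    product.cache_key               # => "products/5"                       (stable)
    product.cache_version           # => "20181015200215266505"             (changes when updated_at changes)
    product.cache_key_with_version  # => "products/5-20181015200215266505"

    Product.cache_versioning = false
    product.cache_key               # => "products/5-20181015200215266505"  (legacy key embedding the timestamp)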
diff --git a/activerecord/lib/active_record/internal_metadata.rb b/activerecord/lib/active_record/internal_metadata.rb
index 81db96bffd..88b0c828ae 100644
--- a/activerecord/lib/active_record/internal_metadata.rb
+++ b/activerecord/lib/active_record/internal_metadata.rb
@@ -1,11 +1,17 @@
-require 'active_record/scoping/default'
-require 'active_record/scoping/named'
+# frozen_string_literal: true
+
+require "active_record/scoping/default"
+require "active_record/scoping/named"
module ActiveRecord
# This class is used to create a table that keeps track of values and keys such
# as which environment migrations were run in.
class InternalMetadata < ActiveRecord::Base # :nodoc:
class << self
+ def _internal?
+ true
+ end
+
def primary_key
"key"
end
@@ -14,12 +20,8 @@ module ActiveRecord
"#{table_name_prefix}#{ActiveRecord::Base.internal_metadata_table_name}#{table_name_suffix}"
end
- def original_table_name
- "#{table_name_prefix}active_record_internal_metadatas#{table_name_suffix}"
- end
-
def []=(key, value)
- first_or_initialize(key: key).update_attributes!(value: value)
+ find_or_initialize_by(key: key).update!(value: value)
end
def [](key)
@@ -27,20 +29,11 @@ module ActiveRecord
end
def table_exists?
- ActiveSupport::Deprecation.silence { connection.table_exists?(table_name) }
- end
-
- def original_table_exists?
- # This method will be removed in Rails 5.1
- # Since it is only necessary when `active_record_internal_metadatas` could exist
- ActiveSupport::Deprecation.silence { connection.table_exists?(original_table_name) }
+ connection.table_exists?(table_name)
end
# Creates an internal metadata table with columns +key+ and +value+
def create_table
- if original_table_exists?
- connection.rename_table(original_table_name, table_name)
- end
unless table_exists?
key_options = connection.internal_string_options_for_primary_key
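Usage sketch of this internal key/value store (it is :nodoc: and internal to Active Record, shown only for illustration of the []= / [] methods above):

    ActiveRecord::InternalMetadata.create_table
    ActiveRecord::InternalMetadata[:environment] = "development"
    ActiveRecord::InternalMetadata[:environment]   # => "development"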
diff --git a/activerecord/lib/active_record/legacy_yaml_adapter.rb b/activerecord/lib/active_record/legacy_yaml_adapter.rb
index 89dee58423..ffa095dd94 100644
--- a/activerecord/lib/active_record/legacy_yaml_adapter.rb
+++ b/activerecord/lib/active_record/legacy_yaml_adapter.rb
@@ -1,12 +1,14 @@
+# frozen_string_literal: true
+
module ActiveRecord
module LegacyYamlAdapter
def self.convert(klass, coder)
return coder unless coder.is_a?(Psych::Coder)
case coder["active_record_yaml_version"]
- when 1 then coder
+ when 1, 2 then coder
else
- if coder["attributes"].is_a?(AttributeSet)
+ if coder["attributes"].is_a?(ActiveModel::AttributeSet)
Rails420.convert(klass, coder)
else
Rails41.convert(klass, coder)
diff --git a/activerecord/lib/active_record/locking/optimistic.rb b/activerecord/lib/active_record/locking/optimistic.rb
index 2336d23a1c..4a3a31fc95 100644
--- a/activerecord/lib/active_record/locking/optimistic.rb
+++ b/activerecord/lib/active_record/locking/optimistic.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
module ActiveRecord
module Locking
# == What is Optimistic Locking
@@ -51,8 +53,7 @@ module ActiveRecord
extend ActiveSupport::Concern
included do
- class_attribute :lock_optimistically, instance_writer: false
- self.lock_optimistically = true
+ class_attribute :lock_optimistically, instance_writer: false, default: true
end
def locking_enabled? #:nodoc:
@@ -60,13 +61,7 @@ module ActiveRecord
end
private
- def increment_lock
- lock_col = self.class.locking_column
- previous_lock_value = send(lock_col).to_i
- send(lock_col + '=', previous_lock_value + 1)
- end
-
- def _create_record(attribute_names = self.attribute_names, *) # :nodoc:
+ def _create_record(attribute_names = self.attribute_names)
if locking_enabled?
# We always want to persist the locking version, even if we don't detect
# a change from the default, since the database might have no default
@@ -75,127 +70,128 @@ module ActiveRecord
super
end
- def _update_record(attribute_names = self.attribute_names) #:nodoc:
+ def _touch_row(attribute_names, time)
+ super
+ ensure
+ clear_attribute_change(self.class.locking_column) if locking_enabled?
+ end
+
+ def _update_row(attribute_names, attempted_action = "update")
return super unless locking_enabled?
- return 0 if attribute_names.empty?
- lock_col = self.class.locking_column
- previous_lock_value = send(lock_col).to_i
- increment_lock
+ begin
+ locking_column = self.class.locking_column
+ previous_lock_value = read_attribute_before_type_cast(locking_column)
+ attribute_names << locking_column
- attribute_names += [lock_col]
- attribute_names.uniq!
+ self[locking_column] += 1
- begin
- relation = self.class.unscoped
-
- affected_rows = relation.where(
- self.class.primary_key => id,
- lock_col => previous_lock_value,
- ).update_all(
- attributes_for_update(attribute_names).map do |name|
- [name, _read_attribute(name)]
- end.to_h
+ affected_rows = self.class._update_record(
+ attributes_with_values(attribute_names),
+ self.class.primary_key => id_in_database,
+ locking_column => previous_lock_value
)
- unless affected_rows == 1
- raise ActiveRecord::StaleObjectError.new(self, "update")
+ if affected_rows != 1
+ raise ActiveRecord::StaleObjectError.new(self, attempted_action)
end
affected_rows
- # If something went wrong, revert the version.
+ # If something went wrong, revert the locking_column value.
rescue Exception
- send(lock_col + '=', previous_lock_value)
+ self[locking_column] = previous_lock_value.to_i
raise
end
end
def destroy_row
- affected_rows = super
+ return super unless locking_enabled?
+
+ locking_column = self.class.locking_column
- if locking_enabled? && affected_rows != 1
+ affected_rows = self.class._delete_record(
+ self.class.primary_key => id_in_database,
+ locking_column => read_attribute_before_type_cast(locking_column)
+ )
+
+ if affected_rows != 1
raise ActiveRecord::StaleObjectError.new(self, "destroy")
end
affected_rows
end
- def relation_for_destroy
- relation = super
+ module ClassMethods
+ DEFAULT_LOCKING_COLUMN = "lock_version"
- if locking_enabled?
- locking_column = self.class.locking_column
- relation = relation.where(locking_column => _read_attribute(locking_column))
+ # Returns true if the +lock_optimistically+ flag is set to true
+ # (which it is, by default) and the table includes the
+ # +locking_column+ column (defaults to +lock_version+).
+ def locking_enabled?
+ lock_optimistically && columns_hash[locking_column]
end
- relation
- end
-
- module ClassMethods
- DEFAULT_LOCKING_COLUMN = 'lock_version'
-
- # Returns true if the +lock_optimistically+ flag is set to true
- # (which it is, by default) and the table includes the
- # +locking_column+ column (defaults to +lock_version+).
- def locking_enabled?
- lock_optimistically && columns_hash[locking_column]
- end
-
- # Set the column to use for optimistic locking. Defaults to +lock_version+.
- def locking_column=(value)
- reload_schema_from_cache
- @locking_column = value.to_s
- end
+ # Set the column to use for optimistic locking. Defaults to +lock_version+.
+ def locking_column=(value)
+ reload_schema_from_cache
+ @locking_column = value.to_s
+ end
- # The version column used for optimistic locking. Defaults to +lock_version+.
- def locking_column
- reset_locking_column unless defined?(@locking_column)
- @locking_column
- end
+ # The version column used for optimistic locking. Defaults to +lock_version+.
+ def locking_column
+ @locking_column = DEFAULT_LOCKING_COLUMN unless defined?(@locking_column)
+ @locking_column
+ end
- # Reset the column used for optimistic locking back to the +lock_version+ default.
- def reset_locking_column
- self.locking_column = DEFAULT_LOCKING_COLUMN
- end
+ # Reset the column used for optimistic locking back to the +lock_version+ default.
+ def reset_locking_column
+ self.locking_column = DEFAULT_LOCKING_COLUMN
+ end
- # Make sure the lock version column gets updated when counters are
- # updated.
- def update_counters(id, counters)
- counters = counters.merge(locking_column => 1) if locking_enabled?
- super
- end
+ # Make sure the lock version column gets updated when counters are
+ # updated.
+ def update_counters(id, counters)
+ counters = counters.merge(locking_column => 1) if locking_enabled?
+ super
+ end
- private
-
- # We need to apply this decorator here, rather than on module inclusion. The closure
- # created by the matcher would otherwise evaluate for `ActiveRecord::Base`, not the
- # sub class being decorated. As such, changes to `lock_optimistically`, or
- # `locking_column` would not be picked up.
- def inherited(subclass)
- subclass.class_eval do
- is_lock_column = ->(name, _) { lock_optimistically && name == locking_column }
- decorate_matching_attribute_types(is_lock_column, :_optimistic_locking) do |type|
- LockingType.new(type)
+ private
+
+ # We need to apply this decorator here, rather than on module inclusion. The closure
+ # created by the matcher would otherwise evaluate for `ActiveRecord::Base`, not the
+ # sub class being decorated. As such, changes to `lock_optimistically`, or
+ # `locking_column` would not be picked up.
+ def inherited(subclass)
+ subclass.class_eval do
+ is_lock_column = ->(name, _) { lock_optimistically && name == locking_column }
+ decorate_matching_attribute_types(is_lock_column, "_optimistic_locking") do |type|
+ LockingType.new(type)
+ end
+ end
+ super
end
- end
- super
end
- end
end
+ # In de/serialize we change `nil` to 0, so that `nil` values can be passed
+ # to `lock_version` without raising `ActiveRecord::StaleObjectError`
+ # when the record is updated.
class LockingType < DelegateClass(Type::Value) # :nodoc:
def deserialize(value)
- # `nil` *should* be changed to 0
+ super.to_i
+ end
+
+ def serialize(value)
super.to_i
end
def init_with(coder)
- __setobj__(coder['subtype'])
+ __setobj__(coder["subtype"])
end
def encode_with(coder)
- coder['subtype'] = __getobj__
+ coder["subtype"] = __getobj__
end
end
end
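The rewritten _update_row and destroy_row above still surface conflicts as ActiveRecord::StaleObjectError. A minimal sketch, assuming a hypothetical people table with an integer lock_version column defaulting to 0:

    p1 = Person.find(1)
    p2 = Person.find(1)

    p1.update(first_name: "Michael")  # UPDATE ... WHERE id = 1 AND lock_version = 0; bumps lock_version to 1
    p2.update(first_name: "Other")    # matches 0 rows, so ActiveRecord::StaleObjectError is raised

    p2.reload                         # picks up lock_version 1; subsequent updates succeed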
diff --git a/activerecord/lib/active_record/locking/pessimistic.rb b/activerecord/lib/active_record/locking/pessimistic.rb
index 8ecdf76b72..130ef8a330 100644
--- a/activerecord/lib/active_record/locking/pessimistic.rb
+++ b/activerecord/lib/active_record/locking/pessimistic.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
module ActiveRecord
module Locking
# Locking::Pessimistic provides support for row-level locking using
@@ -12,9 +14,9 @@ module ActiveRecord
# of your own such as 'LOCK IN SHARE MODE' or 'FOR UPDATE NOWAIT'. Example:
#
# Account.transaction do
- # # select * from accounts where name = 'shugo' limit 1 for update
- # shugo = Account.where("name = 'shugo'").lock(true).first
- # yuko = Account.where("name = 'yuko'").lock(true).first
+ # # select * from accounts where name = 'shugo' limit 1 for update nowait
+ # shugo = Account.lock("FOR UPDATE NOWAIT").find_by(name: "shugo")
+ # yuko = Account.lock("FOR UPDATE NOWAIT").find_by(name: "yuko")
# shugo.balance -= 100
# shugo.save!
# yuko.balance += 100
@@ -51,15 +53,25 @@ module ActiveRecord
# end
#
# Database-specific information on row locking:
- # MySQL: http://dev.mysql.com/doc/refman/5.7/en/innodb-locking-reads.html
- # PostgreSQL: http://www.postgresql.org/docs/current/interactive/sql-select.html#SQL-FOR-UPDATE-SHARE
+ # MySQL: https://dev.mysql.com/doc/refman/5.7/en/innodb-locking-reads.html
+ # PostgreSQL: https://www.postgresql.org/docs/current/interactive/sql-select.html#SQL-FOR-UPDATE-SHARE
module Pessimistic
# Obtain a row lock on this record. Reloads the record to obtain the requested
# lock. Pass an SQL locking clause to append the end of the SELECT statement
# or pass true for "FOR UPDATE" (the default, an exclusive row lock). Returns
# the locked record.
def lock!(lock = true)
- reload(:lock => lock) if persisted?
+ if persisted?
+ if has_changes_to_save?
+ raise(<<-MSG.squish)
+ Locking a record with unpersisted changes is not supported. Use
+ `save` to persist the changes, or `reload` to discard them
+ explicitly.
+ MSG
+ end
+
+ reload(lock: lock)
+ end
self
end
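A sketch of the new guard added to #lock! above, using the Account model from this module's own documentation; the second block shows the usual locking pattern for comparison:

    account = Account.first
    account.balance += 100
    account.lock!                  # raises RuntimeError: locking a record with unpersisted changes is not supported

    Account.transaction do
      account = Account.lock.first # SELECT ... FOR UPDATE
      account.update!(balance: account.balance + 100)
    end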
diff --git a/activerecord/lib/active_record/log_subscriber.rb b/activerecord/lib/active_record/log_subscriber.rb
index efa2a4df02..6b84431343 100644
--- a/activerecord/lib/active_record/log_subscriber.rb
+++ b/activerecord/lib/active_record/log_subscriber.rb
@@ -1,7 +1,11 @@
+# frozen_string_literal: true
+
module ActiveRecord
class LogSubscriber < ActiveSupport::LogSubscriber
IGNORE_PAYLOAD_NAMES = ["SCHEMA", "EXPLAIN"]
+ class_attribute :backtrace_cleaner, default: ActiveSupport::BacktraceCleaner.new
+
def self.runtime=(value)
ActiveRecord::RuntimeRegistry.sql_runtime = value
end
@@ -15,36 +19,24 @@ module ActiveRecord
rt
end
- def initialize
- super
- @odd = false
- end
-
- def render_bind(attribute)
- value = if attribute.type.binary? && attribute.value
- "<#{attribute.value.bytesize} bytes of binary data>"
- else
- attribute.value_for_database
- end
-
- [attribute.name, value]
- end
-
def sql(event)
- return unless logger.debug?
-
self.class.runtime += event.duration
+ return unless logger.debug?
payload = event.payload
return if IGNORE_PAYLOAD_NAMES.include?(payload[:name])
name = "#{payload[:name]} (#{event.duration.round(1)}ms)"
+ name = "CACHE #{name}" if payload[:cached]
sql = payload[:sql]
binds = nil
unless (payload[:binds] || []).empty?
- binds = " " + payload[:binds].map { |attr| render_bind(attr) }.inspect
+ casted_params = type_casted_binds(payload[:type_casted_binds])
+ binds = " " + payload[:binds].zip(casted_params).map { |attr, value|
+ render_bind(attr, value)
+ }.inspect
end
name = colorize_payload_name(name, payload[:name])
@@ -54,17 +46,30 @@ module ActiveRecord
end
private
+ def type_casted_binds(casted_binds)
+ casted_binds.respond_to?(:call) ? casted_binds.call : casted_binds
+ end
+
+ def render_bind(attr, value)
+ if attr.is_a?(Array)
+ attr = attr.first
+ elsif attr.type.binary? && attr.value
+ value = "<#{attr.value_for_database.to_s.bytesize} bytes of binary data>"
+ end
- def colorize_payload_name(name, payload_name)
- if payload_name.blank? || payload_name == "SQL" # SQL vs Model Load/Exists
- color(name, MAGENTA, true)
- else
- color(name, CYAN, true)
+ [attr && attr.name, value]
end
- end
- def sql_color(sql)
- case sql
+ def colorize_payload_name(name, payload_name)
+ if payload_name.blank? || payload_name == "SQL" # SQL vs Model Load/Exists
+ color(name, MAGENTA, true)
+ else
+ color(name, CYAN, true)
+ end
+ end
+
+ def sql_color(sql)
+ case sql
when /\A\s*rollback/mi
RED
when /select .*for update/mi, /\A\s*lock/mi
@@ -81,12 +86,32 @@ module ActiveRecord
CYAN
else
MAGENTA
+ end
end
- end
- def logger
- ActiveRecord::Base.logger
- end
+ def logger
+ ActiveRecord::Base.logger
+ end
+
+ def debug(progname = nil, &block)
+ return unless super
+
+ if ActiveRecord::Base.verbose_query_logs
+ log_query_source
+ end
+ end
+
+ def log_query_source
+ source = extract_query_source_location(caller)
+
+ if source
+ logger.debug(" ↳ #{source}")
+ end
+ end
+
+ def extract_query_source_location(locations)
+ backtrace_cleaner.clean(locations).first
+ end
end
end
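Sketch of the query-source logging wired in above via ActiveRecord::Base.verbose_query_logs; the configuration key and the log lines are illustrative:

    # config/environments/development.rb
    config.active_record.verbose_query_logs = true

    # Subsequent debug log entries are annotated with the application frame that triggered them:
    #   Post Load (0.4ms)  SELECT "posts".* FROM "posts" LIMIT $1  [["LIMIT", 11]]
    #   ↳ app/controllers/posts_controller.rb:7:in `index'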
diff --git a/activerecord/lib/active_record/middleware/database_selector.rb b/activerecord/lib/active_record/middleware/database_selector.rb
new file mode 100644
index 0000000000..b5b5df074c
--- /dev/null
+++ b/activerecord/lib/active_record/middleware/database_selector.rb
@@ -0,0 +1,75 @@
+# frozen_string_literal: true
+
+require "active_record/middleware/database_selector/resolver"
+
+module ActiveRecord
+ module Middleware
+ # The DatabaseSelector Middleware provides a framework for automatically
+ # swapping from the primary to the replica database connection. Rails
+ # provides a basic framework to determine when to swap and allows for
+ # applications to write custom strategy classes to override the default
+ # behavior.
+ #
+ # The resolver class defines when the application should switch (i.e. read
+ # from the primary if a write occurred less than 2 seconds ago), and the
+ # resolver context class sets a value that helps the resolver class
+ # decide when to switch.
+ #
+ # Rails default middleware uses the request's session to set a timestamp
+ # that informs the application when to read from a primary or read from a
+ # replica.
+ #
+ # To use the DatabaseSelector in your application with default settings add
+ # the following options to your environment config:
+ #
+ # config.active_record.database_selector = { delay: 2.seconds }
+ # config.active_record.database_resolver = ActiveRecord::Middleware::DatabaseSelector::Resolver
+ # config.active_record.database_resolver_context = ActiveRecord::Middleware::DatabaseSelector::Resolver::Session
+ #
+ # New applications will include these lines commented out in production.rb.
+ #
+ # The default behavior can be changed by setting the config options to a
+ # custom class:
+ #
+ # config.active_record.database_selector = { delay: 2.seconds }
+ # config.active_record.database_resolver = MyResolver
+ # config.active_record.database_resolver_context = MyResolver::MySession
+ class DatabaseSelector
+ def initialize(app, resolver_klass = Resolver, context_klass = Resolver::Session, options = {})
+ @app = app
+ @resolver_klass = resolver_klass
+ @context_klass = context_klass
+ @options = options
+ end
+
+ attr_reader :resolver_klass, :context_klass, :options
+
+ # Middleware that determines which database connection to use in a multiple
+ # database application.
+ def call(env)
+ request = ActionDispatch::Request.new(env)
+
+ select_database(request) do
+ @app.call(env)
+ end
+ end
+
+ private
+
+ def select_database(request, &blk)
+ context = context_klass.call(request)
+ resolver = resolver_klass.call(context, options)
+
+ if reading_request?(request)
+ resolver.read(&blk)
+ else
+ resolver.write(&blk)
+ end
+ end
+
+ def reading_request?(request)
+ request.get? || request.head?
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/middleware/database_selector/resolver.rb b/activerecord/lib/active_record/middleware/database_selector/resolver.rb
new file mode 100644
index 0000000000..80b8cd7cae
--- /dev/null
+++ b/activerecord/lib/active_record/middleware/database_selector/resolver.rb
@@ -0,0 +1,90 @@
+# frozen_string_literal: true
+
+require "active_record/middleware/database_selector/resolver/session"
+
+module ActiveRecord
+ module Middleware
+ class DatabaseSelector
+ # The Resolver class is used by the DatabaseSelector middleware to
+ # determine which database the request should use.
+ #
+ # To change the behavior of the Resolver class in your application,
+ # create a custom resolver class that inherits from
+ # DatabaseSelector::Resolver and implements the methods that need to
+ # be changed.
+ #
+ # By default the Resolver class will send read traffic to the replica
+ # if it's been 2 seconds since the last write.
+ class Resolver # :nodoc:
+ SEND_TO_REPLICA_DELAY = 2.seconds
+
+ def self.call(context, options = {})
+ new(context, options)
+ end
+
+ def initialize(context, options = {})
+ @context = context
+ @options = options
+ @delay = @options && @options[:delay] ? @options[:delay] : SEND_TO_REPLICA_DELAY
+ @instrumenter = ActiveSupport::Notifications.instrumenter
+ end
+
+ attr_reader :context, :delay, :instrumenter
+
+ def read(&blk)
+ if read_from_primary?
+ read_from_primary(&blk)
+ else
+ read_from_replica(&blk)
+ end
+ end
+
+ def write(&blk)
+ write_to_primary(&blk)
+ end
+
+ private
+
+ def read_from_primary(&blk)
+ ActiveRecord::Base.connection.while_preventing_writes do
+ ActiveRecord::Base.connected_to(role: ActiveRecord::Base.writing_role) do
+ instrumenter.instrument("database_selector.active_record.read_from_primary") do
+ yield
+ end
+ end
+ end
+ end
+
+ def read_from_replica(&blk)
+ ActiveRecord::Base.connected_to(role: ActiveRecord::Base.reading_role) do
+ instrumenter.instrument("database_selector.active_record.read_from_replica") do
+ yield
+ end
+ end
+ end
+
+ def write_to_primary(&blk)
+ ActiveRecord::Base.connected_to(role: ActiveRecord::Base.writing_role) do
+ instrumenter.instrument("database_selector.active_record.wrote_to_primary") do
+ yield
+ ensure
+ context.update_last_write_timestamp
+ end
+ end
+ end
+
+ def read_from_primary?
+ !time_since_last_write_ok?
+ end
+
+ def send_to_replica_delay
+ delay
+ end
+
+ def time_since_last_write_ok?
+ Time.now - context.last_write_timestamp >= send_to_replica_delay
+ end
+ end
+ end
+ end
+end
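A hypothetical custom resolver of the kind the documentation above suggests; it only changes the write-to-read delay by overriding the private send_to_replica_delay hook (the :delay option would achieve the same effect):

    class MyResolver < ActiveRecord::Middleware::DatabaseSelector::Resolver
      private
        def send_to_replica_delay
          10.seconds
        end
    end

    # config/environments/production.rb
    config.active_record.database_resolver = MyResolver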
diff --git a/activerecord/lib/active_record/middleware/database_selector/resolver/session.rb b/activerecord/lib/active_record/middleware/database_selector/resolver/session.rb
new file mode 100644
index 0000000000..df7af054b7
--- /dev/null
+++ b/activerecord/lib/active_record/middleware/database_selector/resolver/session.rb
@@ -0,0 +1,45 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ module Middleware
+ class DatabaseSelector
+ class Resolver
+ # The session class is used by the DatabaseSelector::Resolver to save
+ # timestamps of the last write in the session.
+ #
+ # The last_write is used to determine whether it's safe to read
+ # from the replica or whether the request needs to be sent to the primary.
+ class Session # :nodoc:
+ def self.call(request)
+ new(request.session)
+ end
+
+ # Converts time to a timestamp that represents milliseconds since
+ # epoch.
+ def self.convert_time_to_timestamp(time)
+ time.to_i * 1000 + time.usec / 1000
+ end
+
+ # Converts milliseconds since epoch timestamp into a time object.
+ def self.convert_timestamp_to_time(timestamp)
+ timestamp ? Time.at(timestamp / 1000, (timestamp % 1000) * 1000) : Time.at(0)
+ end
+
+ def initialize(session)
+ @session = session
+ end
+
+ attr_reader :session
+
+ def last_write_timestamp
+ self.class.convert_timestamp_to_time(session[:last_write])
+ end
+
+ def update_last_write_timestamp
+ session[:last_write] = self.class.convert_time_to_timestamp(Time.now)
+ end
+ end
+ end
+ end
+ end
+end
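Round-trip sketch of the millisecond helpers above; the values assume UTC and are computed from the shown conversions:

    session_class = ActiveRecord::Middleware::DatabaseSelector::Resolver::Session

    t  = Time.utc(2019, 4, 1, 12, 0, 0, 500_000)      # 2019-04-01 12:00:00.5 UTC
    ms = session_class.convert_time_to_timestamp(t)   # => 1554120000500
    session_class.convert_timestamp_to_time(ms)       # => 2019-04-01 12:00:00.5 (sub-millisecond precision dropped)
    session_class.convert_timestamp_to_time(nil)      # => Time.at(0), i.e. treated as "never written"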
diff --git a/activerecord/lib/active_record/migration.rb b/activerecord/lib/active_record/migration.rb
index a5c2985132..997b7f763a 100644
--- a/activerecord/lib/active_record/migration.rb
+++ b/activerecord/lib/active_record/migration.rb
@@ -1,5 +1,9 @@
+# frozen_string_literal: true
+
+require "benchmark"
+require "set"
+require "zlib"
require "active_support/core_ext/module/attribute_accessors"
-require 'set'
module ActiveRecord
class MigrationError < ActiveRecordError#:nodoc:
@@ -19,7 +23,7 @@ module ActiveRecord
# t.string :zipcode
# end
#
- # execute <<-SQL
+ # execute <<~SQL
# ALTER TABLE distributors
# ADD CONSTRAINT zipchk
# CHECK (char_length(zipcode) = 5) NO INHERIT;
@@ -37,7 +41,7 @@ module ActiveRecord
# t.string :zipcode
# end
#
- # execute <<-SQL
+ # execute <<~SQL
# ALTER TABLE distributors
# ADD CONSTRAINT zipchk
# CHECK (char_length(zipcode) = 5) NO INHERIT;
@@ -45,7 +49,7 @@ module ActiveRecord
# end
#
# def down
- # execute <<-SQL
+ # execute <<~SQL
# ALTER TABLE distributors
# DROP CONSTRAINT zipchk
# SQL
@@ -64,7 +68,7 @@ module ActiveRecord
#
# reversible do |dir|
# dir.up do
- # execute <<-SQL
+ # execute <<~SQL
# ALTER TABLE distributors
# ADD CONSTRAINT zipchk
# CHECK (char_length(zipcode) = 5) NO INHERIT;
@@ -72,7 +76,7 @@ module ActiveRecord
# end
#
# dir.down do
- # execute <<-SQL
+ # execute <<~SQL
# ALTER TABLE distributors
# DROP CONSTRAINT zipchk
# SQL
@@ -126,9 +130,9 @@ module ActiveRecord
class PendingMigrationError < MigrationError#:nodoc:
def initialize(message = nil)
if !message && defined?(Rails.env)
- super("Migrations are pending. To resolve this issue, run:\n\n\tbin/rails db:migrate RAILS_ENV=#{::Rails.env}")
+ super("Migrations are pending. To resolve this issue, run:\n\n rails db:migrate RAILS_ENV=#{::Rails.env}")
elsif !message
- super("Migrations are pending. To resolve this issue, run:\n\n\tbin/rails db:migrate")
+ super("Migrations are pending. To resolve this issue, run:\n\n rails db:migrate")
else
super
end
@@ -136,7 +140,8 @@ module ActiveRecord
end
class ConcurrentMigrationError < MigrationError #:nodoc:
- DEFAULT_MESSAGE = "Cannot run migrations because another migration process is currently running.".freeze
+ DEFAULT_MESSAGE = "Cannot run migrations because another migration process is currently running."
+ RELEASE_LOCK_FAILED_MESSAGE = "Failed to release advisory lock"
def initialize(message = DEFAULT_MESSAGE)
super
@@ -145,7 +150,7 @@ module ActiveRecord
class NoEnvironmentInSchemaError < MigrationError #:nodoc:
def initialize
- msg = "Environment data not found in the schema. To resolve this issue, run: \n\n\tbin/rails db:environment:set"
+ msg = "Environment data not found in the schema. To resolve this issue, run: \n\n rails db:environment:set"
if defined?(Rails.env)
super("#{msg} RAILS_ENV=#{::Rails.env}")
else
@@ -156,8 +161,8 @@ module ActiveRecord
class ProtectedEnvironmentError < ActiveRecordError #:nodoc:
def initialize(env = "production")
- msg = "You are attempting to run a destructive action against your '#{env}' database\n"
- msg << "If you are sure you want to continue, run the same command with the environment variable\n"
+ msg = +"You are attempting to run a destructive action against your '#{env}' database.\n"
+ msg << "If you are sure you want to continue, run the same command with the environment variable:\n"
msg << "DISABLE_DATABASE_ENVIRONMENT_CHECK=1"
super(msg)
end
@@ -165,14 +170,14 @@ module ActiveRecord
class EnvironmentMismatchError < ActiveRecordError
def initialize(current: nil, stored: nil)
- msg = "You are attempting to modify a database that was last run in `#{ stored }` environment.\n"
- msg << "You are running in `#{ current }` environment."
+ msg = +"You are attempting to modify a database that was last run in `#{ stored }` environment.\n"
+ msg << "You are running in `#{ current }` environment. "
msg << "If you are sure you want to continue, first set the environment using:\n\n"
- msg << "\tbin/rails db:environment:set"
+ msg << " rails db:environment:set"
if defined?(Rails.env)
- super("#{msg} RAILS_ENV=#{::Rails.env}")
+ super("#{msg} RAILS_ENV=#{::Rails.env}\n\n")
else
- super(msg)
+ super("#{msg}\n\n")
end
end
end
@@ -276,8 +281,10 @@ module ActiveRecord
#
# * <tt>change_column(table_name, column_name, type, options)</tt>: Changes
# the column to a different type using the same parameters as add_column.
- # * <tt>change_column_default(table_name, column_name, default)</tt>: Sets a
- # default value for +column_name+ definded by +default+ on +table_name+.
+ # * <tt>change_column_default(table_name, column_name, default_or_changes)</tt>:
+ # Sets a default value for +column_name+ defined by +default_or_changes+ on
+ # +table_name+. Passing a hash containing <tt>:from</tt> and <tt>:to</tt>
+ # as +default_or_changes+ will make this change reversible in the migration.
# * <tt>change_column_null(table_name, column_name, null, default = nil)</tt>:
# Sets or removes a +NOT NULL+ constraint on +column_name+. The +null+ flag
# indicates whether the value can be +NULL+. See
@@ -301,7 +308,7 @@ module ActiveRecord
# named +column_name+ from the table called +table_name+.
# * <tt>remove_columns(table_name, *column_names)</tt>: Removes the given
# columns from the table definition.
- # * <tt>remove_foreign_key(from_table, options_or_to_table)</tt>: Removes the
+ # * <tt>remove_foreign_key(from_table, to_table = nil, **options)</tt>: Removes the
# given foreign key from the table called +table_name+.
# * <tt>remove_index(table_name, column: column_names)</tt>: Removes the index
# specified by +column_names+.
@@ -345,13 +352,13 @@ module ActiveRecord
# <tt>rails db:migrate</tt>. This will update the database by running all of the
# pending migrations, creating the <tt>schema_migrations</tt> table
# (see "About the schema_migrations table" section below) if missing. It will also
- # invoke the db:schema:dump task, which will update your db/schema.rb file
+ # invoke the db:schema:dump command, which will update your db/schema.rb file
# to match the structure of your database.
#
# To roll the database back to a previous migration version, use
- # <tt>rails db:migrate VERSION=X</tt> where <tt>X</tt> is the version to which
+ # <tt>rails db:rollback VERSION=X</tt> where <tt>X</tt> is the version to which
# you wish to downgrade. Alternatively, you can also use the STEP option if you
- # wish to rollback last few migrations. <tt>rails db:migrate STEP=2</tt> will rollback
+ # wish to roll back the last few migrations. <tt>rails db:rollback STEP=2</tt> will roll back
# the latest two migrations.
#
# If any of the migrations throw an <tt>ActiveRecord::IrreversibleMigration</tt> exception,
@@ -509,8 +516,8 @@ module ActiveRecord
# Remember that you can still open your own transactions, even if you
# are in a Migration with <tt>self.disable_ddl_transaction!</tt>.
class Migration
- autoload :CommandRecorder, 'active_record/migration/command_recorder'
- autoload :Compatibility, 'active_record/migration/compatibility'
+ autoload :CommandRecorder, "active_record/migration/command_recorder"
+ autoload :Compatibility, "active_record/migration/compatibility"
# This must be defined before the inherited hook, below
class Current < Migration # :nodoc:
@@ -519,22 +526,19 @@ module ActiveRecord
def self.inherited(subclass) # :nodoc:
super
if subclass.superclass == Migration
- subclass.include Compatibility::Legacy
+ raise StandardError, "Directly inheriting from ActiveRecord::Migration is not supported. " \
+ "Please specify the Rails release the migration was written for:\n" \
+ "\n" \
+ " class #{subclass} < ActiveRecord::Migration[4.2]"
end
end
def self.[](version)
- version = version.to_s
- name = "V#{version.tr('.', '_')}"
- unless Compatibility.const_defined?(name)
- versions = Compatibility.constants.grep(/\AV[0-9_]+\z/).map { |s| s.to_s.delete('V').tr('_', '.').inspect }
- raise ArgumentError, "Unknown migration version #{version.inspect}; expected one of #{versions.sort.join(', ')}"
- end
- Compatibility.const_get(name)
+ Compatibility.find(version)
end
def self.current_version
- Rails.version.to_f
+ ActiveRecord::VERSION::STRING.to_f
end
MigrationFilenameRegexp = /\A([0-9]+)_([_a-z0-9]*)\.?([_a-z0-9]*)?\.rb\z/ # :nodoc:
@@ -548,21 +552,19 @@ module ActiveRecord
end
def call(env)
- if connection.supports_migrations?
- mtime = ActiveRecord::Migrator.last_migration.mtime.to_i
- if @last_check < mtime
- ActiveRecord::Migration.check_pending!(connection)
- @last_check = mtime
- end
+ mtime = ActiveRecord::Base.connection.migration_context.last_migration.mtime.to_i
+ if @last_check < mtime
+ ActiveRecord::Migration.check_pending!(connection)
+ @last_check = mtime
end
@app.call(env)
end
private
- def connection
- ActiveRecord::Base.connection
- end
+ def connection
+ ActiveRecord::Base.connection
+ end
end
class << self
@@ -575,13 +577,14 @@ module ActiveRecord
      # Raises an <tt>ActiveRecord::PendingMigrationError</tt> if any migrations are pending.
def check_pending!(connection = Base.connection)
- raise ActiveRecord::PendingMigrationError if ActiveRecord::Migrator.needs_migration?(connection)
+ raise ActiveRecord::PendingMigrationError if connection.migration_context.needs_migration?
end
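As a rough usage sketch (assuming an established database connection), a script can fail fast on pending migrations like this:

    begin
      ActiveRecord::Migration.check_pending!
      puts "schema is up to date"
    rescue ActiveRecord::PendingMigrationError
      abort "pending migrations -- run `rails db:migrate` first"
    end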
def load_schema_if_pending!
- if ActiveRecord::Migrator.needs_migration? || !ActiveRecord::Migrator.any_migrations?
+ if Base.connection.migration_context.needs_migration? || !Base.connection.migration_context.any_migrations?
# Roundtrip to Rake to allow plugins to hook into database initialization.
- FileUtils.cd Rails.root do
+ root = defined?(ENGINE_ROOT) ? ENGINE_ROOT : Rails.root
+ FileUtils.cd(root) do
current_config = Base.connection_config
Base.clear_all_connections!
system("bin/rails db:test:prepare")
@@ -675,15 +678,13 @@ module ActiveRecord
if connection.respond_to? :revert
connection.revert { yield }
else
- recorder = CommandRecorder.new(connection)
+ recorder = command_recorder
@connection = recorder
suppress_messages do
connection.revert { yield }
end
@connection = recorder.delegate
- recorder.commands.each do |cmd, args, block|
- send(cmd, *args, &block)
- end
+ recorder.replay(self)
end
end
end
@@ -692,7 +693,7 @@ module ActiveRecord
connection.respond_to?(:reverting) && connection.reverting
end
- class ReversibleBlockHelper < Struct.new(:reverting) # :nodoc:
+ ReversibleBlockHelper = Struct.new(:reverting) do # :nodoc:
def up
yield unless reverting
end
@@ -730,7 +731,25 @@ module ActiveRecord
# end
def reversible
helper = ReversibleBlockHelper.new(reverting?)
- execute_block{ yield helper }
+ execute_block { yield helper }
+ end
+
+ # Used to specify an operation that is only run when migrating up
+ # (for example, populating a new column with its initial values).
+ #
+ # In the following example, the new column +published+ will be given
+ # the value +true+ for all existing records.
+ #
+ # class AddPublishedToPosts < ActiveRecord::Migration[5.2]
+ # def change
+ # add_column :posts, :published, :boolean, default: false
+ # up_only do
+ # execute "update posts set published = 'true'"
+ # end
+ # end
+ # end
+ def up_only
+ execute_block { yield } unless reverting?
end
# Runs the given migration classes.
@@ -772,7 +791,7 @@ module ActiveRecord
when :down then announce "reverting"
end
- time = nil
+ time = nil
ActiveRecord::Base.connection_pool.with_connection do |conn|
time = Benchmark.measure do
exec_migration(conn, direction)
@@ -800,7 +819,7 @@ module ActiveRecord
@connection = nil
end
- def write(text="")
+ def write(text = "")
puts(text) if verbose
end
@@ -810,10 +829,14 @@ module ActiveRecord
write "== %s %s" % [text, "=" * length]
end
- def say(message, subitem=false)
+ # Takes a message argument and outputs it as is.
+ # A second boolean argument can be passed to specify whether to indent or not.
+ def say(message, subitem = false)
write "#{subitem ? " ->" : "--"} #{message}"
end
+ # Outputs text along with how long it took to run its block.
+      # If the block returns an integer, it is assumed to be the number of rows affected.
def say_with_time(message)
say(message)
result = nil
@@ -823,6 +846,7 @@ module ActiveRecord
result
end
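For context, a hedged sketch of say and say_with_time called from inside a migration; the Post model and its published column are hypothetical.

    class BackfillPublishedFlag < ActiveRecord::Migration[5.2]
      def up
        say "Backfilling posts.published", true   # indented "->" output
        say_with_time "setting NULL values to false" do
          # update_all returns an Integer, which say_with_time then reports
          # as the number of rows affected.
          Post.where(published: nil).update_all(published: false)
        end
      end
    end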
+ # Takes a block as an argument and suppresses any output generated by the block.
def suppress_messages
save, self.verbose = verbose, false
yield
@@ -835,7 +859,7 @@ module ActiveRecord
end
def method_missing(method, *arguments, &block)
- arg_list = arguments.map(&:inspect) * ', '
+ arg_list = arguments.map(&:inspect) * ", "
say_with_time "#{method}(#{arg_list})" do
unless connection.respond_to? :revert
@@ -857,23 +881,25 @@ module ActiveRecord
FileUtils.mkdir_p(destination) unless File.exist?(destination)
- destination_migrations = ActiveRecord::Migrator.migrations(destination)
+ destination_migrations = ActiveRecord::MigrationContext.new(destination).migrations
last = destination_migrations.last
sources.each do |scope, path|
- source_migrations = ActiveRecord::Migrator.migrations(path)
+ source_migrations = ActiveRecord::MigrationContext.new(path).migrations
source_migrations.each do |migration|
source = File.binread(migration.filename)
inserted_comment = "# This migration comes from #{scope} (originally #{migration.version})\n"
- if /\A#.*\b(?:en)?coding:\s*\S+/ =~ source
+ magic_comments = +""
+ loop do
            # If we have a magic comment in the original migration,
            # insert our comment after the first newline (the end of the magic comment line)
            # so the magic comment keeps working.
            # Note that magic comments must be on the first line (except for a shebang).
- source[/\n/] = "\n#{inserted_comment}"
- else
- source = "#{inserted_comment}#{source}"
+ source.sub!(/\A(?:#.*\b(?:en)?coding:\s*\S+|#\s*frozen_string_literal:\s*(?:true|false)).*\n/) do |magic_comment|
+ magic_comments << magic_comment; ""
+ end || break
end
+ source = "#{magic_comments}#{inserted_comment}#{source}"
if duplicate = destination_migrations.detect { |m| m.name == migration.name }
if options[:on_skip] && duplicate.scope != scope.to_s
@@ -927,19 +953,22 @@ module ActiveRecord
end
private
- def execute_block
- if connection.respond_to? :execute_block
- super # use normal delegation to record the block
- else
- yield
+ def execute_block
+ if connection.respond_to? :execute_block
+ super # use normal delegation to record the block
+ else
+ yield
+ end
+ end
+
+ def command_recorder
+ CommandRecorder.new(connection)
end
- end
end
# MigrationProxy is used to defer loading of the actual migration classes
# until they are needed
- class MigrationProxy < Struct.new(:name, :version, :filename, :scope)
-
+ MigrationProxy = Struct.new(:name, :version, :filename, :scope) do
def initialize(name, version, filename, scope)
super
@migration = nil
@@ -965,7 +994,6 @@ module ActiveRecord
require(File.expand_path(filename))
name.constantize.new(name, version)
end
-
end
class NullMigration < MigrationProxy #:nodoc:
@@ -978,132 +1006,178 @@ module ActiveRecord
end
end
- class Migrator#:nodoc:
- class << self
- attr_writer :migrations_paths
- alias :migrations_path= :migrations_paths=
-
- def migrate(migrations_paths, target_version = nil, &block)
- case
- when target_version.nil?
- up(migrations_paths, target_version, &block)
- when current_version == 0 && target_version == 0
- []
- when current_version > target_version
- down(migrations_paths, target_version, &block)
- else
- up(migrations_paths, target_version, &block)
- end
- end
+ class MigrationContext # :nodoc:
+ attr_reader :migrations_paths
- def rollback(migrations_paths, steps=1)
- move(:down, migrations_paths, steps)
- end
+ def initialize(migrations_paths)
+ @migrations_paths = migrations_paths
+ end
- def forward(migrations_paths, steps=1)
- move(:up, migrations_paths, steps)
+ def migrate(target_version = nil, &block)
+ case
+ when target_version.nil?
+ up(target_version, &block)
+ when current_version == 0 && target_version == 0
+ []
+ when current_version > target_version
+ down(target_version, &block)
+ else
+ up(target_version, &block)
end
+ end
- def up(migrations_paths, target_version = nil)
- migrations = migrations(migrations_paths)
- migrations.select! { |m| yield m } if block_given?
+ def rollback(steps = 1)
+ move(:down, steps)
+ end
- new(:up, migrations, target_version).migrate
+ def forward(steps = 1)
+ move(:up, steps)
+ end
+
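A hedged sketch of driving the new MigrationContext API directly (the path and version below are hypothetical); normally the rails db:* tasks call these methods for you.

    context = ActiveRecord::MigrationContext.new("db/migrate")
    context.migrate                   # run all pending migrations
    context.migrate(20180501120000)   # migrate up or down to a specific version
    context.rollback(2)               # revert the last two migrations
    context.forward(1)                # re-apply one migration after a rollback
    context.needs_migration?          # => true / false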
+ def up(target_version = nil)
+ selected_migrations = if block_given?
+ migrations.select { |m| yield m }
+ else
+ migrations
end
- def down(migrations_paths, target_version = nil)
- migrations = migrations(migrations_paths)
- migrations.select! { |m| yield m } if block_given?
+ Migrator.new(:up, selected_migrations, target_version).migrate
+ end
- new(:down, migrations, target_version).migrate
+ def down(target_version = nil)
+ selected_migrations = if block_given?
+ migrations.select { |m| yield m }
+ else
+ migrations
end
- def run(direction, migrations_paths, target_version)
- new(direction, migrations(migrations_paths), target_version).run
- end
+ Migrator.new(:down, selected_migrations, target_version).migrate
+ end
- def open(migrations_paths)
- new(:up, migrations(migrations_paths), nil)
- end
+ def run(direction, target_version)
+ Migrator.new(direction, migrations, target_version).run
+ end
- def schema_migrations_table_name
- SchemaMigration.table_name
- end
+ def open
+ Migrator.new(:up, migrations, nil)
+ end
- def get_all_versions(connection = Base.connection)
- ActiveSupport::Deprecation.silence do
- if connection.table_exists?(schema_migrations_table_name)
- SchemaMigration.all.map { |x| x.version.to_i }.sort
- else
- []
- end
- end
+ def get_all_versions
+ if SchemaMigration.table_exists?
+ SchemaMigration.all_versions.map(&:to_i)
+ else
+ []
end
+ end
- def current_version(connection = Base.connection)
- get_all_versions(connection).max || 0
- end
+ def current_version
+ get_all_versions.max || 0
+ rescue ActiveRecord::NoDatabaseError
+ end
- def needs_migration?(connection = Base.connection)
- (migrations(migrations_paths).collect(&:version) - get_all_versions(connection)).size > 0
- end
+ def needs_migration?
+ (migrations.collect(&:version) - get_all_versions).size > 0
+ end
- def any_migrations?
- migrations(migrations_paths).any?
- end
+ def any_migrations?
+ migrations.any?
+ end
- def last_migration #:nodoc:
- migrations(migrations_paths).last || NullMigration.new
- end
+ def last_migration #:nodoc:
+ migrations.last || NullMigration.new
+ end
- def migrations_paths
- @migrations_paths ||= ['db/migrate']
- # just to not break things if someone uses: migrations_path = some_string
- Array(@migrations_paths)
- end
+ def migrations
+ migrations = migration_files.map do |file|
+ version, name, scope = parse_migration_filename(file)
+ raise IllegalMigrationNameError.new(file) unless version
+ version = version.to_i
+ name = name.camelize
- def match_to_migration_filename?(filename) # :nodoc:
- File.basename(filename) =~ Migration::MigrationFilenameRegexp
+ MigrationProxy.new(name, version, file, scope)
end
- def parse_migration_filename(filename) # :nodoc:
- File.basename(filename).scan(Migration::MigrationFilenameRegexp).first
+ migrations.sort_by(&:version)
+ end
+
+ def migrations_status
+ db_list = ActiveRecord::SchemaMigration.normalized_versions
+
+ file_list = migration_files.map do |file|
+ version, name, scope = parse_migration_filename(file)
+ raise IllegalMigrationNameError.new(file) unless version
+ version = ActiveRecord::SchemaMigration.normalize_migration_number(version)
+ status = db_list.delete(version) ? "up" : "down"
+ [status, version, (name + scope).humanize]
+ end.compact
+
+ db_list.map! do |version|
+ ["up", version, "********** NO FILE **********"]
end
- def migrations(paths)
- paths = Array(paths)
+ (db_list + file_list).sort_by { |_, version, _| version }
+ end
- files = Dir[*paths.map { |p| "#{p}/**/[0-9]*_*.rb" }]
+ def current_environment
+ ActiveRecord::ConnectionHandling::DEFAULT_ENV.call
+ end
- migrations = files.map do |file|
- version, name, scope = parse_migration_filename(file)
- raise IllegalMigrationNameError.new(file) unless version
- version = version.to_i
- name = name.camelize
+ def protected_environment?
+ ActiveRecord::Base.protected_environments.include?(last_stored_environment) if last_stored_environment
+ end
- MigrationProxy.new(name, version, file, scope)
- end
+ def last_stored_environment
+ return nil if current_version == 0
+ raise NoEnvironmentInSchemaError unless ActiveRecord::InternalMetadata.table_exists?
+
+ environment = ActiveRecord::InternalMetadata[:environment]
+ raise NoEnvironmentInSchemaError unless environment
+ environment
+ end
- migrations.sort_by(&:version)
+ private
+ def migration_files
+ paths = Array(migrations_paths)
+ Dir[*paths.flat_map { |path| "#{path}/**/[0-9]*_*.rb" }]
end
- private
+ def parse_migration_filename(filename)
+ File.basename(filename).scan(Migration::MigrationFilenameRegexp).first
+ end
- def move(direction, migrations_paths, steps)
- migrator = new(direction, migrations(migrations_paths))
- start_index = migrator.migrations.index(migrator.current_migration)
+ def move(direction, steps)
+ migrator = Migrator.new(direction, migrations)
- if start_index
- finish = migrator.migrations[start_index + steps]
- version = finish ? finish.version : 0
- send(direction, migrations_paths, version)
+ if current_version != 0 && !migrator.current_migration
+ raise UnknownMigrationVersionError.new(current_version)
end
+
+ start_index =
+ if current_version == 0
+ 0
+ else
+ migrator.migrations.index(migrator.current_migration)
+ end
+
+ finish = migrator.migrations[start_index + steps]
+ version = finish ? finish.version : 0
+ send(direction, version)
+ end
+ end
+
+ class Migrator # :nodoc:
+ class << self
+ attr_accessor :migrations_paths
+
+ # For cases where a table doesn't exist like loading from schema cache
+ def current_version
+ MigrationContext.new(migrations_paths).current_version
end
end
- def initialize(direction, migrations, target_version = nil)
- raise StandardError.new("This database does not yet support migrations") unless Base.connection.supports_migrations?
+ self.migrations_paths = ["db/migrate"]
+ def initialize(direction, migrations, target_version = nil)
@direction = direction
@target_version = target_version
@migrated_versions = nil
@@ -1111,8 +1185,8 @@ module ActiveRecord
validate(@migrations)
- Base.connection.initialize_schema_migrations_table
- Base.connection.initialize_internal_metadata_table
+ ActiveRecord::SchemaMigration.create_table
+ ActiveRecord::InternalMetadata.create_table
end
def current_version
@@ -1165,151 +1239,141 @@ module ActiveRecord
end
def load_migrated
- @migrated_versions = Set.new(self.class.get_all_versions)
+ @migrated_versions = Set.new(Base.connection.migration_context.get_all_versions)
end
private
- # Used for running a specific migration.
- def run_without_lock
- migration = migrations.detect { |m| m.version == @target_version }
- raise UnknownMigrationVersionError.new(@target_version) if migration.nil?
- execute_migration_in_transaction(migration, @direction)
+ # Used for running a specific migration.
+ def run_without_lock
+ migration = migrations.detect { |m| m.version == @target_version }
+ raise UnknownMigrationVersionError.new(@target_version) if migration.nil?
+ result = execute_migration_in_transaction(migration, @direction)
- record_environment
- end
-
- # Used for running multiple migrations up to or down to a certain value.
- def migrate_without_lock
- if invalid_target?
- raise UnknownMigrationVersionError.new(@target_version)
+ record_environment
+ result
end
- runnable.each do |migration|
- execute_migration_in_transaction(migration, @direction)
- end
-
- record_environment
- end
-
- # Stores the current environment in the database.
- def record_environment
- return if down?
- ActiveRecord::InternalMetadata[:environment] = ActiveRecord::Migrator.current_environment
- end
-
- def ran?(migration)
- migrated.include?(migration.version.to_i)
- end
+ # Used for running multiple migrations up to or down to a certain value.
+ def migrate_without_lock
+ if invalid_target?
+ raise UnknownMigrationVersionError.new(@target_version)
+ end
- # Return true if a valid version is not provided.
- def invalid_target?
- !target && @target_version && @target_version > 0
- end
+ result = runnable.each do |migration|
+ execute_migration_in_transaction(migration, @direction)
+ end
- def execute_migration_in_transaction(migration, direction)
- return if down? && !migrated.include?(migration.version.to_i)
- return if up? && migrated.include?(migration.version.to_i)
+ record_environment
+ result
+ end
- Base.logger.info "Migrating to #{migration.name} (#{migration.version})" if Base.logger
+ # Stores the current environment in the database.
+ def record_environment
+ return if down?
+ ActiveRecord::InternalMetadata[:environment] = ActiveRecord::Base.connection.migration_context.current_environment
+ end
- ddl_transaction(migration) do
- migration.migrate(direction)
- record_version_state_after_migrating(migration.version)
+ def ran?(migration)
+ migrated.include?(migration.version.to_i)
end
- rescue => e
- msg = "An error has occurred, "
- msg << "this and " if use_transaction?(migration)
- msg << "all later migrations canceled:\n\n#{e}"
- raise StandardError, msg, e.backtrace
- end
- def target
- migrations.detect { |m| m.version == @target_version }
- end
+ # Return true if a valid version is not provided.
+ def invalid_target?
+ @target_version && @target_version != 0 && !target
+ end
- def finish
- migrations.index(target) || migrations.size - 1
- end
+ def execute_migration_in_transaction(migration, direction)
+ return if down? && !migrated.include?(migration.version.to_i)
+ return if up? && migrated.include?(migration.version.to_i)
- def start
- up? ? 0 : (migrations.index(current) || 0)
- end
+ Base.logger.info "Migrating to #{migration.name} (#{migration.version})" if Base.logger
- def validate(migrations)
- name ,= migrations.group_by(&:name).find { |_,v| v.length > 1 }
- raise DuplicateMigrationNameError.new(name) if name
+ ddl_transaction(migration) do
+ migration.migrate(direction)
+ record_version_state_after_migrating(migration.version)
+ end
+ rescue => e
+ msg = +"An error has occurred, "
+ msg << "this and " if use_transaction?(migration)
+ msg << "all later migrations canceled:\n\n#{e}"
+ raise StandardError, msg, e.backtrace
+ end
- version ,= migrations.group_by(&:version).find { |_,v| v.length > 1 }
- raise DuplicateMigrationVersionError.new(version) if version
- end
+ def target
+ migrations.detect { |m| m.version == @target_version }
+ end
- def record_version_state_after_migrating(version)
- if down?
- migrated.delete(version)
- ActiveRecord::SchemaMigration.where(:version => version.to_s).delete_all
- else
- migrated << version
- ActiveRecord::SchemaMigration.create!(version: version.to_s)
+ def finish
+ migrations.index(target) || migrations.size - 1
end
- end
- def self.last_stored_environment
- return nil if current_version == 0
- raise NoEnvironmentInSchemaError unless ActiveRecord::InternalMetadata.table_exists?
+ def start
+ up? ? 0 : (migrations.index(current) || 0)
+ end
- environment = ActiveRecord::InternalMetadata[:environment]
- raise NoEnvironmentInSchemaError unless environment
- environment
- end
+ def validate(migrations)
+ name, = migrations.group_by(&:name).find { |_, v| v.length > 1 }
+ raise DuplicateMigrationNameError.new(name) if name
- def self.current_environment
- ActiveRecord::ConnectionHandling::DEFAULT_ENV.call
- end
+ version, = migrations.group_by(&:version).find { |_, v| v.length > 1 }
+ raise DuplicateMigrationVersionError.new(version) if version
+ end
- def self.protected_environment?
- ActiveRecord::Base.protected_environments.include?(last_stored_environment) if last_stored_environment
- end
+ def record_version_state_after_migrating(version)
+ if down?
+ migrated.delete(version)
+ ActiveRecord::SchemaMigration.delete_by(version: version.to_s)
+ else
+ migrated << version
+ ActiveRecord::SchemaMigration.create!(version: version.to_s)
+ end
+ end
- def up?
- @direction == :up
- end
+ def up?
+ @direction == :up
+ end
- def down?
- @direction == :down
- end
+ def down?
+ @direction == :down
+ end
- # Wrap the migration in a transaction only if supported by the adapter.
- def ddl_transaction(migration)
- if use_transaction?(migration)
- Base.transaction { yield }
- else
- yield
+ # Wrap the migration in a transaction only if supported by the adapter.
+ def ddl_transaction(migration)
+ if use_transaction?(migration)
+ Base.transaction { yield }
+ else
+ yield
+ end
end
- end
- def use_transaction?(migration)
- !migration.disable_ddl_transaction && Base.connection.supports_ddl_transactions?
- end
+ def use_transaction?(migration)
+ !migration.disable_ddl_transaction && Base.connection.supports_ddl_transactions?
+ end
- def use_advisory_lock?
- Base.connection.supports_advisory_locks?
- end
+ def use_advisory_lock?
+ Base.connection.advisory_locks_enabled?
+ end
- def with_advisory_lock
- lock_id = generate_migrator_advisory_lock_id
- got_lock = Base.connection.get_advisory_lock(lock_id)
- raise ConcurrentMigrationError unless got_lock
- load_migrated # reload schema_migrations to be sure it wasn't changed by another process before we got the lock
- yield
- ensure
- Base.connection.release_advisory_lock(lock_id) if got_lock
- end
+ def with_advisory_lock
+ lock_id = generate_migrator_advisory_lock_id
+ connection = Base.connection
+ got_lock = connection.get_advisory_lock(lock_id)
+ raise ConcurrentMigrationError unless got_lock
+ load_migrated # reload schema_migrations to be sure it wasn't changed by another process before we got the lock
+ yield
+ ensure
+ if got_lock && !connection.release_advisory_lock(lock_id)
+ raise ConcurrentMigrationError.new(
+ ConcurrentMigrationError::RELEASE_LOCK_FAILED_MESSAGE
+ )
+ end
+ end
- MIGRATOR_SALT = 2053462845
- def generate_migrator_advisory_lock_id
- db_name_hash = Zlib.crc32(Base.connection.current_database)
- MIGRATOR_SALT * db_name_hash
- end
+ MIGRATOR_SALT = 2053462845
+ def generate_migrator_advisory_lock_id
+ db_name_hash = Zlib.crc32(Base.connection.current_database)
+ MIGRATOR_SALT * db_name_hash
+ end
end
end
diff --git a/activerecord/lib/active_record/migration/command_recorder.rb b/activerecord/lib/active_record/migration/command_recorder.rb
index 0fa665c7e0..8e7f596076 100644
--- a/activerecord/lib/active_record/migration/command_recorder.rb
+++ b/activerecord/lib/active_record/migration/command_recorder.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
module ActiveRecord
class Migration
# <tt>ActiveRecord::Migration::CommandRecorder</tt> records commands done during
@@ -83,7 +85,7 @@ module ActiveRecord
# invert the +command+.
def inverse_of(command, args, &block)
method = :"invert_#{command}"
- raise IrreversibleMigration, <<-MSG.strip_heredoc unless respond_to?(method, true)
+ raise IrreversibleMigration, <<~MSG unless respond_to?(method, true)
This migration uses #{command}, which is not automatically reversible.
To make the migration reversible you can either:
1. Define #up and #down methods in place of the #change method.
@@ -92,10 +94,6 @@ module ActiveRecord
send(method, args, &block)
end
- def respond_to?(*args) # :nodoc:
- super || delegate.respond_to?(*args)
- end
-
ReversibleAndIrreversibleMethods.each do |method|
class_eval <<-EOV, __FILE__, __LINE__ + 1
def #{method}(*args, &block) # def create_table(*args, &block)
@@ -110,129 +108,154 @@ module ActiveRecord
yield delegate.update_table_definition(table_name, self)
end
+ def replay(migration)
+ commands.each do |cmd, args, block|
+ migration.send(cmd, *args, &block)
+ end
+ end
+
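A rough sketch of the new replay hook; some_migration stands in for any migration instance and is only a placeholder here.

    recorder = ActiveRecord::Migration::CommandRecorder.new(ActiveRecord::Base.connection)
    recorder.record(:add_column, [:posts, :published, :boolean])

    # replay sends every recorded command back to the object it is given;
    # Migration#revert uses this to apply the inverted commands it recorded.
    recorder.replay(some_migration)  # calls some_migration.add_column(:posts, :published, :boolean)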
private
- module StraightReversions
- private
- { transaction: :transaction,
- execute_block: :execute_block,
- create_table: :drop_table,
- create_join_table: :drop_join_table,
- add_column: :remove_column,
- add_timestamps: :remove_timestamps,
- add_reference: :remove_reference,
- enable_extension: :disable_extension
- }.each do |cmd, inv|
- [[inv, cmd], [cmd, inv]].uniq.each do |method, inverse|
- class_eval <<-EOV, __FILE__, __LINE__ + 1
- def invert_#{method}(args, &block) # def invert_create_table(args, &block)
- [:#{inverse}, args, block] # [:drop_table, args, block]
- end # end
- EOV
- end
+ module StraightReversions # :nodoc:
+ private
+ {
+ execute_block: :execute_block,
+ create_table: :drop_table,
+ create_join_table: :drop_join_table,
+ add_column: :remove_column,
+ add_timestamps: :remove_timestamps,
+ add_reference: :remove_reference,
+ enable_extension: :disable_extension
+ }.each do |cmd, inv|
+ [[inv, cmd], [cmd, inv]].uniq.each do |method, inverse|
+ class_eval <<-EOV, __FILE__, __LINE__ + 1
+ def invert_#{method}(args, &block) # def invert_create_table(args, &block)
+ [:#{inverse}, args, block] # [:drop_table, args, block]
+ end # end
+ EOV
+ end
+ end
end
- end
- include StraightReversions
+ include StraightReversions
+
+ def invert_transaction(args)
+ sub_recorder = CommandRecorder.new(delegate)
+ sub_recorder.revert { yield }
- def invert_drop_table(args, &block)
- if args.size == 1 && block == nil
- raise ActiveRecord::IrreversibleMigration, "To avoid mistakes, drop_table is only reversible if given options or a block (can be empty)."
+ invertions_proc = proc {
+ sub_recorder.replay(self)
+ }
+
+ [:transaction, args, invertions_proc]
end
- super
- end
- def invert_rename_table(args)
- [:rename_table, args.reverse]
- end
+ def invert_drop_table(args, &block)
+ if args.size == 1 && block == nil
+ raise ActiveRecord::IrreversibleMigration, "To avoid mistakes, drop_table is only reversible if given options or a block (can be empty)."
+ end
+ super
+ end
- def invert_remove_column(args)
- raise ActiveRecord::IrreversibleMigration, "remove_column is only reversible if given a type." if args.size <= 2
- super
- end
+ def invert_rename_table(args)
+ [:rename_table, args.reverse]
+ end
- def invert_rename_index(args)
- [:rename_index, [args.first] + args.last(2).reverse]
- end
+ def invert_remove_column(args)
+ raise ActiveRecord::IrreversibleMigration, "remove_column is only reversible if given a type." if args.size <= 2
+ super
+ end
- def invert_rename_column(args)
- [:rename_column, [args.first] + args.last(2).reverse]
- end
+ def invert_rename_index(args)
+ [:rename_index, [args.first] + args.last(2).reverse]
+ end
- def invert_add_index(args)
- table, columns, options = *args
- options ||= {}
+ def invert_rename_column(args)
+ [:rename_column, [args.first] + args.last(2).reverse]
+ end
- index_name = options[:name]
- options_hash = index_name ? { name: index_name } : { column: columns }
+ def invert_add_index(args)
+ table, columns, options = *args
+ options ||= {}
- [:remove_index, [table, options_hash]]
- end
+ options_hash = options.slice(:name, :algorithm)
+ options_hash[:column] = columns if !options_hash[:name]
+
+ [:remove_index, [table, options_hash]]
+ end
- def invert_remove_index(args)
- table, options_or_column = *args
- if (options = options_or_column).is_a?(Hash)
- unless options[:column]
- raise ActiveRecord::IrreversibleMigration, "remove_index is only reversible if given a :column option."
+ def invert_remove_index(args)
+ table, options_or_column = *args
+ if (options = options_or_column).is_a?(Hash)
+ unless options[:column]
+ raise ActiveRecord::IrreversibleMigration, "remove_index is only reversible if given a :column option."
+ end
+ options = options.dup
+ [:add_index, [table, options.delete(:column), options]]
+ elsif (column = options_or_column).present?
+ [:add_index, [table, column]]
end
- options = options.dup
- [:add_index, [table, options.delete(:column), options]]
- elsif (column = options_or_column).present?
- [:add_index, [table, column]]
end
- end
- alias :invert_add_belongs_to :invert_add_reference
- alias :invert_remove_belongs_to :invert_remove_reference
+ alias :invert_add_belongs_to :invert_add_reference
+ alias :invert_remove_belongs_to :invert_remove_reference
- def invert_change_column_default(args)
- table, column, options = *args
+ def invert_change_column_default(args)
+ table, column, options = *args
- unless options && options.is_a?(Hash) && options.has_key?(:from) && options.has_key?(:to)
- raise ActiveRecord::IrreversibleMigration, "change_column_default is only reversible if given a :from and :to option."
+ unless options && options.is_a?(Hash) && options.has_key?(:from) && options.has_key?(:to)
+ raise ActiveRecord::IrreversibleMigration, "change_column_default is only reversible if given a :from and :to option."
+ end
+
+ [:change_column_default, [table, column, from: options[:to], to: options[:from]]]
end
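To make the inversion above concrete, a hedged example of a default change that the recorder can reverse (table and column names hypothetical); omitting :from/:to raises IrreversibleMigration as shown.

    class ChangePostsStateDefault < ActiveRecord::Migration[5.2]
      def change
        # Reversible: rolling back swaps :from and :to.
        change_column_default :posts, :state, from: nil, to: "draft"
      end
    end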
- [:change_column_default, [table, column, from: options[:to], to: options[:from]]]
- end
+ def invert_change_column_null(args)
+ args[2] = !args[2]
+ [:change_column_null, args]
+ end
- def invert_change_column_null(args)
- args[2] = !args[2]
- [:change_column_null, args]
- end
+ def invert_add_foreign_key(args)
+ from_table, to_table, add_options = args
+ add_options ||= {}
- def invert_add_foreign_key(args)
- from_table, to_table, add_options = args
- add_options ||= {}
+ if add_options[:name]
+ options = { name: add_options[:name] }
+ elsif add_options[:column]
+ options = { column: add_options[:column] }
+ else
+ options = to_table
+ end
- if add_options[:name]
- options = { name: add_options[:name] }
- elsif add_options[:column]
- options = { column: add_options[:column] }
- else
- options = to_table
+ [:remove_foreign_key, [from_table, options]]
end
- [:remove_foreign_key, [from_table, options]]
- end
+ def invert_remove_foreign_key(args)
+ options = args.extract_options!
+ from_table, to_table = args
- def invert_remove_foreign_key(args)
- from_table, to_table, remove_options = args
- raise ActiveRecord::IrreversibleMigration, "remove_foreign_key is only reversible if given a second table" if to_table.nil? || to_table.is_a?(Hash)
+ to_table ||= options.delete(:to_table)
- reversed_args = [from_table, to_table]
- reversed_args << remove_options if remove_options
+ raise ActiveRecord::IrreversibleMigration, "remove_foreign_key is only reversible if given a second table" if to_table.nil?
- [:add_foreign_key, reversed_args]
- end
+ reversed_args = [from_table, to_table]
+ reversed_args << options unless options.empty?
- # Forwards any missing method call to the \target.
- def method_missing(method, *args, &block)
- if @delegate.respond_to?(method)
- @delegate.send(method, *args, &block)
- else
- super
+ [:add_foreign_key, reversed_args]
+ end
+
+ def respond_to_missing?(method, _)
+ super || delegate.respond_to?(method)
+ end
+
+ # Forwards any missing method call to the \target.
+ def method_missing(method, *args, &block)
+ if delegate.respond_to?(method)
+ delegate.public_send(method, *args, &block)
+ else
+ super
+ end
end
- end
end
end
end
diff --git a/activerecord/lib/active_record/migration/compatibility.rb b/activerecord/lib/active_record/migration/compatibility.rb
index a20d7e0820..abc939826b 100644
--- a/activerecord/lib/active_record/migration/compatibility.rb
+++ b/activerecord/lib/active_record/migration/compatibility.rb
@@ -1,55 +1,185 @@
+# frozen_string_literal: true
+
module ActiveRecord
class Migration
module Compatibility # :nodoc: all
- V5_0 = Current
+ def self.find(version)
+ version = version.to_s
+ name = "V#{version.tr('.', '_')}"
+ unless const_defined?(name)
+ versions = constants.grep(/\AV[0-9_]+\z/).map { |s| s.to_s.delete("V").tr("_", ".").inspect }
+ raise ArgumentError, "Unknown migration version #{version.inspect}; expected one of #{versions.sort.join(', ')}"
+ end
+ const_get(name)
+ end
+
+ V6_0 = Current
- module FourTwoShared
+ class V5_2 < V6_0
module TableDefinition
- def references(*, **options)
- options[:index] ||= false
+ def timestamps(**options)
+ options[:precision] ||= nil
super
end
- alias :belongs_to :references
+ end
- def timestamps(*, **options)
- options[:null] = true if options[:null].nil?
+ module CommandRecorder
+ def invert_transaction(args, &block)
+ [:transaction, args, block]
+ end
+ end
+
+ def create_table(table_name, **options)
+ if block_given?
+ super { |t| yield compatible_table_definition(t) }
+ else
super
end
end
- def create_table(table_name, options = {})
+ def change_table(table_name, **options)
if block_given?
- super(table_name, options) do |t|
- class << t
- prepend TableDefinition
- end
- yield t
- end
+ super { |t| yield compatible_table_definition(t) }
else
super
end
end
- def change_table(table_name, options = {})
+ def create_join_table(table_1, table_2, **options)
if block_given?
- super(table_name, options) do |t|
- class << t
- prepend TableDefinition
- end
- yield t
+ super { |t| yield compatible_table_definition(t) }
+ else
+ super
+ end
+ end
+
+ def add_timestamps(table_name, **options)
+ options[:precision] ||= nil
+ super
+ end
+
+ private
+ def compatible_table_definition(t)
+ class << t
+ prepend TableDefinition
+ end
+ t
+ end
+
+ def command_recorder
+ recorder = super
+ class << recorder
+ prepend CommandRecorder
end
+ recorder
+ end
+ end
+
+ class V5_1 < V5_2
+ def change_column(table_name, column_name, type, options = {})
+ if connection.adapter_name == "PostgreSQL"
+ super(table_name, column_name, type, options.except(:default, :null, :comment))
+ connection.change_column_default(table_name, column_name, options[:default]) if options.key?(:default)
+ connection.change_column_null(table_name, column_name, options[:null], options[:default]) if options.key?(:null)
+ connection.change_column_comment(table_name, column_name, options[:comment]) if options.key?(:comment)
+ else
+ super
+ end
+ end
+
+ def create_table(table_name, options = {})
+ if connection.adapter_name == "Mysql2"
+ super(table_name, options: "ENGINE=InnoDB", **options)
else
super
end
end
+ end
+
+ class V5_0 < V5_1
+ module TableDefinition
+ def primary_key(name, type = :primary_key, **options)
+ type = :integer if type == :primary_key
+ super
+ end
+
+ def references(*args, **options)
+ super(*args, type: :integer, **options)
+ end
+ alias :belongs_to :references
+ end
+
+ def create_table(table_name, options = {})
+ if connection.adapter_name == "PostgreSQL"
+ if options[:id] == :uuid && !options.key?(:default)
+ options[:default] = "uuid_generate_v4()"
+ end
+ end
+
+ unless connection.adapter_name == "Mysql2" && options[:id] == :bigint
+ if [:integer, :bigint].include?(options[:id]) && !options.key?(:default)
+ options[:default] = nil
+ end
+ end
+
+          # Since 5.1 the PostgreSQL adapter uses bigserial for primary keys by
+          # default and MySQL uses bigint. This compat layer makes old migrations
+          # use the serial/int type instead -- the way it worked before 5.1.
+ unless options.key?(:id)
+ options[:id] = :integer
+ end
+
+ super
+ end
+
+ def create_join_table(table_1, table_2, column_options: {}, **options)
+ column_options.reverse_merge!(type: :integer)
+ super
+ end
+
+ def add_column(table_name, column_name, type, options = {})
+ if type == :primary_key
+ type = :integer
+ options[:primary_key] = true
+ end
+ super
+ end
+
+ def add_reference(table_name, ref_name, **options)
+ super(table_name, ref_name, type: :integer, **options)
+ end
+ alias :add_belongs_to :add_reference
+
+ private
+ def compatible_table_definition(t)
+ class << t
+ prepend TableDefinition
+ end
+ super
+ end
+ end
+
+ class V4_2 < V5_0
+ module TableDefinition
+ def references(*, **options)
+ options[:index] ||= false
+ super
+ end
+ alias :belongs_to :references
+
+ def timestamps(**options)
+ options[:null] = true if options[:null].nil?
+ super
+ end
+ end
- def add_reference(*, **options)
+ def add_reference(table_name, ref_name, **options)
options[:index] ||= false
super
end
alias :add_belongs_to :add_reference
- def add_timestamps(*, **options)
+ def add_timestamps(table_name, **options)
options[:null] = true if options[:null].nil?
super
end
@@ -60,7 +190,7 @@ module ActiveRecord
if options[:name].present?
options[:name].to_s
else
- index_name(table_name, column: column_names)
+ connection.index_name(table_name, column: column_names)
end
super
end
@@ -72,44 +202,32 @@ module ActiveRecord
end
private
-
- def index_name_for_remove(table_name, options = {})
- index_name = index_name(table_name, options)
-
- unless index_name_exists?(table_name, index_name, true)
- if options.is_a?(Hash) && options.has_key?(:name)
- options_without_column = options.dup
- options_without_column.delete :column
- index_name_without_column = index_name(table_name, options_without_column)
-
- return index_name_without_column if index_name_exists?(table_name, index_name_without_column, false)
+ def compatible_table_definition(t)
+ class << t
+ prepend TableDefinition
end
-
- raise ArgumentError, "Index name '#{index_name}' on table '#{table_name}' does not exist"
+ super
end
- index_name
- end
- end
+ def index_name_for_remove(table_name, options = {})
+ index_name = connection.index_name(table_name, options)
- class V4_2 < V5_0
- # 4.2 is defined as a module because it needs to be shared with
- # Legacy. When the time comes, V5_0 should be defined straight
- # in its class.
- include FourTwoShared
- end
+ unless connection.index_name_exists?(table_name, index_name)
+ if options.is_a?(Hash) && options.has_key?(:name)
+ options_without_column = options.dup
+ options_without_column.delete :column
+ index_name_without_column = connection.index_name(table_name, options_without_column)
- module Legacy
- include FourTwoShared
+ if connection.index_name_exists?(table_name, index_name_without_column)
+ return index_name_without_column
+ end
+ end
- def migrate(*)
- ActiveSupport::Deprecation.warn \
- "Directly inheriting from ActiveRecord::Migration is deprecated. " \
- "Please specify the Rails release the migration was written for:\n" \
- "\n" \
- " class #{self.class.name} < ActiveRecord::Migration[4.2]"
- super
- end
+ raise ArgumentError, "Index name '#{index_name}' on table '#{table_name}' does not exist"
+ end
+
+ index_name
+ end
end
end
end
diff --git a/activerecord/lib/active_record/migration/join_table.rb b/activerecord/lib/active_record/migration/join_table.rb
index 05569fadbd..9abb289bb0 100644
--- a/activerecord/lib/active_record/migration/join_table.rb
+++ b/activerecord/lib/active_record/migration/join_table.rb
@@ -1,15 +1,17 @@
+# frozen_string_literal: true
+
module ActiveRecord
class Migration
module JoinTable #:nodoc:
private
- def find_join_table_name(table_1, table_2, options = {})
- options.delete(:table_name) || join_table_name(table_1, table_2)
- end
+ def find_join_table_name(table_1, table_2, options = {})
+ options.delete(:table_name) || join_table_name(table_1, table_2)
+ end
- def join_table_name(table_1, table_2)
- ModelSchema.derive_join_table_name(table_1, table_2).to_sym
- end
+ def join_table_name(table_1, table_2)
+ ModelSchema.derive_join_table_name(table_1, table_2).to_sym
+ end
end
end
end
diff --git a/activerecord/lib/active_record/model_schema.rb b/activerecord/lib/active_record/model_schema.rb
index 52eab952e1..55fc58e339 100644
--- a/activerecord/lib/active_record/model_schema.rb
+++ b/activerecord/lib/active_record/model_schema.rb
@@ -1,78 +1,139 @@
+# frozen_string_literal: true
+
+require "monitor"
+
module ActiveRecord
module ModelSchema
extend ActiveSupport::Concern
+ ##
+ # :singleton-method: primary_key_prefix_type
+ # :call-seq: primary_key_prefix_type
+ #
+ # The prefix type that will be prepended to every primary key column name.
+ # The options are +:table_name+ and +:table_name_with_underscore+. If the first is specified,
+ # the Product class will look for "productid" instead of "id" as the primary column. If the
+ # latter is specified, the Product class will look for "product_id" instead of "id". Remember
+ # that this is a global setting for all Active Records.
+
+ ##
+ # :singleton-method: primary_key_prefix_type=
+ # :call-seq: primary_key_prefix_type=(prefix_type)
+ #
+ # Sets the prefix type that will be prepended to every primary key column name.
+ # The options are +:table_name+ and +:table_name_with_underscore+. If the first is specified,
+ # the Product class will look for "productid" instead of "id" as the primary column. If the
+ # latter is specified, the Product class will look for "product_id" instead of "id". Remember
+ # that this is a global setting for all Active Records.
+
+ ##
+ # :singleton-method: table_name_prefix
+ # :call-seq: table_name_prefix
+ #
+ # The prefix string to prepend to every table name.
+
+ ##
+ # :singleton-method: table_name_prefix=
+ # :call-seq: table_name_prefix=(prefix)
+ #
+ # Sets the prefix string to prepend to every table name. So if set to "basecamp_", all table
+ # names will be named like "basecamp_projects", "basecamp_people", etc. This is a convenient
+ # way of creating a namespace for tables in a shared database. By default, the prefix is the
+ # empty string.
+ #
+ # If you are organising your models within modules you can add a prefix to the models within
+ # a namespace by defining a singleton method in the parent module called table_name_prefix which
+ # returns your chosen prefix.
+
+ ##
+ # :singleton-method: table_name_suffix
+ # :call-seq: table_name_suffix
+ #
+ # The suffix string to append to every table name.
+
+ ##
+ # :singleton-method: table_name_suffix=
+ # :call-seq: table_name_suffix=(suffix)
+ #
+ # Works like +table_name_prefix=+, but appends instead of prepends (set to "_basecamp" gives "projects_basecamp",
+ # "people_basecamp"). By default, the suffix is the empty string.
+ #
+ # If you are organising your models within modules, you can add a suffix to the models within
+ # a namespace by defining a singleton method in the parent module called table_name_suffix which
+ # returns your chosen suffix.
+
+ ##
+ # :singleton-method: schema_migrations_table_name
+ # :call-seq: schema_migrations_table_name
+ #
+ # The name of the schema migrations table. By default, the value is <tt>"schema_migrations"</tt>.
+
+ ##
+ # :singleton-method: schema_migrations_table_name=
+ # :call-seq: schema_migrations_table_name=(table_name)
+ #
+ # Sets the name of the schema migrations table.
+
+ ##
+ # :singleton-method: internal_metadata_table_name
+ # :call-seq: internal_metadata_table_name
+ #
+ # The name of the internal metadata table. By default, the value is <tt>"ar_internal_metadata"</tt>.
+
+ ##
+ # :singleton-method: internal_metadata_table_name=
+ # :call-seq: internal_metadata_table_name=(table_name)
+ #
+ # Sets the name of the internal metadata table.
+
+ ##
+ # :singleton-method: pluralize_table_names
+ # :call-seq: pluralize_table_names
+ #
+ # Indicates whether table names should be the pluralized versions of the corresponding class names.
+ # If true, the default table name for a Product class will be "products". If false, it would just be "product".
+ # See table_name for the full rules on table/class naming. This is true, by default.
+
+ ##
+ # :singleton-method: pluralize_table_names=
+ # :call-seq: pluralize_table_names=(value)
+ #
+ # Set whether table names should be the pluralized versions of the corresponding class names.
+ # If true, the default table name for a Product class will be "products". If false, it would just be "product".
+ # See table_name for the full rules on table/class naming. This is true, by default.
+
+ ##
+ # :singleton-method: implicit_order_column
+ # :call-seq: implicit_order_column
+ #
+ # The name of the column records are ordered by if no explicit order clause
+ # is used during an ordered finder call. If not set the primary key is used.
+
+ ##
+ # :singleton-method: implicit_order_column=
+ # :call-seq: implicit_order_column=(column_name)
+ #
+ # Sets the column to sort records by when no explicit order clause is used
+ # during an ordered finder call. Useful when the primary key is not an
+ # auto-incrementing integer, for example when it's a UUID. Note that using
+ # a non-unique column can result in non-deterministic results.
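A hedged sketch of setting a few of the options documented above; the prefix, the ApplicationRecord base class, and the created_at ordering column are illustrative choices, typically made in an initializer or on an abstract base class.

    # e.g. config/initializers/active_record.rb
    ActiveRecord::Base.table_name_prefix = "basecamp_"   # Project => "basecamp_projects"
    ActiveRecord::Base.pluralize_table_names = true

    class ApplicationRecord < ActiveRecord::Base
      self.abstract_class = true
      # Used when no explicit order is given and the primary key
      # (e.g. a UUID) is not a meaningful sort key.
      self.implicit_order_column = "created_at"
    end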
included do
- ##
- # :singleton-method:
- # Accessor for the prefix type that will be prepended to every primary key column name.
- # The options are :table_name and :table_name_with_underscore. If the first is specified,
- # the Product class will look for "productid" instead of "id" as the primary column. If the
- # latter is specified, the Product class will look for "product_id" instead of "id". Remember
- # that this is a global setting for all Active Records.
mattr_accessor :primary_key_prefix_type, instance_writer: false
- ##
- # :singleton-method:
- # Accessor for the name of the prefix string to prepend to every table name. So if set
- # to "basecamp_", all table names will be named like "basecamp_projects", "basecamp_people",
- # etc. This is a convenient way of creating a namespace for tables in a shared database.
- # By default, the prefix is the empty string.
- #
- # If you are organising your models within modules you can add a prefix to the models within
- # a namespace by defining a singleton method in the parent module called table_name_prefix which
- # returns your chosen prefix.
- class_attribute :table_name_prefix, instance_writer: false
- self.table_name_prefix = ""
-
- ##
- # :singleton-method:
- # Works like +table_name_prefix+, but appends instead of prepends (set to "_basecamp" gives "projects_basecamp",
- # "people_basecamp"). By default, the suffix is the empty string.
- #
- # If you are organising your models within modules, you can add a suffix to the models within
- # a namespace by defining a singleton method in the parent module called table_name_suffix which
- # returns your chosen suffix.
- class_attribute :table_name_suffix, instance_writer: false
- self.table_name_suffix = ""
-
- ##
- # :singleton-method:
- # Accessor for the name of the schema migrations table. By default, the value is "schema_migrations"
- class_attribute :schema_migrations_table_name, instance_accessor: false
- self.schema_migrations_table_name = "schema_migrations"
-
- ##
- # :singleton-method:
- # Accessor for the name of the internal metadata table. By default, the value is "ar_internal_metadata"
- class_attribute :internal_metadata_table_name, instance_accessor: false
- self.internal_metadata_table_name = "ar_internal_metadata"
-
- ##
- # :singleton-method:
- # Accessor for an array of names of environments where destructive actions should be prohibited. By default,
- # the value is ["production"]
- class_attribute :protected_environments, instance_accessor: false
- self.protected_environments = ["production"]
+ class_attribute :table_name_prefix, instance_writer: false, default: ""
+ class_attribute :table_name_suffix, instance_writer: false, default: ""
+ class_attribute :schema_migrations_table_name, instance_accessor: false, default: "schema_migrations"
+ class_attribute :internal_metadata_table_name, instance_accessor: false, default: "ar_internal_metadata"
+ class_attribute :pluralize_table_names, instance_writer: false, default: true
+ class_attribute :implicit_order_column, instance_accessor: false
- ##
- # :singleton-method:
- # Indicates whether table names should be the pluralized versions of the corresponding class names.
- # If true, the default table name for a Product class will be +products+. If false, it would just be +product+.
- # See table_name for the full rules on table/class naming. This is true, by default.
- class_attribute :pluralize_table_names, instance_writer: false
- self.pluralize_table_names = true
-
- ##
- # :singleton-method:
- # Accessor for the list of columns names the model should ignore. Ignored columns won't have attribute
- # accessors defined, and won't be referenced in SQL queries.
- class_attribute :ignored_columns, instance_accessor: false
+ self.protected_environments = ["production"]
+ self.inheritance_column = "type"
self.ignored_columns = [].freeze
- self.inheritance_column = 'type'
-
delegate :type_for_attribute, to: :class
+
+ initialize_load_schema_monitor
end
# Derives the join table name for +first_table+ and +second_table+. The
@@ -173,11 +234,26 @@ module ActiveRecord
end
def full_table_name_prefix #:nodoc:
- (parents.detect{ |p| p.respond_to?(:table_name_prefix) } || self).table_name_prefix
+ (module_parents.detect { |p| p.respond_to?(:table_name_prefix) } || self).table_name_prefix
end
def full_table_name_suffix #:nodoc:
- (parents.detect {|p| p.respond_to?(:table_name_suffix) } || self).table_name_suffix
+ (module_parents.detect { |p| p.respond_to?(:table_name_suffix) } || self).table_name_suffix
+ end
+
+ # The array of names of environments where destructive actions should be prohibited. By default,
+ # the value is <tt>["production"]</tt>.
+ def protected_environments
+ if defined?(@protected_environments)
+ @protected_environments
+ else
+ superclass.protected_environments
+ end
+ end
+
+ # Sets an array of names of environments where destructive actions should be prohibited.
+ def protected_environments=(environments)
+ @protected_environments = environments.map(&:to_s)
end
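For illustration, a hedged example of widening the protected list; the "staging" environment name is hypothetical. The writer stringifies its input, as shown.

    ActiveRecord::Base.protected_environments = ["production", :staging]
    ActiveRecord::Base.protected_environments # => ["production", "staging"]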
# Defines the name of the table column which will store the class name on single-table
@@ -199,8 +275,24 @@ module ActiveRecord
@explicit_inheritance_column = true
end
+      # The list of column names the model should ignore. Ignored columns won't have attribute
+ # accessors defined, and won't be referenced in SQL queries.
+ def ignored_columns
+ if defined?(@ignored_columns)
+ @ignored_columns
+ else
+ superclass.ignored_columns
+ end
+ end
+
+      # Sets the column names the model should ignore. Ignored columns won't have attribute
+ # accessors defined, and won't be referenced in SQL queries.
+ def ignored_columns=(columns)
+ @ignored_columns = columns.map(&:to_s)
+ end
+
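A hedged usage sketch of the accessor pair above; the User model and the legacy column name are hypothetical.

    class User < ActiveRecord::Base
      # The column stays in the table but gets no attribute accessor and
      # is left out of the attribute set loaded from the schema.
      self.ignored_columns = [:legacy_encrypted_password]
    end

    User.ignored_columns # => ["legacy_encrypted_password"]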
def sequence_name
- if base_class == self
+ if base_class?
@sequence_name ||= reset_sequence_name
else
(@sequence_name ||= nil) || base_class.sequence_name
@@ -213,7 +305,7 @@ module ActiveRecord
end
# Sets the name of the sequence to use when generating ids to the given
- # value, or (if the value is nil or false) to the value returned by the
+ # value, or (if the value is +nil+ or +false+) to the value returned by the
# given block. This is required for Oracle and is useful for any
# database which relies on sequences for primary key generation.
#
@@ -238,7 +330,7 @@ module ActiveRecord
end
# Returns the next value that will be used as the primary key on
- # an insert statment.
+ # an insert statement.
def next_sequence_value
connection.next_sequence_value(sequence_name)
end
@@ -249,7 +341,11 @@ module ActiveRecord
end
def attributes_builder # :nodoc:
- @attributes_builder ||= AttributeSet::Builder.new(attribute_types, primary_key)
+ unless defined?(@attributes_builder) && @attributes_builder
+ defaults = _default_attributes.except(*(column_names - [primary_key]))
+ @attributes_builder = ActiveModel::AttributeSet::Builder.new(attribute_types, defaults)
+ end
+ @attributes_builder
end
def columns_hash # :nodoc:
@@ -264,7 +360,11 @@ module ActiveRecord
def attribute_types # :nodoc:
load_schema
- @attribute_types ||= Hash.new(Type::Value.new)
+ @attribute_types ||= Hash.new(Type.default_value)
+ end
+
+ def yaml_encoder # :nodoc:
+ @yaml_encoder ||= ActiveModel::AttributeSet::YAMLEncoder.new(attribute_types)
end
# Returns the type of the attribute with the given name, after applying
@@ -277,20 +377,26 @@ module ActiveRecord
# it).
#
# +attr_name+ The name of the attribute to retrieve the type for. Must be
- # a string
- def type_for_attribute(attr_name)
- attribute_types[attr_name]
+ # a string or a symbol.
+ def type_for_attribute(attr_name, &block)
+ attr_name = attr_name.to_s
+ if block
+ attribute_types.fetch(attr_name, &block)
+ else
+ attribute_types[attr_name]
+ end
end
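A hedged sketch of the symbol and block forms added above; the Post model and its title attribute are hypothetical.

    Post.type_for_attribute(:title)   # symbols are accepted and converted to strings
    Post.type_for_attribute("title")  # => #<ActiveRecord::Type::String ...>

    # The optional block is a Hash#fetch-style fallback for unknown attributes.
    Post.type_for_attribute("does_not_exist") { ActiveModel::Type::Value.new }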
# Returns a hash where the keys are column names and the values are
# default values when instantiating the Active Record object for this table.
def column_defaults
load_schema
- _default_attributes.to_hash
+ @column_defaults ||= _default_attributes.deep_dup.to_hash
end
def _default_attributes # :nodoc:
- @default_attributes ||= AttributeSet.new({})
+ load_schema
+ @default_attributes ||= ActiveModel::AttributeSet.new({})
end
# Returns an array of column names as strings.
@@ -298,10 +404,20 @@ module ActiveRecord
@column_names ||= columns.map(&:name)
end
+ def symbol_column_to_string(name_symbol) # :nodoc:
+ @symbol_column_to_string_name_hash ||= column_names.index_by(&:to_sym)
+ @symbol_column_to_string_name_hash[name_symbol]
+ end
+
# Returns an array of column objects where the primary id, all columns ending in "_id" or "_count",
# and columns used for single table inheritance have been removed.
def content_columns
- @content_columns ||= columns.reject { |c| c.name == primary_key || c.name =~ /(_id|_count)$/ || c.name == inheritance_column }
+ @content_columns ||= columns.reject do |c|
+ c.name == primary_key ||
+ c.name == inheritance_column ||
+ c.name.end_with?("_id") ||
+ c.name.end_with?("_count")
+ end
end
# Resets all the cached information about columns, which will cause them
@@ -332,99 +448,95 @@ module ActiveRecord
# end
def reset_column_information
connection.clear_cache!
- undefine_attribute_methods
+ ([self] + descendants).each(&:undefine_attribute_methods)
connection.schema_cache.clear_data_source_cache!(table_name)
reload_schema_from_cache
+ initialize_find_by_cache
end
- private
-
- def schema_loaded?
- defined?(@columns_hash) && @columns_hash
- end
+ protected
- def load_schema
- unless schema_loaded?
- load_schema!
+ def initialize_load_schema_monitor
+ @load_schema_monitor = Monitor.new
end
- end
- def load_schema!
- @columns_hash = connection.schema_cache.columns_hash(table_name).except(*ignored_columns)
- @columns_hash.each do |name, column|
- warn_if_deprecated_type(column)
- define_attribute(
- name,
- connection.lookup_cast_type_from_column(column),
- default: column.default,
- user_provided_default: false
- )
+ private
+
+ def inherited(child_class)
+ super
+ child_class.initialize_load_schema_monitor
end
- end
- def reload_schema_from_cache
- @arel_engine = nil
- @arel_table = nil
- @column_names = nil
- @attribute_types = nil
- @content_columns = nil
- @default_attributes = nil
- @inheritance_column = nil unless defined?(@explicit_inheritance_column) && @explicit_inheritance_column
- @attributes_builder = nil
- @columns = nil
- @columns_hash = nil
- @attribute_names = nil
- direct_descendants.each do |descendant|
- descendant.send(:reload_schema_from_cache)
+ def schema_loaded?
+ defined?(@schema_loaded) && @schema_loaded
end
- end
- # Guesses the table name, but does not decorate it with prefix and suffix information.
- def undecorated_table_name(class_name = base_class.name)
- table_name = class_name.to_s.demodulize.underscore
- pluralize_table_names ? table_name.pluralize : table_name
- end
+ def load_schema
+ return if schema_loaded?
+ @load_schema_monitor.synchronize do
+ return if defined?(@columns_hash) && @columns_hash
- # Computes and returns a table name according to default conventions.
- def compute_table_name
- base = base_class
- if self == base
- # Nested classes are prefixed with singular parent table name.
- if parent < Base && !parent.abstract_class?
- contained = parent.table_name
- contained = contained.singularize if parent.pluralize_table_names
- contained += '_'
- end
+ load_schema!
- "#{full_table_name_prefix}#{contained}#{undecorated_table_name(name)}#{full_table_name_suffix}"
- else
- # STI subclasses always use their superclass' table.
- base.table_name
+ @schema_loaded = true
+ end
end
- end
- def warn_if_deprecated_type(column)
- return if attributes_to_define_after_schema_loads.key?(column.name)
- if column.respond_to?(:oid) && column.sql_type.start_with?("point")
- if column.array?
- array_arguments = ", array: true"
- else
- array_arguments = ""
+ def load_schema!
+ @columns_hash = connection.schema_cache.columns_hash(table_name).except(*ignored_columns)
+ @columns_hash.each do |name, column|
+ define_attribute(
+ name,
+ connection.lookup_cast_type_from_column(column),
+ default: column.default,
+ user_provided_default: false
+ )
end
- ActiveSupport::Deprecation.warn(<<-WARNING.strip_heredoc)
- The behavior of the `:point` type will be changing in Rails 5.1 to
- return a `Point` object, rather than an `Array`. If you'd like to
- keep the old behavior, you can add this line to #{self.name}:
+ end
- attribute :#{column.name}, :legacy_point#{array_arguments}
+ def reload_schema_from_cache
+ @arel_table = nil
+ @column_names = nil
+ @symbol_column_to_string_name_hash = nil
+ @attribute_types = nil
+ @content_columns = nil
+ @default_attributes = nil
+ @column_defaults = nil
+ @inheritance_column = nil unless defined?(@explicit_inheritance_column) && @explicit_inheritance_column
+ @attributes_builder = nil
+ @columns = nil
+ @columns_hash = nil
+ @schema_loaded = false
+ @attribute_names = nil
+ @yaml_encoder = nil
+ direct_descendants.each do |descendant|
+ descendant.send(:reload_schema_from_cache)
+ end
+ end
- If you'd like the new behavior today, you can add this line:
+ # Guesses the table name, but does not decorate it with prefix and suffix information.
+ def undecorated_table_name(class_name = base_class.name)
+ table_name = class_name.to_s.demodulize.underscore
+ pluralize_table_names ? table_name.pluralize : table_name
+ end
- attribute :#{column.name}, :point#{array_arguments}
- WARNING
+ # Computes and returns a table name according to default conventions.
+ def compute_table_name
+ if base_class?
+ # Nested classes are prefixed with singular parent table name.
+ if module_parent < Base && !module_parent.abstract_class?
+ contained = module_parent.table_name
+ contained = contained.singularize if module_parent.pluralize_table_names
+ contained += "_"
+ end
+
+ "#{full_table_name_prefix}#{contained}#{undecorated_table_name(name)}#{full_table_name_suffix}"
+ else
+ # STI subclasses always use their superclass' table.
+ base_class.table_name
+ end
end
- end
end
end
end
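The load_schema path above guards lazy schema loading with a per-class monitor that subclasses inherit via initialize_load_schema_monitor. A minimal sketch of the same double-checked locking pattern, using hypothetical names (LazySchema, fetch_columns) in place of the Active Record internals:

  require "monitor"

  class LazySchema
    def initialize
      @monitor = Monitor.new
      @loaded  = false
    end

    def columns
      load_schema
      @columns
    end

    private
      def load_schema
        return if @loaded
        @monitor.synchronize do
          return if @loaded        # another thread may have loaded it while we waited
          @columns = fetch_columns # the expensive work runs at most once
          @loaded  = true
        end
      end

      def fetch_columns
        { "id" => :integer, "name" => :string } # stand-in for the schema cache lookup
      end
  end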
diff --git a/activerecord/lib/active_record/nested_attributes.rb b/activerecord/lib/active_record/nested_attributes.rb
index fe68869143..8b9098df6c 100644
--- a/activerecord/lib/active_record/nested_attributes.rb
+++ b/activerecord/lib/active_record/nested_attributes.rb
@@ -1,6 +1,9 @@
-require 'active_support/core_ext/hash/except'
-require 'active_support/core_ext/object/try'
-require 'active_support/core_ext/hash/indifferent_access'
+# frozen_string_literal: true
+
+require "active_support/core_ext/hash/except"
+require "active_support/core_ext/module/redefine_method"
+require "active_support/core_ext/object/try"
+require "active_support/core_ext/hash/indifferent_access"
module ActiveRecord
module NestedAttributes #:nodoc:
@@ -10,8 +13,7 @@ module ActiveRecord
extend ActiveSupport::Concern
included do
- class_attribute :nested_attributes_options, instance_writer: false
- self.nested_attributes_options = {}
+ class_attribute :nested_attributes_options, instance_writer: false, default: {}
end
# = Active Record Nested Attributes
@@ -61,6 +63,18 @@ module ActiveRecord
# member.update params[:member]
# member.avatar.icon # => 'sad'
#
+ # If you want to update the current avatar without providing the id, you must add the <tt>:update_only</tt> option.
+ #
+ # class Member < ActiveRecord::Base
+ # has_one :avatar
+ # accepts_nested_attributes_for :avatar, update_only: true
+ # end
+ #
+ # params = { member: { avatar_attributes: { icon: 'sad' } } }
+ # member.update params[:member]
+ # member.avatar.id # => 2
+ # member.avatar.icon # => 'sad'
+ #
# By default you will only be able to set and update attributes on the
# associated model. If you want to destroy the associated model through the
# attributes hash, you have to enable it first using the
@@ -267,7 +281,7 @@ module ActiveRecord
# member.avatar_attributes = {icon: 'sad'}
# member.avatar.width # => 200
module ClassMethods
- REJECT_ALL_BLANK_PROC = proc { |attributes| attributes.all? { |key, value| key == '_destroy' || value.blank? } }
+ REJECT_ALL_BLANK_PROC = proc { |attributes| attributes.all? { |key, value| key == "_destroy" || value.blank? } }
# Defines an attributes writer for the specified association(s).
#
@@ -317,7 +331,7 @@ module ActiveRecord
# # creates avatar_attributes= and posts_attributes=
# accepts_nested_attributes_for :avatar, :posts, allow_destroy: true
def accepts_nested_attributes_for(*attr_names)
- options = { :allow_destroy => false, :update_only => false }
+ options = { allow_destroy: false, update_only: false }
options.update(attr_names.extract_options!)
options.assert_valid_keys(:allow_destroy, :reject_if, :limit, :update_only)
options[:reject_if] = REJECT_ALL_BLANK_PROC if options[:reject_if] == :all_blank
@@ -341,27 +355,25 @@ module ActiveRecord
private
- # Generates a writer method for this association. Serves as a point for
- # accessing the objects in the association. For example, this method
- # could generate the following:
- #
- # def pirate_attributes=(attributes)
- # assign_nested_attributes_for_one_to_one_association(:pirate, attributes)
- # end
- #
- # This redirects the attempts to write objects in an association through
- # the helper methods defined below. Makes it seem like the nested
- # associations are just regular associations.
- def generate_association_writer(association_name, type)
- generated_association_methods.module_eval <<-eoruby, __FILE__, __LINE__ + 1
- if method_defined?(:#{association_name}_attributes=)
- remove_method(:#{association_name}_attributes=)
- end
- def #{association_name}_attributes=(attributes)
- assign_nested_attributes_for_#{type}_association(:#{association_name}, attributes)
- end
- eoruby
- end
+ # Generates a writer method for this association. Serves as a point for
+ # accessing the objects in the association. For example, this method
+ # could generate the following:
+ #
+ # def pirate_attributes=(attributes)
+ # assign_nested_attributes_for_one_to_one_association(:pirate, attributes)
+ # end
+ #
+ # This redirects the attempts to write objects in an association through
+ # the helper methods defined below. Makes it seem like the nested
+ # associations are just regular associations.
+ def generate_association_writer(association_name, type)
+ generated_association_methods.module_eval <<-eoruby, __FILE__, __LINE__ + 1
+ silence_redefinition_of_method :#{association_name}_attributes=
+ def #{association_name}_attributes=(attributes)
+ assign_nested_attributes_for_#{type}_association(:#{association_name}, attributes)
+ end
+ eoruby
+ end
end
# Returns ActiveRecord::AutosaveAssociation::marked_for_destruction? It's
@@ -375,213 +387,214 @@ module ActiveRecord
private
- # Attribute hash keys that should not be assigned as normal attributes.
- # These hash keys are nested attributes implementation details.
- UNASSIGNABLE_KEYS = %w( id _destroy )
-
- # Assigns the given attributes to the association.
- #
- # If an associated record does not yet exist, one will be instantiated. If
- # an associated record already exists, the method's behavior depends on
- # the value of the update_only option. If update_only is +false+ and the
- # given attributes include an <tt>:id</tt> that matches the existing record's
- # id, then the existing record will be modified. If no <tt>:id</tt> is provided
- # it will be replaced with a new record. If update_only is +true+ the existing
- # record will be modified regardless of whether an <tt>:id</tt> is provided.
- #
- # If the given attributes include a matching <tt>:id</tt> attribute, or
- # update_only is true, and a <tt>:_destroy</tt> key set to a truthy value,
- # then the existing record will be marked for destruction.
- def assign_nested_attributes_for_one_to_one_association(association_name, attributes)
- options = self.nested_attributes_options[association_name]
- if attributes.respond_to?(:permitted?)
- attributes = attributes.to_h
- end
- attributes = attributes.with_indifferent_access
- existing_record = send(association_name)
+ # Attribute hash keys that should not be assigned as normal attributes.
+ # These hash keys are nested attributes implementation details.
+ UNASSIGNABLE_KEYS = %w( id _destroy )
+
+ # Assigns the given attributes to the association.
+ #
+ # If an associated record does not yet exist, one will be instantiated. If
+ # an associated record already exists, the method's behavior depends on
+ # the value of the update_only option. If update_only is +false+ and the
+ # given attributes include an <tt>:id</tt> that matches the existing record's
+ # id, then the existing record will be modified. If no <tt>:id</tt> is provided
+ # it will be replaced with a new record. If update_only is +true+ the existing
+ # record will be modified regardless of whether an <tt>:id</tt> is provided.
+ #
+ # If the given attributes include a matching <tt>:id</tt> attribute, or
+ # update_only is true, and a <tt>:_destroy</tt> key set to a truthy value,
+ # then the existing record will be marked for destruction.
+ def assign_nested_attributes_for_one_to_one_association(association_name, attributes)
+ options = nested_attributes_options[association_name]
+ if attributes.respond_to?(:permitted?)
+ attributes = attributes.to_h
+ end
+ attributes = attributes.with_indifferent_access
+ existing_record = send(association_name)
- if (options[:update_only] || !attributes['id'].blank?) && existing_record &&
- (options[:update_only] || existing_record.id.to_s == attributes['id'].to_s)
- assign_to_or_mark_for_destruction(existing_record, attributes, options[:allow_destroy]) unless call_reject_if(association_name, attributes)
+ if (options[:update_only] || !attributes["id"].blank?) && existing_record &&
+ (options[:update_only] || existing_record.id.to_s == attributes["id"].to_s)
+ assign_to_or_mark_for_destruction(existing_record, attributes, options[:allow_destroy]) unless call_reject_if(association_name, attributes)
- elsif attributes['id'].present?
- raise_nested_attributes_record_not_found!(association_name, attributes['id'])
+ elsif attributes["id"].present?
+ raise_nested_attributes_record_not_found!(association_name, attributes["id"])
- elsif !reject_new_record?(association_name, attributes)
- assignable_attributes = attributes.except(*UNASSIGNABLE_KEYS)
+ elsif !reject_new_record?(association_name, attributes)
+ assignable_attributes = attributes.except(*UNASSIGNABLE_KEYS)
- if existing_record && existing_record.new_record?
- existing_record.assign_attributes(assignable_attributes)
- association(association_name).initialize_attributes(existing_record)
- else
- method = "build_#{association_name}"
- if respond_to?(method)
- send(method, assignable_attributes)
+ if existing_record && existing_record.new_record?
+ existing_record.assign_attributes(assignable_attributes)
+ association(association_name).initialize_attributes(existing_record)
else
- raise ArgumentError, "Cannot build association `#{association_name}'. Are you trying to build a polymorphic one-to-one association?"
+ method = :"build_#{association_name}"
+ if respond_to?(method)
+ send(method, assignable_attributes)
+ else
+ raise ArgumentError, "Cannot build association `#{association_name}'. Are you trying to build a polymorphic one-to-one association?"
+ end
end
end
end
- end
- # Assigns the given attributes to the collection association.
- #
- # Hashes with an <tt>:id</tt> value matching an existing associated record
- # will update that record. Hashes without an <tt>:id</tt> value will build
- # a new record for the association. Hashes with a matching <tt>:id</tt>
- # value and a <tt>:_destroy</tt> key set to a truthy value will mark the
- # matched record for destruction.
- #
- # For example:
- #
- # assign_nested_attributes_for_collection_association(:people, {
- # '1' => { id: '1', name: 'Peter' },
- # '2' => { name: 'John' },
- # '3' => { id: '2', _destroy: true }
- # })
- #
- # Will update the name of the Person with ID 1, build a new associated
- # person with the name 'John', and mark the associated Person with ID 2
- # for destruction.
- #
- # Also accepts an Array of attribute hashes:
- #
- # assign_nested_attributes_for_collection_association(:people, [
- # { id: '1', name: 'Peter' },
- # { name: 'John' },
- # { id: '2', _destroy: true }
- # ])
- def assign_nested_attributes_for_collection_association(association_name, attributes_collection)
- options = self.nested_attributes_options[association_name]
- if attributes_collection.respond_to?(:permitted?)
- attributes_collection = attributes_collection.to_h
- end
+ # Assigns the given attributes to the collection association.
+ #
+ # Hashes with an <tt>:id</tt> value matching an existing associated record
+ # will update that record. Hashes without an <tt>:id</tt> value will build
+ # a new record for the association. Hashes with a matching <tt>:id</tt>
+ # value and a <tt>:_destroy</tt> key set to a truthy value will mark the
+ # matched record for destruction.
+ #
+ # For example:
+ #
+ # assign_nested_attributes_for_collection_association(:people, {
+ # '1' => { id: '1', name: 'Peter' },
+ # '2' => { name: 'John' },
+ # '3' => { id: '2', _destroy: true }
+ # })
+ #
+ # Will update the name of the Person with ID 1, build a new associated
+ # person with the name 'John', and mark the associated Person with ID 2
+ # for destruction.
+ #
+ # Also accepts an Array of attribute hashes:
+ #
+ # assign_nested_attributes_for_collection_association(:people, [
+ # { id: '1', name: 'Peter' },
+ # { name: 'John' },
+ # { id: '2', _destroy: true }
+ # ])
+ def assign_nested_attributes_for_collection_association(association_name, attributes_collection)
+ options = nested_attributes_options[association_name]
+ if attributes_collection.respond_to?(:permitted?)
+ attributes_collection = attributes_collection.to_h
+ end
- unless attributes_collection.is_a?(Hash) || attributes_collection.is_a?(Array)
- raise ArgumentError, "Hash or Array expected, got #{attributes_collection.class.name} (#{attributes_collection.inspect})"
- end
+ unless attributes_collection.is_a?(Hash) || attributes_collection.is_a?(Array)
+ raise ArgumentError, "Hash or Array expected for attribute `#{association_name}`, got #{attributes_collection.class.name} (#{attributes_collection.inspect})"
+ end
- check_record_limit!(options[:limit], attributes_collection)
+ check_record_limit!(options[:limit], attributes_collection)
- if attributes_collection.is_a? Hash
- keys = attributes_collection.keys
- attributes_collection = if keys.include?('id') || keys.include?(:id)
- [attributes_collection]
- else
- attributes_collection.values
+ if attributes_collection.is_a? Hash
+ keys = attributes_collection.keys
+ attributes_collection = if keys.include?("id") || keys.include?(:id)
+ [attributes_collection]
+ else
+ attributes_collection.values
+ end
end
- end
-
- association = association(association_name)
- existing_records = if association.loaded?
- association.target
- else
- attribute_ids = attributes_collection.map {|a| a['id'] || a[:id] }.compact
- attribute_ids.empty? ? [] : association.scope.where(association.klass.primary_key => attribute_ids)
- end
+ association = association(association_name)
- attributes_collection.each do |attributes|
- if attributes.respond_to?(:permitted?)
- attributes = attributes.to_h
+ existing_records = if association.loaded?
+ association.target
+ else
+ attribute_ids = attributes_collection.map { |a| a["id"] || a[:id] }.compact
+ attribute_ids.empty? ? [] : association.scope.where(association.klass.primary_key => attribute_ids)
end
- attributes = attributes.with_indifferent_access
- if attributes['id'].blank?
- unless reject_new_record?(association_name, attributes)
- association.build(attributes.except(*UNASSIGNABLE_KEYS))
+ attributes_collection.each do |attributes|
+ if attributes.respond_to?(:permitted?)
+ attributes = attributes.to_h
end
- elsif existing_record = existing_records.detect { |record| record.id.to_s == attributes['id'].to_s }
- unless call_reject_if(association_name, attributes)
- # Make sure we are operating on the actual object which is in the association's
- # proxy_target array (either by finding it, or adding it if not found)
- # Take into account that the proxy_target may have changed due to callbacks
- target_record = association.target.detect { |record| record.id.to_s == attributes['id'].to_s }
- if target_record
- existing_record = target_record
- else
- association.add_to_target(existing_record, :skip_callbacks)
- end
+ attributes = attributes.with_indifferent_access
- assign_to_or_mark_for_destruction(existing_record, attributes, options[:allow_destroy])
+ if attributes["id"].blank?
+ unless reject_new_record?(association_name, attributes)
+ association.reader.build(attributes.except(*UNASSIGNABLE_KEYS))
+ end
+ elsif existing_record = existing_records.detect { |record| record.id.to_s == attributes["id"].to_s }
+ unless call_reject_if(association_name, attributes)
+ # Make sure we are operating on the actual object which is in the association's
+ # proxy_target array (either by finding it, or adding it if not found)
+ # Take into account that the proxy_target may have changed due to callbacks
+ target_record = association.target.detect { |record| record.id.to_s == attributes["id"].to_s }
+ if target_record
+ existing_record = target_record
+ else
+ association.add_to_target(existing_record, :skip_callbacks)
+ end
+
+ assign_to_or_mark_for_destruction(existing_record, attributes, options[:allow_destroy])
+ end
+ else
+ raise_nested_attributes_record_not_found!(association_name, attributes["id"])
end
- else
- raise_nested_attributes_record_not_found!(association_name, attributes['id'])
end
end
- end
- # Takes in a limit and checks if the attributes_collection has too many
- # records. It accepts limit in the form of symbol, proc, or
- # number-like object (anything that can be compared with an integer).
- #
- # Raises TooManyRecords error if the attributes_collection is
- # larger than the limit.
- def check_record_limit!(limit, attributes_collection)
- if limit
- limit = case limit
- when Symbol
- send(limit)
- when Proc
- limit.call
- else
- limit
- end
+ # Takes in a limit and checks if the attributes_collection has too many
+ # records. It accepts limit in the form of symbol, proc, or
+ # number-like object (anything that can be compared with an integer).
+ #
+ # Raises TooManyRecords error if the attributes_collection is
+ # larger than the limit.
+ def check_record_limit!(limit, attributes_collection)
+ if limit
+ limit = \
+ case limit
+ when Symbol
+ send(limit)
+ when Proc
+ limit.call
+ else
+ limit
+ end
- if limit && attributes_collection.size > limit
- raise TooManyRecords, "Maximum #{limit} records are allowed. Got #{attributes_collection.size} records instead."
+ if limit && attributes_collection.size > limit
+ raise TooManyRecords, "Maximum #{limit} records are allowed. Got #{attributes_collection.size} records instead."
+ end
end
end
- end
- # Updates a record with the +attributes+ or marks it for destruction if
- # +allow_destroy+ is +true+ and has_destroy_flag? returns +true+.
- def assign_to_or_mark_for_destruction(record, attributes, allow_destroy)
- record.assign_attributes(attributes.except(*UNASSIGNABLE_KEYS))
- record.mark_for_destruction if has_destroy_flag?(attributes) && allow_destroy
- end
+ # Updates a record with the +attributes+ or marks it for destruction if
+ # +allow_destroy+ is +true+ and has_destroy_flag? returns +true+.
+ def assign_to_or_mark_for_destruction(record, attributes, allow_destroy)
+ record.assign_attributes(attributes.except(*UNASSIGNABLE_KEYS))
+ record.mark_for_destruction if has_destroy_flag?(attributes) && allow_destroy
+ end
- # Determines if a hash contains a truthy _destroy key.
- def has_destroy_flag?(hash)
- Type::Boolean.new.cast(hash['_destroy'])
- end
+ # Determines if a hash contains a truthy _destroy key.
+ def has_destroy_flag?(hash)
+ Type::Boolean.new.cast(hash["_destroy"])
+ end
- # Determines if a new record should be rejected by checking
- # has_destroy_flag? or if a <tt>:reject_if</tt> proc exists for this
- # association and evaluates to +true+.
- def reject_new_record?(association_name, attributes)
- will_be_destroyed?(association_name, attributes) || call_reject_if(association_name, attributes)
- end
+ # Determines if a new record should be rejected by checking
+ # has_destroy_flag? or if a <tt>:reject_if</tt> proc exists for this
+ # association and evaluates to +true+.
+ def reject_new_record?(association_name, attributes)
+ will_be_destroyed?(association_name, attributes) || call_reject_if(association_name, attributes)
+ end
- # Determines if a record with the particular +attributes+ should be
- # rejected by calling the reject_if Symbol or Proc (if defined).
- # The reject_if option is defined by +accepts_nested_attributes_for+.
- #
- # Returns false if there is a +destroy_flag+ on the attributes.
- def call_reject_if(association_name, attributes)
- return false if will_be_destroyed?(association_name, attributes)
+ # Determines if a record with the particular +attributes+ should be
+ # rejected by calling the reject_if Symbol or Proc (if defined).
+ # The reject_if option is defined by +accepts_nested_attributes_for+.
+ #
+ # Returns false if there is a +destroy_flag+ on the attributes.
+ def call_reject_if(association_name, attributes)
+ return false if will_be_destroyed?(association_name, attributes)
- case callback = self.nested_attributes_options[association_name][:reject_if]
- when Symbol
- method(callback).arity == 0 ? send(callback) : send(callback, attributes)
- when Proc
- callback.call(attributes)
+ case callback = nested_attributes_options[association_name][:reject_if]
+ when Symbol
+ method(callback).arity == 0 ? send(callback) : send(callback, attributes)
+ when Proc
+ callback.call(attributes)
+ end
end
- end
- # Only take into account the destroy flag if <tt>:allow_destroy</tt> is true
- def will_be_destroyed?(association_name, attributes)
- allow_destroy?(association_name) && has_destroy_flag?(attributes)
- end
+ # Only take into account the destroy flag if <tt>:allow_destroy</tt> is true
+ def will_be_destroyed?(association_name, attributes)
+ allow_destroy?(association_name) && has_destroy_flag?(attributes)
+ end
- def allow_destroy?(association_name)
- self.nested_attributes_options[association_name][:allow_destroy]
- end
+ def allow_destroy?(association_name)
+ nested_attributes_options[association_name][:allow_destroy]
+ end
- def raise_nested_attributes_record_not_found!(association_name, record_id)
- model = self.class._reflect_on_association(association_name).klass.name
- raise RecordNotFound.new("Couldn't find #{model} with ID=#{record_id} for #{self.class.name} with ID=#{id}",
- model, 'id', record_id)
- end
+ def raise_nested_attributes_record_not_found!(association_name, record_id)
+ model = self.class._reflect_on_association(association_name).klass.name
+ raise RecordNotFound.new("Couldn't find #{model} with ID=#{record_id} for #{self.class.name} with ID=#{id}",
+ model, "id", record_id)
+ end
end
end
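A usage sketch of the nested-attributes options documented above, assuming hypothetical Member and Avatar models (Member has a name column, Avatar an icon column):

  class Avatar < ActiveRecord::Base
    belongs_to :member
  end

  class Member < ActiveRecord::Base
    has_one :avatar
    accepts_nested_attributes_for :avatar, update_only: true, reject_if: :all_blank
  end

  member = Member.create!(name: "Jo", avatar_attributes: { icon: "happy" })
  member.update(avatar_attributes: { icon: "sad" }) # updates the existing avatar; no :id required
  member.update(avatar_attributes: { icon: "" })    # rejected by :all_blank, nothing is written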
diff --git a/activerecord/lib/active_record/no_touching.rb b/activerecord/lib/active_record/no_touching.rb
index edb5066fa0..697076bdae 100644
--- a/activerecord/lib/active_record/no_touching.rb
+++ b/activerecord/lib/active_record/no_touching.rb
@@ -1,10 +1,12 @@
+# frozen_string_literal: true
+
module ActiveRecord
# = Active Record No Touching
module NoTouching
extend ActiveSupport::Concern
module ClassMethods
- # Lets you selectively disable calls to `touch` for the
+ # Lets you selectively disable calls to +touch+ for the
# duration of a block.
#
# ==== Examples
@@ -41,10 +43,21 @@ module ActiveRecord
end
end
+ # Returns +true+ if the class has +no_touching+ set, +false+ otherwise.
+ #
+ # Project.no_touching do
+ # Project.first.no_touching? # true
+ # Message.first.no_touching? # false
+ # end
+ #
def no_touching?
NoTouching.applied_to?(self.class)
end
+ def touch_later(*) # :nodoc:
+ super unless no_touching?
+ end
+
def touch(*) # :nodoc:
super unless no_touching?
end
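A sketch of the no_touching behaviour above, with hypothetical Project and Message models; note that touch_later is now suppressed as well:

  Project.no_touching do
    project = Project.first
    project.touch          # no-op while Project is in no_touching mode
    project.no_touching?   # => true
    Message.first.touch    # unaffected; only Project is suppressed
  end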
diff --git a/activerecord/lib/active_record/null_relation.rb b/activerecord/lib/active_record/null_relation.rb
index 1ab4e0404f..cf0de0fdeb 100644
--- a/activerecord/lib/active_record/null_relation.rb
+++ b/activerecord/lib/active_record/null_relation.rb
@@ -1,14 +1,12 @@
+# frozen_string_literal: true
+
module ActiveRecord
module NullRelation # :nodoc:
- def exec_queries
- @records = [].freeze
- end
-
def pluck(*column_names)
[]
end
- def delete_all(_conditions = nil)
+ def delete_all
0
end
@@ -20,10 +18,6 @@ module ActiveRecord
0
end
- def size
- calculate :size, nil
- end
-
def empty?
true
end
@@ -48,33 +42,12 @@ module ActiveRecord
""
end
- def count(*)
- calculate :count, nil
- end
-
- def sum(*)
- calculate :sum, nil
- end
-
- def average(*)
- calculate :average, nil
- end
-
- def minimum(*)
- calculate :minimum, nil
- end
-
- def maximum(*)
- calculate :maximum, nil
- end
-
def calculate(operation, _column_name)
- if [:count, :sum, :size].include? operation
+ case operation
+ when :count, :sum
group_values.any? ? Hash.new : 0
- elsif [:average, :minimum, :maximum].include?(operation) && group_values.any?
- Hash.new
- else
- nil
+ when :average, :minimum, :maximum
+ group_values.any? ? Hash.new : nil
end
end
@@ -85,5 +58,11 @@ module ActiveRecord
def or(other)
other.spawn
end
+
+ private
+
+ def exec_queries
+ @records = [].freeze
+ end
end
end
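After this change a relation extended with NullRelation (for example via Model.none) answers calculations without issuing SQL. A sketch assuming a hypothetical Book model with price and author_id columns:

  scope = Book.none
  scope.count                    # => 0
  scope.sum(:price)              # => 0
  scope.average(:price)          # => nil
  scope.group(:author_id).count  # => {}
  scope.to_a                     # => [] (exec_queries never hits the database)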
diff --git a/activerecord/lib/active_record/persistence.rb b/activerecord/lib/active_record/persistence.rb
index afed5e5e85..7705cefa59 100644
--- a/activerecord/lib/active_record/persistence.rb
+++ b/activerecord/lib/active_record/persistence.rb
@@ -1,3 +1,7 @@
+# frozen_string_literal: true
+
+require "active_record/insert_all"
+
module ActiveRecord
# = Active Record \Persistence
module Persistence
@@ -53,6 +57,192 @@ module ActiveRecord
end
end
+ # Inserts a single record into the database in a single SQL INSERT
+ # statement. It does not instantiate any models nor does it trigger
+ # Active Record callbacks or validations. Though passed values
+ # go through Active Record's type casting and serialization.
+ #
+ # See <tt>ActiveRecord::Persistence#insert_all</tt> for documentation.
+ def insert(attributes, returning: nil, unique_by: nil)
+ insert_all([ attributes ], returning: returning, unique_by: unique_by)
+ end
+
+ # Inserts multiple records into the database in a single SQL INSERT
+ # statement. It does not instantiate any models nor does it trigger
+ # Active Record callbacks or validations. Though passed values
+ # go through Active Record's type casting and serialization.
+ #
+ # The +attributes+ parameter is an Array of Hashes. Every Hash determines
+ # the attributes for a single row and must have the same keys.
+ #
+ # Rows are considered to be unique by every unique index on the table. Any
+ # duplicate rows are skipped.
+ # Override with <tt>:unique_by</tt> (see below).
+ #
+ # Returns an <tt>ActiveRecord::Result</tt> with its contents based on
+ # <tt>:returning</tt> (see below).
+ #
+ # ==== Options
+ #
+ # [:returning]
+ # (Postgres-only) An array of attributes to return for all successfully
+ # inserted records, which by default is the primary key.
+ # Pass <tt>returning: %w[ id name ]</tt> for both id and name
+ # or <tt>returning: false</tt> to omit the underlying <tt>RETURNING</tt> SQL
+ # clause entirely.
+ #
+ # [:unique_by]
+ # (Postgres and SQLite only) By default rows are considered to be unique
+ # by every unique index on the table. Any duplicate rows are skipped.
+ #
+ # To skip rows according to just one unique index pass <tt>:unique_by</tt>.
+ #
+ # Consider a Book model where no duplicate ISBNs make sense, but if any
+ # row has an existing id, or is not unique by another unique index,
+ # <tt>ActiveRecord::RecordNotUnique</tt> is raised.
+ #
+ # Unique indexes can be identified by columns or name:
+ #
+ # unique_by: :isbn
+ # unique_by: %i[ author_id name ]
+ # unique_by: :index_books_on_isbn
+ #
+ # Because it relies on the index information from the database
+ # <tt>:unique_by</tt> is recommended to be paired with
+ # Active Record's schema_cache.
+ #
+ # ==== Example
+ #
+ # # Insert records and skip inserting any duplicates.
+ # # Here "Eloquent Ruby" is skipped because its id is not unique.
+ #
+ # Book.insert_all([
+ # { id: 1, title: "Rework", author: "David" },
+ # { id: 1, title: "Eloquent Ruby", author: "Russ" }
+ # ])
+ def insert_all(attributes, returning: nil, unique_by: nil)
+ InsertAll.new(self, attributes, on_duplicate: :skip, returning: returning, unique_by: unique_by).execute
+ end
+
+ # Inserts a single record into the database in a single SQL INSERT
+ # statement. It does not instantiate any models nor does it trigger
+ # Active Record callbacks or validations. Though passed values
+ # go through Active Record's type casting and serialization.
+ #
+ # See <tt>ActiveRecord::Persistence#insert_all!</tt> for more.
+ def insert!(attributes, returning: nil)
+ insert_all!([ attributes ], returning: returning)
+ end
+
+ # Inserts multiple records into the database in a single SQL INSERT
+ # statement. It does not instantiate any models nor does it trigger
+ # Active Record callbacks or validations. Though passed values
+ # go through Active Record's type casting and serialization.
+ #
+ # The +attributes+ parameter is an Array of Hashes. Every Hash determines
+ # the attributes for a single row and must have the same keys.
+ #
+ # Raises <tt>ActiveRecord::RecordNotUnique</tt> if any rows violate a
+ # unique index on the table. In that case, no rows are inserted.
+ #
+ # To skip duplicate rows, see <tt>ActiveRecord::Persistence#insert_all</tt>.
+ # To replace them, see <tt>ActiveRecord::Persistence#upsert_all</tt>.
+ #
+ # Returns an <tt>ActiveRecord::Result</tt> with its contents based on
+ # <tt>:returning</tt> (see below).
+ #
+ # ==== Options
+ #
+ # [:returning]
+ # (Postgres-only) An array of attributes to return for all successfully
+ # inserted records, which by default is the primary key.
+ # Pass <tt>returning: %w[ id name ]</tt> for both id and name
+ # or <tt>returning: false</tt> to omit the underlying <tt>RETURNING</tt> SQL
+ # clause entirely.
+ #
+ # ==== Examples
+ #
+ # # Insert multiple records
+ # Book.insert_all!([
+ # { title: "Rework", author: "David" },
+ # { title: "Eloquent Ruby", author: "Russ" }
+ # ])
+ #
+ # # Raises ActiveRecord::RecordNotUnique because "Eloquent Ruby"
+ # # does not have a unique id.
+ # Book.insert_all!([
+ # { id: 1, title: "Rework", author: "David" },
+ # { id: 1, title: "Eloquent Ruby", author: "Russ" }
+ # ])
+ def insert_all!(attributes, returning: nil)
+ InsertAll.new(self, attributes, on_duplicate: :raise, returning: returning).execute
+ end
+
+ # Updates or inserts (upserts) a single record into the database in a
+ # single SQL INSERT statement. It does not instantiate any models nor does
+ # it trigger Active Record callbacks or validations. Though passed values
+ # go through Active Record's type casting and serialization.
+ #
+ # See <tt>ActiveRecord::Persistence#upsert_all</tt> for documentation.
+ def upsert(attributes, returning: nil, unique_by: nil)
+ upsert_all([ attributes ], returning: returning, unique_by: unique_by)
+ end
+
+ # Updates or inserts (upserts) multiple records into the database in a
+ # single SQL INSERT statement. It does not instantiate any models nor does
+ # it trigger Active Record callbacks or validations. Though passed values
+ # go through Active Record's type casting and serialization.
+ #
+ # The +attributes+ parameter is an Array of Hashes. Every Hash determines
+ # the attributes for a single row and must have the same keys.
+ #
+ # Returns an <tt>ActiveRecord::Result</tt> with its contents based on
+ # <tt>:returning</tt> (see below).
+ #
+ # ==== Options
+ #
+ # [:returning]
+ # (Postgres-only) An array of attributes to return for all successfully
+ # inserted records, which by default is the primary key.
+ # Pass <tt>returning: %w[ id name ]</tt> for both id and name
+ # or <tt>returning: false</tt> to omit the underlying <tt>RETURNING</tt> SQL
+ # clause entirely.
+ #
+ # [:unique_by]
+ # (Postgres and SQLite only) By default rows are considered to be unique
+ # by every unique index on the table. Any duplicate rows are skipped.
+ #
+ # To skip rows according to just one unique index pass <tt>:unique_by</tt>.
+ #
+ # Consider a Book model where no duplicate ISBNs make sense, but if any
+ # row has an existing id, or is not unique by another unique index,
+ # <tt>ActiveRecord::RecordNotUnique</tt> is raised.
+ #
+ # Unique indexes can be identified by columns or name:
+ #
+ # unique_by: :isbn
+ # unique_by: %i[ author_id name ]
+ # unique_by: :index_books_on_isbn
+ #
+ # Because it relies on the index information from the database
+ # <tt>:unique_by</tt> is recommended to be paired with
+ # Active Record's schema_cache.
+ #
+ # ==== Examples
+ #
+ # # Inserts multiple records, performing an upsert when records have duplicate ISBNs.
+ # # Here "Eloquent Ruby" overwrites "Rework" because its ISBN is duplicate.
+ #
+ # Book.upsert_all([
+ # { title: "Rework", author: "David", isbn: "1" },
+ # { title: "Eloquent Ruby", author: "Russ", isbn: "1" }
+ # ], unique_by: :isbn)
+ #
+ # Book.find_by(isbn: "1").title # => "Eloquent Ruby"
+ def upsert_all(attributes, returning: nil, unique_by: nil)
+ InsertAll.new(self, attributes, on_duplicate: :update, returning: returning, unique_by: unique_by).execute
+ end
+
# Given an attributes hash, +instantiate+ returns a new instance of
# the appropriate class. Accepts only keys as strings.
#
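A sketch combining the insert_all/upsert_all options documented above, assuming a Book model with a unique index on isbn (the returning: option is PostgreSQL-only):

  Book.insert_all(
    [
      { title: "Rework",        author: "David", isbn: "011" },
      { title: "Eloquent Ruby", author: "Russ",  isbn: "011" } # skipped: duplicate isbn
    ],
    unique_by: :isbn,
    returning: %w[ id isbn ]
  )

  Book.upsert_all(
    [{ title: "Eloquent Ruby", author: "Russ", isbn: "011" }],
    unique_by: :isbn
  ) # overwrites the matching row instead of skipping it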
@@ -63,13 +253,155 @@ module ActiveRecord
#
# See <tt>ActiveRecord::Inheritance#discriminate_class_for_record</tt> to see
# how this "single-table" inheritance mapping is implemented.
- def instantiate(attributes, column_types = {})
+ def instantiate(attributes, column_types = {}, &block)
klass = discriminate_class_for_record(attributes)
- attributes = klass.attributes_builder.build_from_database(attributes, column_types)
- klass.allocate.init_with('attributes' => attributes, 'new_record' => false)
+ instantiate_instance_of(klass, attributes, column_types, &block)
+ end
+
+ # Updates an object (or multiple objects) and saves it to the database, if validations pass.
+ # The resulting object is returned whether the object was saved successfully to the database or not.
+ #
+ # ==== Parameters
+ #
+ # * +id+ - This should be the id or an array of ids to be updated.
+ # * +attributes+ - This should be a hash of attributes or an array of hashes.
+ #
+ # ==== Examples
+ #
+ # # Updates one record
+ # Person.update(15, user_name: "Samuel", group: "expert")
+ #
+ # # Updates multiple records
+ # people = { 1 => { "first_name" => "David" }, 2 => { "first_name" => "Jeremy" } }
+ # Person.update(people.keys, people.values)
+ #
+ # # Updates multiple records from the result of a relation
+ # people = Person.where(group: "expert")
+ # people.update(group: "masters")
+ #
+ # Note: Updating a large number of records will run an UPDATE
+ # query for each record, which may cause a performance issue.
+ # When running callbacks is not needed for each record update,
+ # it is preferred to use {update_all}[rdoc-ref:Relation#update_all]
+ # for updating all records in a single query.
+ def update(id = :all, attributes)
+ if id.is_a?(Array)
+ id.map { |one_id| find(one_id) }.each_with_index { |object, idx|
+ object.update(attributes[idx])
+ }
+ elsif id == :all
+ all.each { |record| record.update(attributes) }
+ else
+ if ActiveRecord::Base === id
+ raise ArgumentError,
+ "You are passing an instance of ActiveRecord::Base to `update`. " \
+ "Please pass the id of the object by calling `.id`."
+ end
+ object = find(id)
+ object.update(attributes)
+ object
+ end
+ end
+
+ # Destroy an object (or multiple objects) that has the given id. The object is instantiated first,
+ # therefore all callbacks and filters are fired off before the object is deleted. This method is
+ # less efficient than #delete but allows cleanup methods and other actions to be run.
+ #
+ # This essentially finds the object (or multiple objects) with the given id, creates a new object
+ # from the attributes, and then calls destroy on it.
+ #
+ # ==== Parameters
+ #
+ # * +id+ - This should be the id or an array of ids to be destroyed.
+ #
+ # ==== Examples
+ #
+ # # Destroy a single object
+ # Todo.destroy(1)
+ #
+ # # Destroy multiple objects
+ # todos = [1,2,3]
+ # Todo.destroy(todos)
+ def destroy(id)
+ if id.is_a?(Array)
+ find(id).each(&:destroy)
+ else
+ find(id).destroy
+ end
+ end
+
+ # Deletes the row with a primary key matching the +id+ argument, using an
+ # SQL +DELETE+ statement, and returns the number of rows deleted. Active
+ # Record objects are not instantiated, so the object's callbacks are not
+ # executed, including any <tt>:dependent</tt> association options.
+ #
+ # You can delete multiple rows at once by passing an Array of <tt>id</tt>s.
+ #
+ # Note: Although it is often much faster than the alternative, #destroy,
+ # skipping callbacks might bypass business logic in your application
+ # that ensures referential integrity or performs other essential jobs.
+ #
+ # ==== Examples
+ #
+ # # Delete a single row
+ # Todo.delete(1)
+ #
+ # # Delete multiple rows
+ # Todo.delete([2,3,4])
+ def delete(id_or_array)
+ delete_by(primary_key => id_or_array)
+ end
+
+ def _insert_record(values) # :nodoc:
+ primary_key_value = nil
+
+ if primary_key && Hash === values
+ primary_key_value = values[primary_key]
+
+ if !primary_key_value && prefetch_primary_key?
+ primary_key_value = next_sequence_value
+ values[primary_key] = primary_key_value
+ end
+ end
+
+ if values.empty?
+ im = arel_table.compile_insert(connection.empty_insert_statement_value(primary_key))
+ im.into arel_table
+ else
+ im = arel_table.compile_insert(_substitute_values(values))
+ end
+
+ connection.insert(im, "#{self} Create", primary_key || false, primary_key_value)
+ end
+
+ def _update_record(values, constraints) # :nodoc:
+ constraints = _substitute_values(constraints).map { |attr, bind| attr.eq(bind) }
+
+ um = arel_table.where(
+ constraints.reduce(&:and)
+ ).compile_update(_substitute_values(values), primary_key)
+
+ connection.update(um, "#{self} Update")
+ end
+
+ def _delete_record(constraints) # :nodoc:
+ constraints = _substitute_values(constraints).map { |attr, bind| attr.eq(bind) }
+
+ dm = Arel::DeleteManager.new
+ dm.from(arel_table)
+ dm.wheres = constraints
+
+ connection.delete(dm, "#{self} Destroy")
end
private
+ # Given a class, an attributes hash, +instantiate_instance_of+ returns a
+ # new instance of the class. Accepts only keys as strings.
+ def instantiate_instance_of(klass, attributes, column_types = {}, &block)
+ attributes = klass.attributes_builder.build_from_database(attributes, column_types)
+ klass.allocate.init_with_attributes(attributes, &block)
+ end
+
# Called by +instantiate+ to decide which class to use for a new
# record instance.
#
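The class-level update/destroy/delete trio documented above, sketched with hypothetical Person and Todo models:

  Person.update(15, user_name: "Samuel", group: "expert")   # find + update, runs validations and callbacks

  people = { 1 => { "first_name" => "David" }, 2 => { "first_name" => "Jeremy" } }
  Person.update(people.keys, people.values)                 # one UPDATE per record

  Todo.destroy([1, 2, 3])   # instantiates each record and fires destroy callbacks
  Todo.delete([2, 3, 4])    # a single DELETE via delete_by, no callbacks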
@@ -78,6 +410,14 @@ module ActiveRecord
def discriminate_class_for_record(record)
self
end
+
+ def _substitute_values(values)
+ values.map do |name, value|
+ attr = arel_attribute(name)
+ bind = predicate_builder.build_bind_attribute(name, value)
+ [attr, bind]
+ end
+ end
end
# Returns true if this object hasn't been saved yet -- that is, a record
@@ -100,6 +440,10 @@ module ActiveRecord
!(@new_record || @destroyed)
end
+ ##
+ # :call-seq:
+ # save(*args)
+ #
# Saves the model.
#
# If the model is new, a record gets created in the database, otherwise
@@ -107,7 +451,7 @@ module ActiveRecord
#
# By default, save always runs validations. If any of them fail the action
# is cancelled and #save returns +false+, and the record won't be saved. However, if you supply
- # validate: false, validations are bypassed altogether. See
+ # <tt>validate: false</tt>, validations are bypassed altogether. See
# ActiveRecord::Validations for more information.
#
# By default, #save also sets the +updated_at+/+updated_on+ attributes to
@@ -121,12 +465,16 @@ module ActiveRecord
#
# Attributes marked as readonly are silently ignored if the record is
# being updated.
- def save(*args)
- create_or_update(*args)
+ def save(*args, &block)
+ create_or_update(*args, &block)
rescue ActiveRecord::RecordInvalid
false
end
+ ##
+ # :call-seq:
+ # save!(*args)
+ #
# Saves the model.
#
# If the model is new, a record gets created in the database, otherwise
@@ -134,7 +482,7 @@ module ActiveRecord
#
# By default, #save! always runs validations. If any of them fail
# ActiveRecord::RecordInvalid gets raised, and the record won't be saved. However, if you supply
- # validate: false, validations are bypassed altogether. See
+ # <tt>validate: false</tt>, validations are bypassed altogether. See
# ActiveRecord::Validations for more information.
#
# By default, #save! also sets the +updated_at+/+updated_on+ attributes to
@@ -148,8 +496,10 @@ module ActiveRecord
#
# Attributes marked as readonly are silently ignored if the record is
# being updated.
- def save!(*args)
- create_or_update(*args) || raise(RecordNotSaved.new("Failed to save the record", self))
+ #
+ # Unless an error is raised, returns true.
+ def save!(*args, &block)
+ create_or_update(*args, &block) || raise(RecordNotSaved.new("Failed to save the record", self))
end
# Deletes the record in the database and freezes this instance to
@@ -165,7 +515,7 @@ module ActiveRecord
# callbacks or any <tt>:dependent</tt> association
# options, use <tt>#destroy</tt>.
def delete
- self.class.delete(id) if persisted?
+ _delete_row if persisted?
@destroyed = true
freeze
end
@@ -178,10 +528,14 @@ module ActiveRecord
# and #destroy returns +false+.
# See ActiveRecord::Callbacks for further details.
def destroy
- raise ReadOnlyRecord, "#{self.class} is marked as readonly" if readonly?
+ _raise_readonly_record_error if readonly?
destroy_associations
self.class.connection.add_transaction_record(self)
- destroy_row if persisted?
+ @_trigger_destroy_callback = if persisted?
+ destroy_row > 0
+ else
+ true
+ end
@destroyed = true
freeze
end
@@ -210,9 +564,10 @@ module ActiveRecord
# Any change to the attributes on either instance will affect both instances.
# If you want to change the sti column as well, use #becomes! instead.
def becomes(klass)
- became = klass.new
+ became = klass.allocate
+ became.send(:initialize)
became.instance_variable_set("@attributes", @attributes)
- became.instance_variable_set("@mutation_tracker", @mutation_tracker) if defined?(@mutation_tracker)
+ became.instance_variable_set("@mutations_from_database", @mutations_from_database ||= nil)
became.instance_variable_set("@changed_attributes", attributes_changed_by_setter)
became.instance_variable_set("@new_record", new_record?)
became.instance_variable_set("@destroyed", destroyed?)
@@ -252,7 +607,8 @@ module ActiveRecord
name = name.to_s
verify_readonly_attribute(name)
public_send("#{name}=", value)
- save(validate: false) if changed?
+
+ save(validate: false)
end
# Updates the attributes of the model from the passed-in hash and saves the
@@ -268,6 +624,7 @@ module ActiveRecord
end
alias update_attributes update
+ deprecate update_attributes: "please, use update instead"
# Updates its receiver just like #update but calls #save! instead
# of +save+, so an exception is raised if the record is invalid and saving will fail.
@@ -281,6 +638,7 @@ module ActiveRecord
end
alias update_attributes! update!
+ deprecate update_attributes!: "please, use update! instead"
# Equivalent to <code>update_columns(name => value)</code>.
def update_column(name, value)
@@ -311,13 +669,17 @@ module ActiveRecord
verify_readonly_attribute(key.to_s)
end
- updated_count = self.class.unscoped.where(self.class.primary_key => id).update_all(attributes)
-
+ id_in_database = self.id_in_database
attributes.each do |k, v|
- raw_write_attribute(k, v)
+ write_attribute_without_type_cast(k, v)
end
- updated_count == 1
+ affected_rows = self.class._update_record(
+ attributes,
+ self.class.primary_key => id_in_database
+ )
+
+ affected_rows == 1
end
# Initializes +attribute+ to zero if +nil+ and adds the value passed as +by+ (default is 1).
@@ -329,14 +691,16 @@ module ActiveRecord
self
end
- # Wrapper around #increment that saves the record. This method differs from
- # its non-bang version in that it passes through the attribute setter.
- # Saving is not subjected to validation checks. Returns +true+ if the
- # record could be saved.
- def increment!(attribute, by = 1)
+ # Wrapper around #increment that writes the update to the database.
+ # Only +attribute+ is updated; the record itself is not saved.
+ # This means that any other modified attributes will still be dirty.
+ # Validations and callbacks are skipped. Supports the +touch+ option from
+ # +update_counters+, see that for more.
+ # Returns +self+.
+ def increment!(attribute, by = 1, touch: nil)
increment(attribute, by)
- change = public_send(attribute) - (attribute_was(attribute.to_s) || 0)
- self.class.update_counters(id, attribute => change)
+ change = public_send(attribute) - (attribute_in_database(attribute.to_s) || 0)
+ self.class.update_counters(id, attribute => change, touch: touch)
clear_attribute_change(attribute) # eww
self
end
@@ -348,12 +712,14 @@ module ActiveRecord
increment(attribute, -by)
end
- # Wrapper around #decrement that saves the record. This method differs from
- # its non-bang version in the sense that it passes through the attribute setter.
- # Saving is not subjected to validation checks. Returns +true+ if the
- # record could be saved.
- def decrement!(attribute, by = 1)
- increment!(attribute, -by)
+ # Wrapper around #decrement that writes the update to the database.
+ # Only +attribute+ is updated; the record itself is not saved.
+ # This means that any other modified attributes will still be dirty.
+ # Validations and callbacks are skipped. Supports the +touch+ option from
+ # +update_counters+, see that for more.
+ # Returns +self+.
+ def decrement!(attribute, by = 1, touch: nil)
+ increment!(attribute, -by, touch: touch)
end
# Assigns to +attribute+ the boolean opposite of <tt>attribute?</tt>. So
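A sketch of the revised increment!/decrement! behaviour, assuming a Post model with a comments_count column:

  post = Post.find(1)
  post.increment!(:comments_count, 5)            # single UPDATE, skips validations and callbacks
  post.increment!(:comments_count, touch: true)  # also bumps updated_at via update_counters
  post.decrement!(:comments_count)               # delegates to increment! with a negated amount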
@@ -383,8 +749,8 @@ module ActiveRecord
# Reloads the record from the database.
#
- # This method finds record by its primary key (which could be assigned manually) and
- # modifies the receiver in-place:
+ # This method finds the record by its primary key (which could be assigned
+ # manually) and modifies the receiver in-place:
#
# account = Account.new
# # => #<Account id: nil, email: nil>
@@ -439,7 +805,7 @@ module ActiveRecord
self.class.unscoped { self.class.find(id) }
end
- @attributes = fresh_object.instance_variable_get('@attributes')
+ @attributes = fresh_object.instance_variable_get("@attributes")
@new_record = false
self
end
@@ -479,37 +845,19 @@ module ActiveRecord
# ball.touch(:updated_at) # => raises ActiveRecordError
#
def touch(*names, time: nil)
- raise ActiveRecordError, "cannot touch on a new record object" unless persisted?
-
- time ||= current_time_from_proper_timezone
- attributes = timestamp_attributes_for_update_in_model
- attributes.concat(names)
-
- unless attributes.empty?
- changes = {}
-
- attributes.each do |column|
- column = column.to_s
- changes[column] = write_attribute(column, time)
- end
-
- clear_attribute_changes(changes.keys)
- primary_key = self.class.primary_key
- scope = self.class.unscoped.where(primary_key => _read_attribute(primary_key))
-
- if locking_enabled?
- locking_column = self.class.locking_column
- scope = scope.where(locking_column => _read_attribute(locking_column))
- changes[locking_column] = increment_lock
- end
-
- result = scope.update_all(changes) == 1
+ unless persisted?
+ raise ActiveRecordError, <<-MSG.squish
+ cannot touch on a new or destroyed record object. Consider using
+ persisted?, new_record?, or destroyed? before touching
+ MSG
+ end
- if !result && locking_enabled?
- raise ActiveRecord::StaleObjectError.new(self, "touch")
- end
+ attribute_names = timestamp_attributes_for_update_in_model
+ attribute_names |= names.map(&:to_s)
- result
+ unless attribute_names.empty?
+ affected_rows = _touch_row(attribute_names, time)
+ @_trigger_update_callback = affected_rows == 1
else
true
end
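A sketch of the stricter touch behaviour above (Ball and last_kicked_at are illustrative):

  ball = Ball.create!
  ball.touch                   # sets updated_at
  ball.touch(:last_kicked_at)  # sets updated_at and last_kicked_at

  Ball.new.touch               # raises ActiveRecord::ActiveRecordError: record is not persisted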
@@ -522,39 +870,71 @@ module ActiveRecord
end
def destroy_row
- relation_for_destroy.delete_all
+ _delete_row
+ end
+
+ def _delete_row
+ self.class._delete_record(self.class.primary_key => id_in_database)
end
- def relation_for_destroy
- self.class.unscoped.where(self.class.primary_key => id)
+ def _touch_row(attribute_names, time)
+ time ||= current_time_from_proper_timezone
+
+ attribute_names.each do |attr_name|
+ write_attribute(attr_name, time)
+ clear_attribute_change(attr_name)
+ end
+
+ _update_row(attribute_names, "touch")
end
- def create_or_update(*args)
- raise ReadOnlyRecord, "#{self.class} is marked as readonly" if readonly?
- result = new_record? ? _create_record : _update_record(*args)
+ def _update_row(attribute_names, attempted_action = "update")
+ self.class._update_record(
+ attributes_with_values(attribute_names),
+ self.class.primary_key => id_in_database
+ )
+ end
+
+ def create_or_update(**, &block)
+ _raise_readonly_record_error if readonly?
+ return false if destroyed?
+ result = new_record? ? _create_record(&block) : _update_record(&block)
result != false
end
# Updates the associated record with values matching those of the instance attributes.
# Returns the number of affected rows.
def _update_record(attribute_names = self.attribute_names)
- attributes_values = arel_attributes_with_values_for_update(attribute_names)
- if attributes_values.empty?
- 0
+ attribute_names = attributes_for_update(attribute_names)
+
+ if attribute_names.empty?
+ affected_rows = 0
+ @_trigger_update_callback = true
else
- self.class.unscoped._update_record attributes_values, id, id_was
+ affected_rows = _update_row(attribute_names)
+ @_trigger_update_callback = affected_rows == 1
end
+
+ yield(self) if block_given?
+
+ affected_rows
end
# Creates a record with values matching those of the instance attributes
# and returns its id.
def _create_record(attribute_names = self.attribute_names)
- attributes_values = arel_attributes_with_values_for_create(attribute_names)
+ attribute_names = attributes_for_create(attribute_names)
+
+ new_id = self.class._insert_record(
+ attributes_with_values(attribute_names)
+ )
- new_id = self.class.unscoped.insert attributes_values
self.id ||= new_id if self.class.primary_key
@new_record = false
+
+ yield(self) if block_given?
+
id
end
@@ -569,8 +949,14 @@ module ActiveRecord
@_association_destroy_exception = nil
end
+ # The name of the method used to touch a +belongs_to+ association when the
+ # +:touch+ option is used.
def belongs_to_touch_method
:touch
end
+
+ def _raise_readonly_record_error
+ raise ReadOnlyRecord, "#{self.class} is marked as readonly"
+ end
end
end
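The update_attributes/update_attributes! aliases are now deprecated; a quick sketch with a hypothetical Post model:

  post = Post.find(1)
  post.update_attributes(title: "New")   # still works, but warns: "please, use update instead"
  post.update(title: "New")              # preferred
  post.update_attributes!(title: "New")  # likewise deprecated in favour of update!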
diff --git a/activerecord/lib/active_record/query_cache.rb b/activerecord/lib/active_record/query_cache.rb
index f451ed1764..43a21e629e 100644
--- a/activerecord/lib/active_record/query_cache.rb
+++ b/activerecord/lib/active_record/query_cache.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
module ActiveRecord
# = Active Record Query Cache
class QueryCache
@@ -5,7 +7,7 @@ module ActiveRecord
# Enable the query cache within the block if Active Record is configured.
# If it's not, it will execute the given block.
def cache(&block)
- if ActiveRecord::Base.connected?
+ if connected? || !configurations.empty?
connection.cache(&block)
else
yield
@@ -15,7 +17,7 @@ module ActiveRecord
# Disable the query cache within the block if Active Record is configured.
# If it's not, it will execute the given block.
def uncached(&block)
- if ActiveRecord::Base.connected?
+ if connected? || !configurations.empty?
connection.uncached(&block)
else
yield
@@ -23,26 +25,28 @@ module ActiveRecord
end
end
- def self.install_executor_hooks(executor = ActiveSupport::Executor)
- executor.to_run do
- connection = ActiveRecord::Base.connection
- enabled = connection.query_cache_enabled
- connection_id = ActiveRecord::Base.connection_id
- connection.enable_query_cache!
-
- @restore_query_cache_settings = lambda do
- ActiveRecord::Base.connection_id = connection_id
- ActiveRecord::Base.connection.clear_query_cache
- ActiveRecord::Base.connection.disable_query_cache! unless enabled
- end
+ def self.run
+ pools = []
+
+ ActiveRecord::Base.connection_handlers.each do |key, handler|
+ pools << handler.connection_pool_list.reject { |p| p.query_cache_enabled }.each { |p| p.enable_query_cache! }
end
- executor.to_complete do
- @restore_query_cache_settings.call if defined?(@restore_query_cache_settings)
+ pools.flatten
+ end
+
+ def self.complete(pools)
+ pools.each { |pool| pool.disable_query_cache! }
- # FIXME: This should be skipped when env['rack.test']
- ActiveRecord::Base.clear_active_connections!
+ ActiveRecord::Base.connection_handlers.each do |_, handler|
+ handler.connection_pool_list.each do |pool|
+ pool.release_connection if pool.active_connection? && !pool.connection.transaction_open?
+ end
end
end
+
+ def self.install_executor_hooks(executor = ActiveSupport::Executor)
+ executor.register_hook(self)
+ end
end
end
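With the executor hooks above the query cache is enabled per connection pool for the duration of a request; the public block helpers behave as before, and now also apply when no connection has been established yet but configurations are present. Post is hypothetical:

  ActiveRecord::Base.cache do
    Post.where(published: true).to_a  # runs the SELECT
    Post.where(published: true).to_a  # served from the query cache, no SQL
  end

  ActiveRecord::Base.uncached do
    Post.first  # always goes to the database
  end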
diff --git a/activerecord/lib/active_record/querying.rb b/activerecord/lib/active_record/querying.rb
index 4e32d73001..ae1501f5a1 100644
--- a/activerecord/lib/active_record/querying.rb
+++ b/activerecord/lib/active_record/querying.rb
@@ -1,30 +1,37 @@
+# frozen_string_literal: true
+
module ActiveRecord
module Querying
- delegate :find, :take, :take!, :first, :first!, :last, :last!, :exists?, :any?, :many?, :empty?, :none?, :one?, to: :all
- delegate :second, :second!, :third, :third!, :fourth, :fourth!, :fifth, :fifth!, :forty_two, :forty_two!, :third_to_last, :third_to_last!, :second_to_last, :second_to_last!, to: :all
- delegate :first_or_create, :first_or_create!, :first_or_initialize, to: :all
- delegate :find_or_create_by, :find_or_create_by!, :find_or_initialize_by, to: :all
- delegate :find_by, :find_by!, to: :all
- delegate :destroy, :destroy_all, :delete, :delete_all, :update, :update_all, to: :all
- delegate :find_each, :find_in_batches, :in_batches, to: :all
- delegate :select, :group, :order, :except, :reorder, :limit, :offset, :joins, :left_joins, :left_outer_joins, :or,
- :where, :rewhere, :preload, :eager_load, :includes, :from, :lock, :readonly,
- :having, :create_with, :uniq, :distinct, :references, :none, :unscope, to: :all
- delegate :count, :average, :minimum, :maximum, :sum, :calculate, to: :all
- delegate :pluck, :ids, to: :all
+ QUERYING_METHODS = [
+ :find, :find_by, :find_by!, :take, :take!, :first, :first!, :last, :last!,
+ :second, :second!, :third, :third!, :fourth, :fourth!, :fifth, :fifth!,
+ :forty_two, :forty_two!, :third_to_last, :third_to_last!, :second_to_last, :second_to_last!,
+ :exists?, :any?, :many?, :none?, :one?,
+ :first_or_create, :first_or_create!, :first_or_initialize,
+ :find_or_create_by, :find_or_create_by!, :find_or_initialize_by,
+ :create_or_find_by, :create_or_find_by!,
+ :destroy_all, :delete_all, :update_all, :destroy_by, :delete_by,
+ :find_each, :find_in_batches, :in_batches,
+ :select, :reselect, :order, :reorder, :group, :limit, :offset, :joins, :left_joins, :left_outer_joins,
+ :where, :rewhere, :preload, :extract_associated, :eager_load, :includes, :from, :lock, :readonly, :extending, :or,
+ :having, :create_with, :distinct, :references, :none, :unscope, :optimizer_hints, :merge, :except, :only,
+ :count, :average, :minimum, :maximum, :sum, :calculate, :annotate,
+ :pluck, :pick, :ids
+ ].freeze # :nodoc:
+ delegate(*QUERYING_METHODS, to: :all)
# Executes a custom SQL query against your database and returns all the results. The results will
- # be returned as an array with columns requested encapsulated as attributes of the model you call
- # this method from. If you call <tt>Product.find_by_sql</tt> then the results will be returned in
+ # be returned as an array, with the requested columns encapsulated as attributes of the model you call
+ # this method from. For example, if you call <tt>Product.find_by_sql</tt>, then the results will be returned in
# a +Product+ object with the attributes you specified in the SQL query.
#
- # If you call a complicated SQL query which spans multiple tables the columns specified by the
+ # If you call a complicated SQL query which spans multiple tables, the columns specified by the
# SELECT will be attributes of the model, whether or not they are columns of the corresponding
# table.
#
- # The +sql+ parameter is a full SQL query as a string. It will be called as is, there will be
- # no database agnostic conversions performed. This should be a last resort because using, for example,
- # MySQL specific terms will lock you to using that particular database engine or require you to
+ # The +sql+ parameter is a full SQL query as a string. It will be called as is; there will be
+ # no database agnostic conversions performed. This should be a last resort because using
+ # database-specific terms will lock you into using that particular database engine, or require you to
# change your call if you switch engines.
#
# # A simple SQL query spanning multiple tables
@@ -35,10 +42,10 @@ module ActiveRecord
#
# Post.find_by_sql ["SELECT title FROM posts WHERE author = ? AND created > ?", author_id, start_date]
# Post.find_by_sql ["SELECT body FROM comments WHERE author = :user_id OR approved_by = :user_id", { :user_id => user_id }]
- def find_by_sql(sql, binds = [], preparable: nil)
+ def find_by_sql(sql, binds = [], preparable: nil, &block)
result_set = connection.select_all(sanitize_sql(sql), "#{name} Load", binds, preparable: preparable)
column_types = result_set.column_types.dup
- columns_hash.each_key { |k| column_types.delete k }
+ attribute_types.each_key { |k| column_types.delete k }
message_bus = ActiveSupport::Notifications.instrumenter
payload = {
@@ -46,14 +53,21 @@ module ActiveRecord
class_name: name
}
- message_bus.instrument('instantiation.active_record', payload) do
- result_set.map { |record| instantiate(record, column_types) }
+ message_bus.instrument("instantiation.active_record", payload) do
+ if result_set.includes_column?(inheritance_column)
+ result_set.map { |record| instantiate(record, column_types, &block) }
+ else
+ # Instantiate a homogeneous set
+ result_set.map { |record| instantiate_instance_of(self, record, column_types, &block) }
+ end
end
end
# Returns the result of an SQL statement that should only include a COUNT(*) in the SELECT part.
# The use of this method should be restricted to complicated SQL queries that can't be executed
- # using the ActiveRecord::Calculations class methods. Look into those before using this.
+ # using the ActiveRecord::Calculations class methods. Look into those before using this method,
+ # as it could lock you into a specific database engine or require a code change to switch
+ # database engines.
#
# Product.count_by_sql "SELECT COUNT(*) FROM sales s, customers c WHERE s.customer_id = c.id"
# # => 12
@@ -62,8 +76,7 @@ module ActiveRecord
#
# * +sql+ - An SQL statement which should return a count query from the database, see the example above.
def count_by_sql(sql)
- sql = sanitize_conditions(sql)
- connection.select_value(sql, "#{name} Count").to_i
+ connection.select_value(sanitize_sql(sql), "#{name} Count").to_i
end
end
end
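
A usage sketch (not part of the patch) for the two methods documented above, assuming a Post model backed by a posts table; the new &block argument is forwarded to instantiation, so each record can be adjusted as it is built.

    posts = Post.find_by_sql(
      ["SELECT * FROM posts WHERE created_at > ?", 1.week.ago]
    ) { |post| post.readonly! }

    Post.count_by_sql("SELECT COUNT(*) FROM posts") # => an Integer count
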
diff --git a/activerecord/lib/active_record/railtie.rb b/activerecord/lib/active_record/railtie.rb
index 98ea425d16..a1d7c893bf 100644
--- a/activerecord/lib/active_record/railtie.rb
+++ b/activerecord/lib/active_record/railtie.rb
@@ -1,9 +1,11 @@
+# frozen_string_literal: true
+
require "active_record"
require "rails"
require "active_model/railtie"
# For now, action_controller must always be present with
-# rails, so let's make sure that it gets required before
+# Rails, so let's make sure that it gets required before
# here. This is needed for correctly setting up the middleware.
# In the future, this might become an optional require.
require "action_controller/railtie"
@@ -13,20 +15,22 @@ module ActiveRecord
class Railtie < Rails::Railtie # :nodoc:
config.active_record = ActiveSupport::OrderedOptions.new
- config.app_generators.orm :active_record, :migration => true,
- :timestamps => true
+ config.app_generators.orm :active_record, migration: true,
+ timestamps: true
config.action_dispatch.rescue_responses.merge!(
- 'ActiveRecord::RecordNotFound' => :not_found,
- 'ActiveRecord::StaleObjectError' => :conflict,
- 'ActiveRecord::RecordInvalid' => :unprocessable_entity,
- 'ActiveRecord::RecordNotSaved' => :unprocessable_entity
+ "ActiveRecord::RecordNotFound" => :not_found,
+ "ActiveRecord::StaleObjectError" => :conflict,
+ "ActiveRecord::RecordInvalid" => :unprocessable_entity,
+ "ActiveRecord::RecordNotSaved" => :unprocessable_entity
)
-
config.active_record.use_schema_cache_dump = true
config.active_record.maintain_test_schema = true
+ config.active_record.sqlite3 = ActiveSupport::OrderedOptions.new
+ config.active_record.sqlite3.represent_boolean_as_integer = nil
+
config.eager_load_namespaces << ActiveRecord
rake_tasks do
@@ -35,8 +39,8 @@ module ActiveRecord
ActiveRecord::Tasks::DatabaseTasks.database_configuration = Rails.application.config.database_configuration
if defined?(ENGINE_ROOT) && engine = Rails::Engine.find(ENGINE_ROOT)
- if engine.paths['db/migrate'].existent
- ActiveRecord::Tasks::DatabaseTasks.migrations_paths += engine.paths['db/migrate'].to_a
+ if engine.paths["db/migrate"].existent
+ ActiveRecord::Tasks::DatabaseTasks.migrations_paths += engine.paths["db/migrate"].to_a
end
end
end
@@ -55,6 +59,7 @@ module ActiveRecord
console = ActiveSupport::Logger.new(STDERR)
Rails.logger.extend ActiveSupport::Logger.broadcast console
end
+ ActiveRecord::Base.verbose_query_logs = false
end
runner do
@@ -72,6 +77,10 @@ module ActiveRecord
ActiveSupport.on_load(:active_record) { self.logger ||= ::Rails.logger }
end
+ initializer "active_record.backtrace_cleaner" do
+ ActiveSupport.on_load(:active_record) { LogSubscriber.backtrace_cleaner = ::Rails.backtrace_cleaner }
+ end
+
initializer "active_record.migration_error" do
if config.active_record.delete(:migration_error) == :page_load
config.app_middleware.insert_after ::ActionDispatch::Callbacks,
@@ -79,19 +88,56 @@ module ActiveRecord
end
end
+ initializer "active_record.database_selector" do
+ if options = config.active_record.delete(:database_selector)
+ resolver = config.active_record.delete(:database_resolver)
+ operations = config.active_record.delete(:database_resolver_context)
+ config.app_middleware.use ActiveRecord::Middleware::DatabaseSelector, resolver, operations, options
+ end
+ end
+
+ initializer "Check for cache versioning support" do
+ config.after_initialize do |app|
+ ActiveSupport.on_load(:active_record) do
+ if app.config.active_record.cache_versioning && Rails.cache
+ unless Rails.cache.class.try(:supports_cache_versioning?)
+ raise <<-end_error
+
+You're using a cache store that doesn't support native cache versioning.
+Your best option is to upgrade to a newer version of #{Rails.cache.class}
+that supports cache versioning (#{Rails.cache.class}.supports_cache_versioning? #=> true).
+
+Next best, switch to a different cache store that does support cache versioning:
+https://guides.rubyonrails.org/caching_with_rails.html#cache-stores.
+
+To keep using the current cache store, you can turn off cache versioning entirely:
+
+ config.active_record.cache_versioning = false
+
+end_error
+ end
+ end
+ end
+ end
+ end
+
initializer "active_record.check_schema_cache_dump" do
if config.active_record.delete(:use_schema_cache_dump)
config.after_initialize do |app|
ActiveSupport.on_load(:active_record) do
- filename = File.join(app.config.paths["db"].first, "schema_cache.dump")
+ filename = File.join(app.config.paths["db"].first, "schema_cache.yml")
if File.file?(filename)
- cache = Marshal.load File.binread filename
- if cache.version == ActiveRecord::Migrator.current_version
- self.connection.schema_cache = cache
- self.connection_pool.schema_cache = cache.dup
+ current_version = ActiveRecord::Migrator.current_version
+
+ next if current_version.nil?
+
+ cache = YAML.load(File.read(filename))
+ if cache.version == current_version
+ connection.schema_cache = cache
+ connection_pool.schema_cache = cache.dup
else
- warn "Ignoring db/schema_cache.dump because it has expired. The current schema version is #{ActiveRecord::Migrator.current_version}, but the one in the cache is #{cache.version}."
+ warn "Ignoring db/schema_cache.yml because it has expired. The current schema version is #{current_version}, but the one in the cache is #{cache.version}."
end
end
end
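
For reference, a rough sketch of the file consumed above; db/schema_cache.yml is produced by the db:schema:cache:dump task that appears later in this diff, and its version is what gets compared against the current migration version.

    cache = YAML.load(File.read("db/schema_cache.yml"))
    cache.version # compared against ActiveRecord::Migrator.current_version
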
@@ -99,17 +145,49 @@ module ActiveRecord
end
end
+ initializer "active_record.define_attribute_methods" do |app|
+ config.after_initialize do
+ ActiveSupport.on_load(:active_record) do
+ if app.config.eager_load
+ descendants.each do |model|
+ # SchemaMigration and InternalMetadata both override `table_exists?`
+ # to bypass the schema cache, so skip them to avoid the extra queries.
+ next if model._internal?
+
+ # If there's no connection yet, or the schema cache doesn't have the columns
+ # hash for the model cached, `define_attribute_methods` would trigger a query.
+ next unless model.connected? && model.connection.schema_cache.columns_hash?(model.table_name)
+
+ model.define_attribute_methods
+ end
+ end
+ end
+ end
+ end
+
initializer "active_record.warn_on_records_fetched_greater_than" do
if config.active_record.warn_on_records_fetched_greater_than
ActiveSupport.on_load(:active_record) do
- require 'active_record/relation/record_fetch_warning'
+ require "active_record/relation/record_fetch_warning"
end
end
end
initializer "active_record.set_configs" do |app|
ActiveSupport.on_load(:active_record) do
- app.config.active_record.each do |k,v|
+ configs = app.config.active_record
+
+ represent_boolean_as_integer = configs.sqlite3.delete(:represent_boolean_as_integer)
+
+ unless represent_boolean_as_integer.nil?
+ ActiveSupport.on_load(:active_record_sqlite3adapter) do
+ ActiveRecord::ConnectionAdapters::SQLite3Adapter.represent_boolean_as_integer = represent_boolean_as_integer
+ end
+ end
+
+ configs.delete(:sqlite3)
+
+ configs.each do |k, v|
send "#{k}=", v
end
end
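
A hypothetical application configuration consumed by the initializer above; the sqlite3 sub-config is handled specially, while any other key is forwarded verbatim through the send("#{k}=", v) loop.

    # e.g. in config/application.rb
    config.active_record.sqlite3.represent_boolean_as_integer = true
    config.active_record.schema_format = :sql
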
@@ -119,22 +197,9 @@ module ActiveRecord
# and then establishes the connection.
initializer "active_record.initialize_database" do
ActiveSupport.on_load(:active_record) do
+ self.connection_handlers = { writing_role => ActiveRecord::Base.default_connection_handler }
self.configurations = Rails.application.config.database_configuration
-
- begin
- establish_connection
- rescue ActiveRecord::NoDatabaseError
- warn <<-end_warning
-Oops - You have a database configured, but it doesn't exist yet!
-
-Here's how to get started:
-
- 1. Configure your database in config/database.yml.
- 2. Run `bin/rails db:create` to create the database.
- 3. Run `bin/rails db:setup` to load your database schema.
-end_warning
- raise
- end
+ establish_connection
end
end
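
A small sketch of what the handler map above enables (writing_role defaults to :writing, so this is the role-based connection API in its default shape; Post is an illustrative model):

    ActiveRecord::Base.connected_to(role: :writing) do
      # queries in this block use the default connection handler set up above
      Post.first
    end
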
@@ -146,6 +211,13 @@ end_warning
end
end
+ initializer "active_record.collection_cache_association_loading" do
+ require "active_record/railties/collection_cache_association_loading"
+ ActiveSupport.on_load(:action_view) do
+ ActionView::PartialRenderer.prepend(ActiveRecord::Railties::CollectionCacheAssociationLoading)
+ end
+ end
+
initializer "active_record.set_reloader_hooks" do
ActiveSupport.on_load(:active_record) do
ActiveSupport::Reloader.before_class_unload do
@@ -158,14 +230,35 @@ end_warning
end
initializer "active_record.set_executor_hooks" do
- ActiveSupport.on_load(:active_record) do
- ActiveRecord::QueryCache.install_executor_hooks
- end
+ ActiveRecord::QueryCache.install_executor_hooks
end
initializer "active_record.add_watchable_files" do |app|
path = app.paths["db"].first
config.watchable_files.concat ["#{path}/schema.rb", "#{path}/structure.sql"]
end
+
+ initializer "active_record.clear_active_connections" do
+ config.after_initialize do
+ ActiveSupport.on_load(:active_record) do
+ # Ideally the application doesn't connect to the database during boot,
+ # but sometimes it does. In case it did, we want to empty out the
+ # connection pools so that a non-database-using process (e.g. a master
+ # process in a forking server model) doesn't retain a needless
+ # connection. If it was needed, the incremental cost of reestablishing
+ # this connection is trivial: the rest of the pool would need to be
+ # populated anyway.
+
+ clear_active_connections!
+ flush_idle_connections!
+ end
+ end
+ end
+
+ initializer "active_record.set_filter_attributes" do
+ ActiveSupport.on_load(:active_record) do
+ self.filter_attributes += Rails.application.config.filter_parameters
+ end
+ end
end
end
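
For context, a sketch of the configuration that the new "active_record.database_selector" initializer picks up; the resolver and session context classes named here ship with this version of Active Record, and the two second delay is just an example value.

    config.active_record.database_selector = { delay: 2.seconds }
    config.active_record.database_resolver = ActiveRecord::Middleware::DatabaseSelector::Resolver
    config.active_record.database_resolver_context = ActiveRecord::Middleware::DatabaseSelector::Resolver::Session
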
diff --git a/activerecord/lib/active_record/railties/collection_cache_association_loading.rb b/activerecord/lib/active_record/railties/collection_cache_association_loading.rb
new file mode 100644
index 0000000000..d57680aaaa
--- /dev/null
+++ b/activerecord/lib/active_record/railties/collection_cache_association_loading.rb
@@ -0,0 +1,34 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ module Railties # :nodoc:
+ module CollectionCacheAssociationLoading #:nodoc:
+ def setup(context, options, as, block)
+ @relation = relation_from_options(options)
+
+ super
+ end
+
+ def relation_from_options(cached: nil, partial: nil, collection: nil, **_)
+ return unless cached
+
+ relation = partial if partial.is_a?(ActiveRecord::Relation)
+ relation ||= collection if collection.is_a?(ActiveRecord::Relation)
+
+ if relation && !relation.loaded?
+ relation.skip_preloading!
+ end
+ end
+
+ def collection_without_template(*)
+ @relation.preload_associations(@collection) if @relation
+ super
+ end
+
+ def collection_with_template(*)
+ @relation.preload_associations(@collection) if @relation
+ super
+ end
+ end
+ end
+end
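
A hedged usage sketch: the prepend set up in the railtie only takes effect for cached collection renders backed by a relation, e.g. from a view (posts/post and Post are illustrative):

    render partial: "posts/post", collection: Post.all, cached: true
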
diff --git a/activerecord/lib/active_record/railties/console_sandbox.rb b/activerecord/lib/active_record/railties/console_sandbox.rb
index 604a220303..8917638a5d 100644
--- a/activerecord/lib/active_record/railties/console_sandbox.rb
+++ b/activerecord/lib/active_record/railties/console_sandbox.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
ActiveRecord::Base.connection.begin_transaction(joinable: false)
at_exit do
diff --git a/activerecord/lib/active_record/railties/controller_runtime.rb b/activerecord/lib/active_record/railties/controller_runtime.rb
index 8727e46cb3..309441a057 100644
--- a/activerecord/lib/active_record/railties/controller_runtime.rb
+++ b/activerecord/lib/active_record/railties/controller_runtime.rb
@@ -1,50 +1,51 @@
-require 'active_support/core_ext/module/attr_internal'
-require 'active_record/log_subscriber'
+# frozen_string_literal: true
+
+require "active_support/core_ext/module/attr_internal"
+require "active_record/log_subscriber"
module ActiveRecord
module Railties # :nodoc:
module ControllerRuntime #:nodoc:
extend ActiveSupport::Concern
- protected
-
- attr_internal :db_runtime
-
- def process_action(action, *args)
- # We also need to reset the runtime before each action
- # because of queries in middleware or in cases we are streaming
- # and it won't be cleaned up by the method below.
- ActiveRecord::LogSubscriber.reset_runtime
- super
+ module ClassMethods # :nodoc:
+ def log_process_action(payload)
+ messages, db_runtime = super, payload[:db_runtime]
+ messages << ("ActiveRecord: %.1fms" % db_runtime.to_f) if db_runtime
+ messages
+ end
end
- def cleanup_view_runtime
- if logger.info? && ActiveRecord::Base.connected?
- db_rt_before_render = ActiveRecord::LogSubscriber.reset_runtime
- self.db_runtime = (db_runtime || 0) + db_rt_before_render
- runtime = super
- db_rt_after_render = ActiveRecord::LogSubscriber.reset_runtime
- self.db_runtime += db_rt_after_render
- runtime - db_rt_after_render
- else
+ private
+ attr_internal :db_runtime
+
+ def process_action(action, *args)
+ # We also need to reset the runtime before each action
+ # because of queries in middleware or in cases we are streaming
+ # and it won't be cleaned up by the method below.
+ ActiveRecord::LogSubscriber.reset_runtime
super
end
- end
- def append_info_to_payload(payload)
- super
- if ActiveRecord::Base.connected?
- payload[:db_runtime] = (db_runtime || 0) + ActiveRecord::LogSubscriber.reset_runtime
+ def cleanup_view_runtime
+ if logger && logger.info? && ActiveRecord::Base.connected?
+ db_rt_before_render = ActiveRecord::LogSubscriber.reset_runtime
+ self.db_runtime = (db_runtime || 0) + db_rt_before_render
+ runtime = super
+ db_rt_after_render = ActiveRecord::LogSubscriber.reset_runtime
+ self.db_runtime += db_rt_after_render
+ runtime - db_rt_after_render
+ else
+ super
+ end
end
- end
- module ClassMethods # :nodoc:
- def log_process_action(payload)
- messages, db_runtime = super, payload[:db_runtime]
- messages << ("ActiveRecord: %.1fms" % db_runtime.to_f) if db_runtime
- messages
+ def append_info_to_payload(payload)
+ super
+ if ActiveRecord::Base.connected?
+ payload[:db_runtime] = (db_runtime || 0) + ActiveRecord::LogSubscriber.reset_runtime
+ end
end
- end
end
end
end
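
Illustrative only: the db_runtime accumulated above travels in the process_action.action_controller payload, so it can also be read from a notifications subscriber:

    ActiveSupport::Notifications.subscribe("process_action.action_controller") do |_name, _start, _finish, _id, payload|
      Rails.logger.debug { "ActiveRecord: #{payload[:db_runtime].to_f.round(1)}ms" }
    end
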
diff --git a/activerecord/lib/active_record/railties/databases.rake b/activerecord/lib/active_record/railties/databases.rake
index 00cf8536e1..447def8d77 100644
--- a/activerecord/lib/active_record/railties/databases.rake
+++ b/activerecord/lib/active_record/railties/databases.rake
@@ -1,40 +1,58 @@
-require 'active_record'
+# frozen_string_literal: true
+
+require "active_record"
db_namespace = namespace :db do
desc "Set the environment value for the database"
- task "environment:set" => [:environment, :load_config] do
+ task "environment:set" => :load_config do
ActiveRecord::InternalMetadata.create_table
- ActiveRecord::InternalMetadata[:environment] = ActiveRecord::Migrator.current_environment
+ ActiveRecord::InternalMetadata[:environment] = ActiveRecord::Base.connection.migration_context.current_environment
end
- task :check_protected_environments => [:environment, :load_config] do
+ task check_protected_environments: :load_config do
ActiveRecord::Tasks::DatabaseTasks.check_protected_environments!
end
- task :load_config do
+ task load_config: :environment do
ActiveRecord::Base.configurations = ActiveRecord::Tasks::DatabaseTasks.database_configuration || {}
ActiveRecord::Migrator.migrations_paths = ActiveRecord::Tasks::DatabaseTasks.migrations_paths
end
namespace :create do
- task :all => :load_config do
+ task all: :load_config do
ActiveRecord::Tasks::DatabaseTasks.create_all
end
+
+ ActiveRecord::Tasks::DatabaseTasks.for_each do |spec_name|
+ desc "Create #{spec_name} database for current environment"
+ task spec_name => :load_config do
+ db_config = ActiveRecord::Base.configurations.configs_for(env_name: Rails.env, spec_name: spec_name)
+ ActiveRecord::Tasks::DatabaseTasks.create(db_config.config)
+ end
+ end
end
- desc 'Creates the database from DATABASE_URL or config/database.yml for the current RAILS_ENV (use db:create:all to create all databases in the config). Without RAILS_ENV or when RAILS_ENV is development, it defaults to creating the development and test databases.'
- task :create => [:load_config] do
+ desc "Creates the database from DATABASE_URL or config/database.yml for the current RAILS_ENV (use db:create:all to create all databases in the config). Without RAILS_ENV or when RAILS_ENV is development, it defaults to creating the development and test databases."
+ task create: [:load_config] do
ActiveRecord::Tasks::DatabaseTasks.create_current
end
namespace :drop do
- task :all => [:load_config, :check_protected_environments] do
+ task all: [:load_config, :check_protected_environments] do
ActiveRecord::Tasks::DatabaseTasks.drop_all
end
+
+ ActiveRecord::Tasks::DatabaseTasks.for_each do |spec_name|
+ desc "Drop #{spec_name} database for current environment"
+ task spec_name => [:load_config, :check_protected_environments] do
+ db_config = ActiveRecord::Base.configurations.configs_for(env_name: Rails.env, spec_name: spec_name)
+ ActiveRecord::Tasks::DatabaseTasks.drop(db_config.config)
+ end
+ end
end
- desc 'Drops the database from DATABASE_URL or config/database.yml for the current RAILS_ENV (use db:drop:all to drop all databases in the config). Without RAILS_ENV or when RAILS_ENV is development, it defaults to dropping the development and test databases.'
- task :drop => [:load_config, :check_protected_environments] do
+ desc "Drops the database from DATABASE_URL or config/database.yml for the current RAILS_ENV (use db:drop:all to drop all databases in the config). Without RAILS_ENV or when RAILS_ENV is development, it defaults to dropping the development and test databases."
+ task drop: [:load_config, :check_protected_environments] do
db_namespace["drop:_unsafe"].invoke
end
@@ -43,20 +61,28 @@ db_namespace = namespace :db do
end
namespace :purge do
- task :all => [:load_config, :check_protected_environments] do
+ task all: [:load_config, :check_protected_environments] do
ActiveRecord::Tasks::DatabaseTasks.purge_all
end
end
+ # desc "Truncates tables of each database for current environment"
+ task truncate_all: [:load_config, :check_protected_environments] do
+ ActiveRecord::Tasks::DatabaseTasks.truncate_all
+ end
+
# desc "Empty the database from DATABASE_URL or config/database.yml for the current RAILS_ENV (use db:purge:all to purge all databases in the config). Without RAILS_ENV it defaults to purging the development and test databases."
- task :purge => [:load_config, :check_protected_environments] do
+ task purge: [:load_config, :check_protected_environments] do
ActiveRecord::Tasks::DatabaseTasks.purge_current
end
desc "Migrate the database (options: VERSION=x, VERBOSE=false, SCOPE=blog)."
- task :migrate => [:environment, :load_config] do
- ActiveRecord::Tasks::DatabaseTasks.migrate
- db_namespace['_dump'].invoke
+ task migrate: :load_config do
+ ActiveRecord::Base.configurations.configs_for(env_name: Rails.env).each do |db_config|
+ ActiveRecord::Base.establish_connection(db_config.config)
+ ActiveRecord::Tasks::DatabaseTasks.migrate
+ end
+ db_namespace["_dump"].invoke
end
# IMPORTANT: This task won't dump the schema if ActiveRecord::Base.dump_schema_after_migration is set to false
@@ -71,167 +97,188 @@ db_namespace = namespace :db do
end
# Allow this task to be called as many times as required. An example is the
# migrate:redo task, which calls other two internally that depend on this one.
- db_namespace['_dump'].reenable
+ db_namespace["_dump"].reenable
end
namespace :migrate do
+ ActiveRecord::Tasks::DatabaseTasks.for_each do |spec_name|
+ desc "Migrate #{spec_name} database for current environment"
+ task spec_name => :load_config do
+ db_config = ActiveRecord::Base.configurations.configs_for(env_name: Rails.env, spec_name: spec_name)
+ ActiveRecord::Base.establish_connection(db_config.config)
+ ActiveRecord::Tasks::DatabaseTasks.migrate
+ end
+ end
+
# desc 'Rollbacks the database one migration and re migrate up (options: STEP=x, VERSION=x).'
- task :redo => [:environment, :load_config] do
- if ENV['VERSION']
- db_namespace['migrate:down'].invoke
- db_namespace['migrate:up'].invoke
+ task redo: :load_config do
+ raise "Empty VERSION provided" if ENV["VERSION"] && ENV["VERSION"].empty?
+
+ if ENV["VERSION"]
+ db_namespace["migrate:down"].invoke
+ db_namespace["migrate:up"].invoke
else
- db_namespace['rollback'].invoke
- db_namespace['migrate'].invoke
+ db_namespace["rollback"].invoke
+ db_namespace["migrate"].invoke
end
end
# desc 'Resets your database using your migrations for the current environment'
- task :reset => ['db:drop', 'db:create', 'db:migrate']
+ task reset: ["db:drop", "db:create", "db:migrate"]
# desc 'Runs the "up" for a given migration VERSION.'
- task :up => [:environment, :load_config] do
- version = ENV['VERSION'] ? ENV['VERSION'].to_i : nil
- raise 'VERSION is required' unless version
- ActiveRecord::Migrator.run(:up, ActiveRecord::Tasks::DatabaseTasks.migrations_paths, version)
- db_namespace['_dump'].invoke
+ task up: :load_config do
+ raise "VERSION is required" if !ENV["VERSION"] || ENV["VERSION"].empty?
+
+ ActiveRecord::Tasks::DatabaseTasks.check_target_version
+
+ ActiveRecord::Base.connection.migration_context.run(
+ :up,
+ ActiveRecord::Tasks::DatabaseTasks.target_version
+ )
+ db_namespace["_dump"].invoke
end
# desc 'Runs the "down" for a given migration VERSION.'
- task :down => [:environment, :load_config] do
- version = ENV['VERSION'] ? ENV['VERSION'].to_i : nil
- raise 'VERSION is required - To go down one migration, run db:rollback' unless version
- ActiveRecord::Migrator.run(:down, ActiveRecord::Tasks::DatabaseTasks.migrations_paths, version)
- db_namespace['_dump'].invoke
+ task down: :load_config do
+ raise "VERSION is required - To go down one migration, use db:rollback" if !ENV["VERSION"] || ENV["VERSION"].empty?
+
+ ActiveRecord::Tasks::DatabaseTasks.check_target_version
+
+ ActiveRecord::Base.connection.migration_context.run(
+ :down,
+ ActiveRecord::Tasks::DatabaseTasks.target_version
+ )
+ db_namespace["_dump"].invoke
end
- desc 'Display status of migrations'
- task :status => [:environment, :load_config] do
- unless ActiveRecord::SchemaMigration.table_exists?
- abort 'Schema migrations table does not exist yet.'
+ desc "Display status of migrations"
+ task status: :load_config do
+ ActiveRecord::Base.configurations.configs_for(env_name: Rails.env).each do |db_config|
+ ActiveRecord::Base.establish_connection(db_config.config)
+ ActiveRecord::Tasks::DatabaseTasks.migrate_status
end
- db_list = ActiveRecord::SchemaMigration.normalized_versions
-
- file_list =
- ActiveRecord::Tasks::DatabaseTasks.migrations_paths.flat_map do |path|
- Dir.foreach(path).map do |file|
- next unless ActiveRecord::Migrator.match_to_migration_filename?(file)
-
- version, name, scope = ActiveRecord::Migrator.parse_migration_filename(file)
- version = ActiveRecord::SchemaMigration.normalize_migration_number(version)
- status = db_list.delete(version) ? 'up' : 'down'
- [status, version, (name + scope).humanize]
- end.compact
- end
+ end
- db_list.map! do |version|
- ['up', version, '********** NO FILE **********']
- end
- # output
- puts "\ndatabase: #{ActiveRecord::Base.connection_config[:database]}\n\n"
- puts "#{'Status'.center(8)} #{'Migration ID'.ljust(14)} Migration Name"
- puts "-" * 50
- (db_list + file_list).sort_by { |_, version, _| version }.each do |status, version, name|
- puts "#{status.center(8)} #{version.ljust(14)} #{name}"
+ namespace :status do
+ ActiveRecord::Tasks::DatabaseTasks.for_each do |spec_name|
+ desc "Display status of migrations for #{spec_name} database"
+ task spec_name => :load_config do
+ db_config = ActiveRecord::Base.configurations.configs_for(env_name: Rails.env, spec_name: spec_name)
+ ActiveRecord::Base.establish_connection(db_config.config)
+ ActiveRecord::Tasks::DatabaseTasks.migrate_status
+ end
end
- puts
end
end
- desc 'Rolls the schema back to the previous version (specify steps w/ STEP=n).'
- task :rollback => [:environment, :load_config] do
- step = ENV['STEP'] ? ENV['STEP'].to_i : 1
- ActiveRecord::Migrator.rollback(ActiveRecord::Tasks::DatabaseTasks.migrations_paths, step)
- db_namespace['_dump'].invoke
+ desc "Rolls the schema back to the previous version (specify steps w/ STEP=n)."
+ task rollback: :load_config do
+ step = ENV["STEP"] ? ENV["STEP"].to_i : 1
+ ActiveRecord::Base.connection.migration_context.rollback(step)
+ db_namespace["_dump"].invoke
end
# desc 'Pushes the schema to the next version (specify steps w/ STEP=n).'
- task :forward => [:environment, :load_config] do
- step = ENV['STEP'] ? ENV['STEP'].to_i : 1
- ActiveRecord::Migrator.forward(ActiveRecord::Tasks::DatabaseTasks.migrations_paths, step)
- db_namespace['_dump'].invoke
+ task forward: :load_config do
+ step = ENV["STEP"] ? ENV["STEP"].to_i : 1
+ ActiveRecord::Base.connection.migration_context.forward(step)
+ db_namespace["_dump"].invoke
end
# desc 'Drops and recreates the database from db/schema.rb for the current environment and loads the seeds.'
- task :reset => [ 'db:drop', 'db:setup' ]
+ task reset: [ "db:drop", "db:setup" ]
# desc "Retrieves the charset for the current environment's database"
- task :charset => [:environment, :load_config] do
+ task charset: :load_config do
puts ActiveRecord::Tasks::DatabaseTasks.charset_current
end
# desc "Retrieves the collation for the current environment's database"
- task :collation => [:environment, :load_config] do
- begin
- puts ActiveRecord::Tasks::DatabaseTasks.collation_current
- rescue NoMethodError
- $stderr.puts 'Sorry, your database adapter is not supported yet. Feel free to submit a patch.'
- end
+ task collation: :load_config do
+ puts ActiveRecord::Tasks::DatabaseTasks.collation_current
+ rescue NoMethodError
+ $stderr.puts "Sorry, your database adapter is not supported yet. Feel free to submit a patch."
end
- desc 'Retrieves the current schema version number'
- task :version => [:environment, :load_config] do
- puts "Current version: #{ActiveRecord::Migrator.current_version}"
+ desc "Retrieves the current schema version number"
+ task version: :load_config do
+ puts "Current version: #{ActiveRecord::Base.connection.migration_context.current_version}"
end
# desc "Raises an error if there are pending migrations"
- task :abort_if_pending_migrations => [:environment, :load_config] do
- pending_migrations = ActiveRecord::Migrator.open(ActiveRecord::Tasks::DatabaseTasks.migrations_paths).pending_migrations
+ task abort_if_pending_migrations: :load_config do
+ pending_migrations = ActiveRecord::Base.connection.migration_context.open.pending_migrations
if pending_migrations.any?
puts "You have #{pending_migrations.size} pending #{pending_migrations.size > 1 ? 'migrations:' : 'migration:'}"
pending_migrations.each do |pending_migration|
- puts ' %4d %s' % [pending_migration.version, pending_migration.name]
+ puts " %4d %s" % [pending_migration.version, pending_migration.name]
end
abort %{Run `rails db:migrate` to update your database then try again.}
end
end
- desc 'Creates the database, loads the schema, and initializes with the seed data (use db:reset to also drop the database first)'
- task :setup => ['db:schema:load_if_ruby', 'db:structure:load_if_sql', :seed]
+ desc "Creates the database, loads the schema, and initializes with the seed data (use db:reset to also drop the database first)"
+ task setup: ["db:schema:load_if_ruby", "db:structure:load_if_sql", :seed]
+
+ desc "Runs setup if database does not exist, or runs migrations if it does"
+ task prepare: :load_config do
+ ActiveRecord::Base.configurations.configs_for(env_name: Rails.env).each do |db_config|
+ ActiveRecord::Base.establish_connection(db_config.config)
+ db_namespace["migrate"].invoke
+ rescue ActiveRecord::NoDatabaseError
+ db_namespace["setup"].invoke
+ end
+ end
- desc 'Loads the seed data from db/seeds.rb'
- task :seed do
- db_namespace['abort_if_pending_migrations'].invoke
+ desc "Loads the seed data from db/seeds.rb"
+ task seed: :load_config do
+ db_namespace["abort_if_pending_migrations"].invoke
ActiveRecord::Tasks::DatabaseTasks.load_seed
end
+ namespace :seed do
+ desc "Truncates tables of each database for current environment and loads the seeds"
+ task replant: [:load_config, :truncate_all, :seed]
+ end
+
namespace :fixtures do
desc "Loads fixtures into the current environment's database. Load specific fixtures using FIXTURES=x,y. Load from subdirectory in test/fixtures using FIXTURES_DIR=z. Specify an alternative path (eg. spec/fixtures) using FIXTURES_PATH=spec/fixtures."
- task :load => [:environment, :load_config] do
- require 'active_record/fixtures'
+ task load: :load_config do
+ require "active_record/fixtures"
base_dir = ActiveRecord::Tasks::DatabaseTasks.fixtures_path
- fixtures_dir = if ENV['FIXTURES_DIR']
- File.join base_dir, ENV['FIXTURES_DIR']
- else
- base_dir
- end
+ fixtures_dir = if ENV["FIXTURES_DIR"]
+ File.join base_dir, ENV["FIXTURES_DIR"]
+ else
+ base_dir
+ end
- fixture_files = if ENV['FIXTURES']
- ENV['FIXTURES'].split(',')
- else
- # The use of String#[] here is to support namespaced fixtures
- Dir["#{fixtures_dir}/**/*.yml"].map {|f| f[(fixtures_dir.size + 1)..-5] }
- end
+ fixture_files = if ENV["FIXTURES"]
+ ENV["FIXTURES"].split(",")
+ else
+ # The use of String#[] here is to support namespaced fixtures.
+ Dir["#{fixtures_dir}/**/*.yml"].map { |f| f[(fixtures_dir.size + 1)..-5] }
+ end
ActiveRecord::FixtureSet.create_fixtures(fixtures_dir, fixture_files)
end
# desc "Search for a fixture given a LABEL or ID. Specify an alternative path (eg. spec/fixtures) using FIXTURES_PATH=spec/fixtures."
- task :identify => [:environment, :load_config] do
- require 'active_record/fixtures'
+ task identify: :load_config do
+ require "active_record/fixtures"
- label, id = ENV['LABEL'], ENV['ID']
- raise 'LABEL or ID required' if label.blank? && id.blank?
+ label, id = ENV["LABEL"], ENV["ID"]
+ raise "LABEL or ID required" if label.blank? && id.blank?
puts %Q(The fixture ID for "#{label}" is #{ActiveRecord::FixtureSet.identify(label)}.) if label
base_dir = ActiveRecord::Tasks::DatabaseTasks.fixtures_path
Dir["#{base_dir}/**/*.yml"].each do |file|
- if data = YAML::load(ERB.new(IO.read(file)).result)
+ if data = YAML.load(ERB.new(IO.read(file)).result)
data.each_key do |key|
key_id = ActiveRecord::FixtureSet.identify(key)
@@ -245,134 +292,124 @@ db_namespace = namespace :db do
end
namespace :schema do
- desc 'Creates a db/schema.rb file that is portable against any DB supported by Active Record'
- task :dump => [:environment, :load_config] do
- require 'active_record/schema_dumper'
- filename = ENV['SCHEMA'] || File.join(ActiveRecord::Tasks::DatabaseTasks.db_dir, 'schema.rb')
- File.open(filename, "w:utf-8") do |file|
- ActiveRecord::SchemaDumper.dump(ActiveRecord::Base.connection, file)
+ desc "Creates a db/schema.rb file that is portable against any DB supported by Active Record"
+ task dump: :load_config do
+ require "active_record/schema_dumper"
+ ActiveRecord::Base.configurations.configs_for(env_name: Rails.env).each do |db_config|
+ filename = ActiveRecord::Tasks::DatabaseTasks.dump_filename(db_config.spec_name, :ruby)
+ File.open(filename, "w:utf-8") do |file|
+ ActiveRecord::Base.establish_connection(db_config.config)
+ ActiveRecord::SchemaDumper.dump(ActiveRecord::Base.connection, file)
+ end
end
- db_namespace['schema:dump'].reenable
+
+ db_namespace["schema:dump"].reenable
end
- desc 'Loads a schema.rb file into the database'
- task :load => [:environment, :load_config] do
- ActiveRecord::Tasks::DatabaseTasks.load_schema_current(:ruby, ENV['SCHEMA'])
+ desc "Loads a schema.rb file into the database"
+ task load: [:load_config, :check_protected_environments] do
+ ActiveRecord::Tasks::DatabaseTasks.load_schema_current(:ruby, ENV["SCHEMA"])
end
- task :load_if_ruby => ['db:create', :environment] do
+ task load_if_ruby: ["db:create", :environment] do
db_namespace["schema:load"].invoke if ActiveRecord::Base.schema_format == :ruby
end
namespace :cache do
- desc 'Creates a db/schema_cache.dump file.'
- task :dump => [:environment, :load_config] do
- con = ActiveRecord::Base.connection
- filename = File.join(ActiveRecord::Tasks::DatabaseTasks.db_dir, "schema_cache.dump")
-
- con.schema_cache.clear!
- con.data_sources.each { |table| con.schema_cache.add(table) }
- open(filename, 'wb') { |f| f.write(Marshal.dump(con.schema_cache)) }
+ desc "Creates a db/schema_cache.yml file."
+ task dump: :load_config do
+ ActiveRecord::Base.configurations.configs_for(env_name: Rails.env).each do |db_config|
+ ActiveRecord::Base.establish_connection(db_config.config)
+ filename = ActiveRecord::Tasks::DatabaseTasks.cache_dump_filename(db_config.spec_name)
+ ActiveRecord::Tasks::DatabaseTasks.dump_schema_cache(
+ ActiveRecord::Base.connection,
+ filename,
+ )
+ end
end
- desc 'Clears a db/schema_cache.dump file.'
- task :clear => [:environment, :load_config] do
- filename = File.join(ActiveRecord::Tasks::DatabaseTasks.db_dir, "schema_cache.dump")
- FileUtils.rm(filename) if File.exist?(filename)
+ desc "Clears a db/schema_cache.yml file."
+ task clear: :load_config do
+ ActiveRecord::Base.configurations.configs_for(env_name: Rails.env).each do |db_config|
+ filename = ActiveRecord::Tasks::DatabaseTasks.cache_dump_filename(db_config.spec_name)
+ rm_f filename, verbose: false
+ end
end
end
-
end
namespace :structure do
- desc 'Dumps the database structure to db/structure.sql. Specify another file with SCHEMA=db/my_structure.sql'
- task :dump => [:environment, :load_config] do
- filename = ENV['SCHEMA'] || File.join(ActiveRecord::Tasks::DatabaseTasks.db_dir, "structure.sql")
- current_config = ActiveRecord::Tasks::DatabaseTasks.current_config
- ActiveRecord::Tasks::DatabaseTasks.structure_dump(current_config, filename)
-
- if ActiveRecord::Base.connection.supports_migrations? &&
- ActiveRecord::SchemaMigration.table_exists?
- File.open(filename, "a") do |f|
- f.puts ActiveRecord::Base.connection.dump_schema_information
- f.print "\n"
+ desc "Dumps the database structure to db/structure.sql. Specify another file with SCHEMA=db/my_structure.sql"
+ task dump: :load_config do
+ ActiveRecord::Base.configurations.configs_for(env_name: Rails.env).each do |db_config|
+ ActiveRecord::Base.establish_connection(db_config.config)
+ filename = ActiveRecord::Tasks::DatabaseTasks.dump_filename(db_config.spec_name, :sql)
+ ActiveRecord::Tasks::DatabaseTasks.structure_dump(db_config.config, filename)
+ if ActiveRecord::SchemaMigration.table_exists?
+ File.open(filename, "a") do |f|
+ f.puts ActiveRecord::Base.connection.dump_schema_information
+ f.print "\n"
+ end
end
end
- db_namespace['structure:dump'].reenable
+
+ db_namespace["structure:dump"].reenable
end
desc "Recreates the databases from the structure.sql file"
- task :load => [:environment, :load_config] do
- ActiveRecord::Tasks::DatabaseTasks.load_schema_current(:sql, ENV['SCHEMA'])
+ task load: [:load_config, :check_protected_environments] do
+ ActiveRecord::Tasks::DatabaseTasks.load_schema_current(:sql, ENV["SCHEMA"])
end
- task :load_if_sql => ['db:create', :environment] do
+ task load_if_sql: ["db:create", :environment] do
db_namespace["structure:load"].invoke if ActiveRecord::Base.schema_format == :sql
end
end
namespace :test do
-
- task :deprecated do
- Rake.application.top_level_tasks.grep(/^db:test:/).each do |task|
- $stderr.puts "WARNING: #{task} is deprecated. The Rails test helper now maintains " \
- "your test schema automatically, see the release notes for details."
- end
- end
-
# desc "Recreate the test database from the current schema"
- task :load => %w(db:test:purge) do
+ task load: %w(db:test:purge) do
case ActiveRecord::Base.schema_format
- when :ruby
- db_namespace["test:load_schema"].invoke
- when :sql
- db_namespace["test:load_structure"].invoke
+ when :ruby
+ db_namespace["test:load_schema"].invoke
+ when :sql
+ db_namespace["test:load_structure"].invoke
end
end
# desc "Recreate the test database from an existent schema.rb file"
- task :load_schema => %w(db:test:purge) do
- begin
- should_reconnect = ActiveRecord::Base.connection_pool.active_connection?
- ActiveRecord::Schema.verbose = false
- ActiveRecord::Tasks::DatabaseTasks.load_schema ActiveRecord::Base.configurations['test'], :ruby, ENV['SCHEMA']
- ensure
- if should_reconnect
- ActiveRecord::Base.establish_connection(ActiveRecord::Base.configurations[ActiveRecord::Tasks::DatabaseTasks.env])
- end
+ task load_schema: %w(db:test:purge) do
+ should_reconnect = ActiveRecord::Base.connection_pool.active_connection?
+ ActiveRecord::Schema.verbose = false
+ ActiveRecord::Base.configurations.configs_for(env_name: "test").each do |db_config|
+ filename = ActiveRecord::Tasks::DatabaseTasks.dump_filename(db_config.spec_name, :ruby)
+ ActiveRecord::Tasks::DatabaseTasks.load_schema(db_config.config, :ruby, filename, "test")
+ end
+ ensure
+ if should_reconnect
+ ActiveRecord::Base.establish_connection(ActiveRecord::Base.configurations.default_hash(ActiveRecord::Tasks::DatabaseTasks.env))
end
end
# desc "Recreate the test database from an existent structure.sql file"
- task :load_structure => %w(db:test:purge) do
- ActiveRecord::Tasks::DatabaseTasks.load_schema ActiveRecord::Base.configurations['test'], :sql, ENV['SCHEMA']
- end
-
- # desc "Recreate the test database from a fresh schema"
- task :clone => %w(db:test:deprecated environment) do
- case ActiveRecord::Base.schema_format
- when :ruby
- db_namespace["test:clone_schema"].invoke
- when :sql
- db_namespace["test:clone_structure"].invoke
+ task load_structure: %w(db:test:purge) do
+ ActiveRecord::Base.configurations.configs_for(env_name: "test").each do |db_config|
+ filename = ActiveRecord::Tasks::DatabaseTasks.dump_filename(db_config.spec_name, :sql)
+ ActiveRecord::Tasks::DatabaseTasks.load_schema(db_config.config, :sql, filename, "test")
end
end
- # desc "Recreate the test database from a fresh schema.rb file"
- task :clone_schema => %w(db:test:deprecated db:schema:dump db:test:load_schema)
-
- # desc "Recreate the test database from a fresh structure.sql file"
- task :clone_structure => %w(db:test:deprecated db:structure:dump db:test:load_structure)
-
# desc "Empty the test database"
- task :purge => %w(environment load_config check_protected_environments) do
- ActiveRecord::Tasks::DatabaseTasks.purge ActiveRecord::Base.configurations['test']
+ task purge: %w(load_config check_protected_environments) do
+ ActiveRecord::Base.configurations.configs_for(env_name: "test").each do |db_config|
+ ActiveRecord::Tasks::DatabaseTasks.purge(db_config.config)
+ end
end
# desc 'Load the test schema'
- task :prepare => %w(environment load_config) do
+ task prepare: :load_config do
unless ActiveRecord::Base.configurations.blank?
- db_namespace['test:load'].invoke
+ db_namespace["test:load"].invoke
end
end
end
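
A sketch of what the configs_for(env_name:) loops in this file iterate over, assuming a hypothetical second database entry named "animals" in config/database.yml:

    ActiveRecord::Base.configurations.configs_for(env_name: "test").map(&:spec_name)
    # => ["primary", "animals"]
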
@@ -381,15 +418,19 @@ end
namespace :railties do
namespace :install do
# desc "Copies missing migrations from Railties (e.g. engines). You can specify Railties to use with FROM=railtie1,railtie2"
- task :migrations => :'db:load_config' do
- to_load = ENV['FROM'].blank? ? :all : ENV['FROM'].split(",").map(&:strip)
+ task migrations: :'db:load_config' do
+ to_load = ENV["FROM"].blank? ? :all : ENV["FROM"].split(",").map(&:strip)
railties = {}
Rails.application.migration_railties.each do |railtie|
next unless to_load == :all || to_load.include?(railtie.railtie_name)
- if railtie.respond_to?(:paths) && (path = railtie.paths['db/migrate'].first)
+ if railtie.respond_to?(:paths) && (path = railtie.paths["db/migrate"].first)
railties[railtie.railtie_name] = path
end
+
+ unless ENV["MIGRATIONS_PATH"].blank?
+ railties[railtie.railtie_name] = railtie.root + ENV["MIGRATIONS_PATH"]
+ end
end
on_skip = Proc.new do |name, migration|
@@ -401,7 +442,7 @@ namespace :railties do
end
ActiveRecord::Migration.copy(ActiveRecord::Tasks::DatabaseTasks.migrations_paths.first, railties,
- :on_skip => on_skip, :on_copy => on_copy)
+ on_skip: on_skip, on_copy: on_copy)
end
end
end
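
Assuming a second spec named "animals" in config/database.yml, the for_each blocks above generate per-database tasks that can be invoked like any other Rake task:

    Rake::Task["db:create:animals"].invoke
    Rake::Task["db:migrate:animals"].invoke
    Rake::Task["db:migrate:status:animals"].invoke
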
diff --git a/activerecord/lib/active_record/railties/jdbcmysql_error.rb b/activerecord/lib/active_record/railties/jdbcmysql_error.rb
deleted file mode 100644
index 6a38211bff..0000000000
--- a/activerecord/lib/active_record/railties/jdbcmysql_error.rb
+++ /dev/null
@@ -1,16 +0,0 @@
-#FIXME Remove if ArJdbcMysql will give.
-module ArJdbcMySQL #:nodoc:
- class Error < StandardError #:nodoc:
- attr_accessor :error_number, :sql_state
-
- def initialize msg
- super
- @error_number = nil
- @sql_state = nil
- end
-
- # Mysql gem compatibility
- alias_method :errno, :error_number
- alias_method :error, :message
- end
-end
diff --git a/activerecord/lib/active_record/readonly_attributes.rb b/activerecord/lib/active_record/readonly_attributes.rb
index ce78f1756d..7bc26993d5 100644
--- a/activerecord/lib/active_record/readonly_attributes.rb
+++ b/activerecord/lib/active_record/readonly_attributes.rb
@@ -1,22 +1,23 @@
+# frozen_string_literal: true
+
module ActiveRecord
module ReadonlyAttributes
extend ActiveSupport::Concern
included do
- class_attribute :_attr_readonly, instance_accessor: false
- self._attr_readonly = []
+ class_attribute :_attr_readonly, instance_accessor: false, default: []
end
module ClassMethods
# Attributes listed as readonly will be used to create a new record but update operations will
# ignore these fields.
def attr_readonly(*attributes)
- self._attr_readonly = Set.new(attributes.map(&:to_s)) + (self._attr_readonly || [])
+ self._attr_readonly = Set.new(attributes.map(&:to_s)) + (_attr_readonly || [])
end
# Returns an array of all the attributes that have been specified as readonly.
def readonly_attributes
- self._attr_readonly
+ _attr_readonly
end
end
end
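
A short usage sketch for the class_attribute default refactor above (Post and :author_id are illustrative names):

    class Post < ActiveRecord::Base
      attr_readonly :author_id
    end

    Post.readonly_attributes # => #<Set: {"author_id"}>
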
diff --git a/activerecord/lib/active_record/reflection.rb b/activerecord/lib/active_record/reflection.rb
index f8dffce2f1..3452cf971b 100644
--- a/activerecord/lib/active_record/reflection.rb
+++ b/activerecord/lib/active_record/reflection.rb
@@ -1,5 +1,7 @@
-require 'thread'
-require 'active_support/core_ext/string/filters'
+# frozen_string_literal: true
+
+require "active_support/core_ext/string/filters"
+require "concurrent/map"
module ActiveRecord
# = Active Record Reflection
@@ -7,37 +9,41 @@ module ActiveRecord
extend ActiveSupport::Concern
included do
- class_attribute :_reflections, instance_writer: false
- class_attribute :aggregate_reflections, instance_writer: false
- self._reflections = {}
- self.aggregate_reflections = {}
+ class_attribute :_reflections, instance_writer: false, default: {}
+ class_attribute :aggregate_reflections, instance_writer: false, default: {}
end
- def self.create(macro, name, scope, options, ar)
- klass = case macro
- when :composed_of
- AggregateReflection
- when :has_many
- HasManyReflection
- when :has_one
- HasOneReflection
- when :belongs_to
- BelongsToReflection
- else
- raise "Unsupported Macro: #{macro}"
- end
-
- reflection = klass.new(name, scope, options, ar)
- options[:through] ? ThroughReflection.new(reflection) : reflection
- end
+ class << self
+ def create(macro, name, scope, options, ar)
+ reflection = reflection_class_for(macro).new(name, scope, options, ar)
+ options[:through] ? ThroughReflection.new(reflection) : reflection
+ end
- def self.add_reflection(ar, name, reflection)
- ar.clear_reflections_cache
- ar._reflections = ar._reflections.merge(name.to_s => reflection)
- end
+ def add_reflection(ar, name, reflection)
+ ar.clear_reflections_cache
+ name = name.to_s
+ ar._reflections = ar._reflections.except(name).merge!(name => reflection)
+ end
- def self.add_aggregate_reflection(ar, name, reflection)
- ar.aggregate_reflections = ar.aggregate_reflections.merge(name.to_s => reflection)
+ def add_aggregate_reflection(ar, name, reflection)
+ ar.aggregate_reflections = ar.aggregate_reflections.merge(name.to_s => reflection)
+ end
+
+ private
+ def reflection_class_for(macro)
+ case macro
+ when :composed_of
+ AggregateReflection
+ when :has_many
+ HasManyReflection
+ when :has_one
+ HasOneReflection
+ when :belongs_to
+ BelongsToReflection
+ else
+ raise "Unsupported Macro: #{macro}"
+ end
+ end
end
# \Reflection enables the ability to examine the associations and aggregations of
@@ -135,8 +141,8 @@ module ActiveRecord
# BelongsToReflection
# HasAndBelongsToManyReflection
# ThroughReflection
- # PolymorphicReflection
- # RuntimeReflection
+ # PolymorphicReflection
+ # RuntimeReflection
class AbstractReflection # :nodoc:
def through_reflection?
false
@@ -152,14 +158,6 @@ module ActiveRecord
klass.new(attributes, &block)
end
- def quoted_table_name
- klass.quoted_table_name
- end
-
- def primary_key_type
- klass.type_for_attribute(klass.primary_key)
- end
-
# Returns the class name for the macro.
#
# <tt>composed_of :balance, class_name: 'Money'</tt> returns <tt>'Money'</tt>
@@ -170,12 +168,52 @@ module ActiveRecord
JoinKeys = Struct.new(:key, :foreign_key) # :nodoc:
- def join_keys(association_klass)
- JoinKeys.new(foreign_key, active_record_primary_key)
+ def join_keys
+ @join_keys ||= get_join_keys(klass)
+ end
+
+ # Returns a list of scopes that should be applied for this Reflection
+ # object when querying the database.
+ def scopes
+ scope ? [scope] : []
+ end
+
+ def join_scope(table, foreign_table, foreign_klass)
+ predicate_builder = predicate_builder(table)
+ scope_chain_items = join_scopes(table, predicate_builder)
+ klass_scope = klass_join_scope(table, predicate_builder)
+
+ key = join_keys.key
+ foreign_key = join_keys.foreign_key
+
+ klass_scope.where!(table[key].eq(foreign_table[foreign_key]))
+
+ if type
+ klass_scope.where!(type => foreign_klass.polymorphic_name)
+ end
+
+ if klass.finder_needs_type_condition?
+ klass_scope.where!(klass.send(:type_condition, table))
+ end
+
+ scope_chain_items.inject(klass_scope, &:merge!)
+ end
+
+ def join_scopes(table, predicate_builder) # :nodoc:
+ if scope
+ [scope_for(build_scope(table, predicate_builder))]
+ else
+ []
+ end
+ end
+
+ def klass_join_scope(table, predicate_builder) # :nodoc:
+ relation = build_scope(table, predicate_builder)
+ klass.scope_for_association(relation)
end
def constraints
- scope_chain.flatten
+ chain.flat_map(&:scopes)
end
def counter_cache_column
@@ -247,6 +285,40 @@ module ActiveRecord
def chain
collect_join_chain
end
+
+ def get_join_keys(association_klass)
+ JoinKeys.new(join_primary_key(association_klass), join_foreign_key)
+ end
+
+ def build_scope(table, predicate_builder = predicate_builder(table))
+ Relation.create(
+ klass,
+ table: table,
+ predicate_builder: predicate_builder
+ )
+ end
+
+ def join_primary_key(*)
+ foreign_key
+ end
+
+ def join_foreign_key
+ active_record_primary_key
+ end
+
+ protected
+ def actual_source_reflection # FIXME: this is a horrible name
+ self
+ end
+
+ private
+ def predicate_builder(table)
+ PredicateBuilder.new(TableMetadata.new(klass, table))
+ end
+
+ def primary_key(klass)
+ klass.primary_key || raise(UnknownPrimaryKey.new(klass))
+ end
end
# Base class for AggregateReflection and AssociationReflection. Objects of
@@ -281,7 +353,6 @@ module ActiveRecord
end
def autosave=(autosave)
- @automatic_inverse_of = false
@options[:autosave] = autosave
parent_reflection = self.parent_reflection
if parent_reflection
@@ -293,6 +364,17 @@ module ActiveRecord
#
# <tt>composed_of :balance, class_name: 'Money'</tt> returns the Money class
# <tt>has_many :clients</tt> returns the Client class
+ #
+ # class Company < ActiveRecord::Base
+ # has_many :clients
+ # end
+ #
+ # Company.reflect_on_association(:clients).klass
+ # # => Client
+ #
+ # <b>Note:</b> Do not call +klass.new+ or +klass.create+ to instantiate
+ # a new association object. Use +build_association+ or +create_association+
+ # instead. This allows plugins to hook into association object creation.
def klass
@klass ||= compute_class(class_name)
end
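
Following the note just added above, a sketch of building an association object through the reflection rather than calling klass.new directly (the attribute and value are illustrative):

    reflection = Company.reflect_on_association(:clients)
    client = reflection.build_association(name: "Initech")
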
@@ -311,14 +393,17 @@ module ActiveRecord
active_record == other_aggregation.active_record
end
+ def scope_for(relation, owner = nil)
+ relation.instance_exec(owner, &scope) || relation
+ end
+
private
def derive_class_name
name.to_s.camelize
end
end
-
- # Holds all the meta-data about an aggregation as it was specified in the
+ # Holds all the metadata about an aggregation as it was specified in the
# Active Record class.
class AggregateReflection < MacroReflection #:nodoc:
def mapping
@@ -327,26 +412,13 @@ module ActiveRecord
end
end
- # Holds all the meta-data about an association as it was specified in the
+ # Holds all the metadata about an association as it was specified in the
# Active Record class.
class AssociationReflection < MacroReflection #:nodoc:
- # Returns the target association's class.
- #
- # class Author < ActiveRecord::Base
- # has_many :books
- # end
- #
- # Author.reflect_on_association(:books).klass
- # # => Book
- #
- # <b>Note:</b> Do not call +klass.new+ or +klass.create+ to instantiate
- # a new association object. Use +build_association+ or +create_association+
- # instead. This allows plugins to hook into association object creation.
- def klass
- @klass ||= compute_class(class_name)
- end
-
def compute_class(name)
+ if polymorphic?
+ raise ArgumentError, "Polymorphic associations do not support computing the class."
+ end
active_record.send(:compute_type, name)
end
@@ -355,22 +427,22 @@ module ActiveRecord
def initialize(name, scope, options, active_record)
super
- @automatic_inverse_of = nil
@type = options[:as] && (options[:foreign_type] || "#{options[:as]}_type")
- @foreign_type = options[:foreign_type] || "#{name}_type"
+ @foreign_type = options[:polymorphic] && (options[:foreign_type] || "#{name}_type")
@constructable = calculate_constructable(macro, options)
- @association_scope_cache = {}
- @scope_lock = Mutex.new
+ @association_scope_cache = Concurrent::Map.new
+
+ if options[:class_name] && options[:class_name].class == Class
+ raise ArgumentError, "A class was passed to `:class_name` but we are expecting a string."
+ end
end
- def association_scope_cache(conn, owner)
+ def association_scope_cache(conn, owner, &block)
key = conn.prepared_statements
if polymorphic?
key = [key, owner._read_attribute(@foreign_type)]
end
- @association_scope_cache[key] ||= @scope_lock.synchronize {
- @association_scope_cache[key] ||= yield
- }
+ @association_scope_cache.compute_if_absent(key) { StatementCache.create(conn, &block) }
end
def constructable? # :nodoc:
@@ -416,7 +488,7 @@ module ActiveRecord
alias :check_eager_loadable! :check_preloadable!
def join_id_for(owner) # :nodoc:
- owner[active_record_primary_key]
+ owner[join_foreign_key]
end
def through_reflection
@@ -443,12 +515,6 @@ module ActiveRecord
false
end
- # An array of arrays of scopes. Each item in the outside array corresponds to a reflection
- # in the #chain.
- def scope_chain
- scope ? [[scope]] : [[]]
- end
-
def has_scope?
scope
end
@@ -505,7 +571,7 @@ module ActiveRecord
end
VALID_AUTOMATIC_INVERSE_MACROS = [:has_many, :has_one, :belongs_to]
- INVALID_AUTOMATIC_INVERSE_OPTIONS = [:conditions, :through, :polymorphic, :foreign_key]
+ INVALID_AUTOMATIC_INVERSE_OPTIONS = [:through, :foreign_key]
def add_as_source(seed)
seed
@@ -519,11 +585,9 @@ module ActiveRecord
seed + [self]
end
- protected
-
- def actual_source_reflection # FIXME: this is a horrible name
- self
- end
+ def extensions
+ Array(options[:extend])
+ end
private
@@ -533,18 +597,16 @@ module ActiveRecord
# Attempts to find the inverse association name automatically.
# If it cannot find a suitable inverse association name, it returns
- # nil.
+ # +nil+.
def inverse_name
- options.fetch(:inverse_of) do
- if @automatic_inverse_of == false
- nil
- else
- @automatic_inverse_of ||= automatic_inverse_of
- end
+ unless defined?(@inverse_name)
+ @inverse_name = options.fetch(:inverse_of) { automatic_inverse_of }
end
+
+ @inverse_name
end
- # returns either false or the inverse association name that it finds.
+ # returns either +nil+ or the inverse association name that it finds.
def automatic_inverse_of
if can_find_inverse_of_automatically?(self)
inverse_name = ActiveSupport::Inflector.underscore(options[:as] || active_record.name.demodulize).to_sym
@@ -561,20 +623,15 @@ module ActiveRecord
return inverse_name
end
end
-
- false
end
# Checks if the inverse reflection that is returned from the
# +automatic_inverse_of+ method is a valid reflection. We must
# make sure that the reflection's active_record name matches up
# with the current reflection's klass name.
- #
- # Note: klass will always be valid because when there's a NameError
- # from calling +klass+, +reflection+ will already be set to false.
def valid_inverse_reflection?(reflection)
reflection &&
- klass.name == reflection.active_record.name &&
+ klass <= reflection.active_record &&
can_find_inverse_of_automatically?(reflection)
end
@@ -582,9 +639,8 @@ module ActiveRecord
# us from being able to guess the inverse automatically. First, the
# <tt>inverse_of</tt> option cannot be set to false. Second, we must
# have <tt>has_many</tt>, <tt>has_one</tt>, <tt>belongs_to</tt> associations.
- # Third, we must not have options such as <tt>:polymorphic</tt> or
- # <tt>:foreign_key</tt> which prevent us from correctly guessing the
- # inverse association.
+ # Third, we must not have options such as <tt>:foreign_key</tt>
+ # which prevent us from correctly guessing the inverse association.
#
# Anything with a scope can additionally ruin our attempt at finding an
# inverse, so we exclude reflections with scopes.
@@ -614,10 +670,6 @@ module ActiveRecord
def derive_join_table
ModelSchema.derive_join_table_name active_record.table_name, klass.table_name
end
-
- def primary_key(klass)
- klass.primary_key || raise(UnknownPrimaryKey.new(klass))
- end
end
class HasManyReflection < AssociationReflection # :nodoc:
@@ -632,6 +684,10 @@ module ActiveRecord
Associations::HasManyAssociation
end
end
+
+ def association_primary_key(klass = nil)
+ primary_key(klass || self.klass)
+ end
end
class HasOneReflection < AssociationReflection # :nodoc:
@@ -667,16 +723,18 @@ module ActiveRecord
end
end
- def join_keys(association_klass)
- key = polymorphic? ? association_primary_key(association_klass) : association_primary_key
- JoinKeys.new(key, foreign_key)
+ def join_primary_key(klass = nil)
+ polymorphic? ? association_primary_key(klass) : association_primary_key
end
- def join_id_for(owner) # :nodoc:
- owner[foreign_key]
+ def join_foreign_key
+ foreign_key
end
private
+ def can_find_inverse_of_automatically?(_)
+ !polymorphic? && super
+ end
def calculate_constructable(macro, options)
!polymorphic?
@@ -684,10 +742,6 @@ module ActiveRecord
end
class HasAndBelongsToManyReflection < AssociationReflection # :nodoc:
- def initialize(name, scope, options, active_record)
- super
- end
-
def macro; :has_and_belongs_to_many; end
def collection?
@@ -695,16 +749,15 @@ module ActiveRecord
end
end
- # Holds all the meta-data about a :through association as it was specified
+ # Holds all the metadata about a :through association as it was specified
# in the Active Record class.
class ThroughReflection < AbstractReflection #:nodoc:
- attr_reader :delegate_reflection
- delegate :foreign_key, :foreign_type, :association_foreign_key,
- :active_record_primary_key, :type, :to => :source_reflection
+ delegate :foreign_key, :foreign_type, :association_foreign_key, :join_id_for,
+ :active_record_primary_key, :type, :get_join_keys, to: :source_reflection
def initialize(delegate_reflection)
@delegate_reflection = delegate_reflection
- @klass = delegate_reflection.options[:anonymous_class]
+ @klass = delegate_reflection.options[:anonymous_class]
@source_reflection_name = delegate_reflection.options[:source]
end
@@ -782,45 +835,12 @@ module ActiveRecord
through_reflection.clear_association_scope_cache
end
- # Consider the following example:
- #
- # class Person
- # has_many :articles
- # has_many :comment_tags, through: :articles
- # end
- #
- # class Article
- # has_many :comments
- # has_many :comment_tags, through: :comments, source: :tags
- # end
- #
- # class Comment
- # has_many :tags
- # end
- #
- # There may be scopes on Person.comment_tags, Article.comment_tags and/or Comment.tags,
- # but only Comment.tags will be represented in the #chain. So this method creates an array
- # of scopes corresponding to the chain.
- def scope_chain
- @scope_chain ||= begin
- scope_chain = source_reflection.scope_chain.map(&:dup)
-
- # Add to it the scope from this reflection (if any)
- scope_chain.first << scope if scope
-
- through_scope_chain = through_reflection.scope_chain.map(&:dup)
-
- if options[:source_type]
- type = foreign_type
- source_type = options[:source_type]
- through_scope_chain.first << lambda { |object|
- where(type => source_type)
- }
- end
+ def scopes
+ source_reflection.scopes + super
+ end
- # Recursively fill out the rest of the array from the through reflection
- scope_chain + through_scope_chain
- end
+ def join_scopes(table, predicate_builder) # :nodoc:
+ source_reflection.join_scopes(table, predicate_builder) + super
end
def has_scope?
@@ -829,10 +849,6 @@ module ActiveRecord
through_reflection.has_scope?
end
- def join_keys(association_klass)
- source_reflection.join_keys(association_klass)
- end
-
# A through association is nested if there would be more than one join table
def nested?
source_reflection.through_reflection? || through_reflection.through_reflection?
@@ -871,15 +887,13 @@ module ActiveRecord
}
if names.length > 1
- example_options = options.dup
- example_options[:source] = source_reflection_names.first
- ActiveSupport::Deprecation.warn \
- "Ambiguous source reflection for through association. Please " \
- "specify a :source directive on your declaration like:\n" \
- "\n" \
- " class #{active_record.name} < ActiveRecord::Base\n" \
- " #{macro} :#{name}, #{example_options}\n" \
- " end"
+ raise AmbiguousSourceReflectionForThroughAssociation.new(
+ active_record.name,
+ macro,
+ name,
+ options,
+ source_reflection_names
+ )
end
@source_reflection_name = names.first
@@ -893,10 +907,6 @@ module ActiveRecord
through_reflection.options
end
- def join_id_for(owner) # :nodoc:
- source_reflection.join_id_for(owner)
- end
-
def check_validity!
if through_reflection.nil?
raise HasManyThroughAssociationNotFoundError.new(active_record.name, self)
@@ -926,6 +936,14 @@ module ActiveRecord
raise HasOneThroughCantAssociateThroughCollection.new(active_record.name, self, through_reflection)
end
+ if parent_reflection.nil?
+ reflections = active_record.reflections.keys.map(&:to_sym)
+
+ if reflections.index(through_reflection.name) > reflections.index(name)
+ raise HasManyThroughOrderError.new(active_record.name, self, through_reflection)
+ end
+ end
+
check_validity_of_inverse!
end
@@ -947,28 +965,25 @@ module ActiveRecord
collect_join_reflections(seed + [self])
end
- def collect_join_reflections(seed)
- a = source_reflection.add_as_source seed
- if options[:source_type]
- through_reflection.add_as_polymorphic_through self, a
- else
- through_reflection.add_as_through a
- end
- end
-
protected
-
def actual_source_reflection # FIXME: this is a horrible name
- source_reflection.send(:actual_source_reflection)
+ source_reflection.actual_source_reflection
end
- def primary_key(klass)
- klass.primary_key || raise(UnknownPrimaryKey.new(klass))
+ private
+ attr_reader :delegate_reflection
+
+ def collect_join_reflections(seed)
+ a = source_reflection.add_as_source seed
+ if options[:source_type]
+ through_reflection.add_as_polymorphic_through self, a
+ else
+ through_reflection.add_as_through a
+ end
end
def inverse_name; delegate_reflection.send(:inverse_name); end
- private
def derive_class_name
# get the class_name of the belongs_to association of the through reflection
options[:source_type] || source_reflection.class_name
@@ -978,52 +993,35 @@ module ActiveRecord
public_instance_methods
delegate(*delegate_methods, to: :delegate_reflection)
-
end
- class PolymorphicReflection < ThroughReflection # :nodoc:
+ class PolymorphicReflection < AbstractReflection # :nodoc:
+ delegate :klass, :scope, :plural_name, :type, :get_join_keys, :scope_for, to: :@reflection
+
def initialize(reflection, previous_reflection)
@reflection = reflection
@previous_reflection = previous_reflection
end
- def klass
- @reflection.klass
- end
-
- def scope
- @reflection.scope
- end
-
- def table_name
- @reflection.table_name
- end
-
- def plural_name
- @reflection.plural_name
- end
-
- def join_keys(association_klass)
- @reflection.join_keys(association_klass)
- end
-
- def type
- @reflection.type
+ def join_scopes(table, predicate_builder) # :nodoc:
+ scopes = @previous_reflection.join_scopes(table, predicate_builder) + super
+ scopes << build_scope(table, predicate_builder).instance_exec(nil, &source_type_scope)
end
def constraints
- @reflection.constraints + [source_type_info]
+ @reflection.constraints + [source_type_scope]
end
- def source_type_info
- type = @previous_reflection.foreign_type
- source_type = @previous_reflection.options[:source_type]
- lambda { |object| where(type => source_type) }
- end
+ private
+ def source_type_scope
+ type = @previous_reflection.foreign_type
+ source_type = @previous_reflection.options[:source_type]
+ lambda { |object| where(type => source_type) }
+ end
end
- class RuntimeReflection < PolymorphicReflection # :nodoc:
- attr_accessor :next
+ class RuntimeReflection < AbstractReflection # :nodoc:
+ delegate :scope, :type, :constraints, :get_join_keys, to: :@reflection
def initialize(reflection, association)
@reflection = reflection
@@ -1034,24 +1032,8 @@ module ActiveRecord
@association.klass
end
- def table_name
- klass.table_name
- end
-
- def constraints
- @reflection.constraints
- end
-
- def source_type_info
- @reflection.source_type_info
- end
-
- def alias_candidate(name)
- "#{plural_name}_#{name}_join"
- end
-
- def alias_name
- Arel::Table.new(table_name)
+ def aliased_table
+ @aliased_table ||= Arel::Table.new(table_name, type_caster: klass.type_caster)
end
def all_includes; yield; end
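The hunks above replace the single JoinKeys pair returned by join_keys(association_klass) with two plain readers, join_primary_key and join_foreign_key. A minimal sketch of what a caller now sees for a belongs_to reflection; the Post/Author models and the caller are illustrative assumptions, not part of the patch:

    reflection = Post.reflect_on_association(:author)
    reflection.join_primary_key   # => "id"         (primary key on the Author side)
    reflection.join_foreign_key   # => "author_id"  (foreign key column stored on posts)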
diff --git a/activerecord/lib/active_record/relation.rb b/activerecord/lib/active_record/relation.rb
index c0ed89fc97..dd821431e1 100644
--- a/activerecord/lib/active_record/relation.rb
+++ b/activerecord/lib/active_record/relation.rb
@@ -1,16 +1,17 @@
-require "arel/collectors/bind"
+# frozen_string_literal: true
module ActiveRecord
# = Active Record \Relation
class Relation
MULTI_VALUE_METHODS = [:includes, :eager_load, :preload, :select, :group,
- :order, :joins, :left_joins, :left_outer_joins, :references,
- :extending, :unscope]
+ :order, :joins, :left_outer_joins, :references,
+ :extending, :unscope, :optimizer_hints, :annotate]
SINGLE_VALUE_METHODS = [:limit, :offset, :lock, :readonly, :reordering,
- :reverse_order, :distinct, :create_with]
+ :reverse_order, :distinct, :create_with, :skip_query_cache]
+
CLAUSE_METHODS = [:where, :having, :from]
- INVALID_METHODS_FOR_DELETE_ALL = [:limit, :distinct, :offset, :group, :having]
+ INVALID_METHODS_FOR_DELETE_ALL = [:distinct, :group, :having]
VALUE_METHODS = MULTI_VALUE_METHODS + SINGLE_VALUE_METHODS + CLAUSE_METHODS
@@ -18,95 +19,39 @@ module ActiveRecord
include FinderMethods, Calculations, SpawnMethods, QueryMethods, Batches, Explain, Delegation
attr_reader :table, :klass, :loaded, :predicate_builder
+ attr_accessor :skip_preloading_value
alias :model :klass
alias :loaded? :loaded
+ alias :locked? :lock_value
- def initialize(klass, table, predicate_builder, values = {})
+ def initialize(klass, table: klass.arel_table, predicate_builder: klass.predicate_builder, values: {})
@klass = klass
@table = table
@values = values
@offsets = {}
@loaded = false
@predicate_builder = predicate_builder
+ @delegate_to_klass = false
end
def initialize_copy(other)
- # This method is a hot spot, so for now, use Hash[] to dup the hash.
- # https://bugs.ruby-lang.org/issues/7166
- @values = Hash[@values]
+ @values = @values.dup
reset
end
- def insert(values) # :nodoc:
- primary_key_value = nil
-
- if primary_key && Hash === values
- primary_key_value = values[values.keys.find { |k|
- k.name == primary_key
- }]
-
- if !primary_key_value && klass.prefetch_primary_key?
- primary_key_value = klass.next_sequence_value
- values[arel_attribute(klass.primary_key)] = primary_key_value
- end
- end
-
- im = arel.create_insert
- im.into @table
-
- substitutes, binds = substitute_values values
-
- if values.empty? # empty insert
- im.values = Arel.sql(connection.empty_insert_statement_value)
- else
- im.insert substitutes
- end
-
- @klass.connection.insert(
- im,
- 'SQL',
- primary_key,
- primary_key_value,
- nil,
- binds)
- end
-
- def _update_record(values, id, id_was) # :nodoc:
- substitutes, binds = substitute_values values
-
- scope = @klass.unscoped
-
- if @klass.finder_needs_type_condition?
- scope.unscope!(where: @klass.inheritance_column)
- end
-
- relation = scope.where(@klass.primary_key => (id_was || id))
- bvs = binds + relation.bound_attributes
- um = relation
- .arel
- .compile_update(substitutes, @klass.primary_key)
-
- @klass.connection.update(
- um,
- 'SQL',
- bvs,
- )
+ def arel_attribute(name) # :nodoc:
+ klass.arel_attribute(name, table)
end
- def substitute_values(values) # :nodoc:
- binds = []
- substitutes = []
-
- values.each do |arel_attr, value|
- binds.push QueryAttribute.new(arel_attr.name, value, klass.type_for_attribute(arel_attr.name))
- substitutes.push [arel_attr, Arel::Nodes::BindParam.new]
+ def bind_attribute(name, value) # :nodoc:
+ if reflection = klass._reflect_on_association(name)
+ name = reflection.foreign_key
+ value = value.read_attribute(reflection.klass.primary_key) unless value.nil?
end
- [substitutes, binds]
- end
-
- def arel_attribute(name) # :nodoc:
- klass.arel_attribute(name, table)
+ attr = arel_attribute(name)
+ bind = predicate_builder.build_bind_attribute(attr.name, value)
+ yield attr, bind
end
# Initializes new record from relation while maintaining the current
@@ -121,8 +66,9 @@ module ActiveRecord
#
# user = users.new { |user| user.name = 'Oscar' }
# user.name # => Oscar
- def new(*args, &block)
- scoping { @klass.new(*args, &block) }
+ def new(attributes = nil, &block)
+ block = _deprecated_scope_block("new", &block)
+ scoping { klass.new(attributes, &block) }
end
alias build new
@@ -146,8 +92,13 @@ module ActiveRecord
#
# users.create(name: nil) # validation on name
# # => #<User id: nil, name: nil, ...>
- def create(*args, &block)
- scoping { @klass.create(*args, &block) }
+ def create(attributes = nil, &block)
+ if attributes.is_a?(Array)
+ attributes.collect { |attr| create(attr, &block) }
+ else
+ block = _deprecated_scope_block("create", &block)
+ scoping { klass.create(attributes, &block) }
+ end
end
# Similar to #create, but calls
@@ -156,8 +107,13 @@ module ActiveRecord
#
# Expects arguments in the same format as
# {ActiveRecord::Base.create!}[rdoc-ref:Persistence::ClassMethods#create!].
- def create!(*args, &block)
- scoping { @klass.create!(*args, &block) }
+ def create!(attributes = nil, &block)
+ if attributes.is_a?(Array)
+ attributes.collect { |attr| create!(attr, &block) }
+ else
+ block = _deprecated_scope_block("create!", &block)
+ scoping { klass.create!(attributes, &block) }
+ end
end
def first_or_create(attributes = nil, &block) # :nodoc:
@@ -203,23 +159,12 @@ module ActiveRecord
# failed due to validation errors it won't be persisted, you get what
# #create returns in such situation.
#
- # Please note *this method is not atomic*, it runs first a SELECT, and if
+    # Please note <b>this method is not atomic</b>; it first runs a SELECT, and if
# there are no results an INSERT is attempted. If there are other threads
# or processes there is a race condition between both calls and it could
# be the case that you end up with two similar records.
#
- # Whether that is a problem or not depends on the logic of the
- # application, but in the particular case in which rows have a UNIQUE
- # constraint an exception may be raised, just retry:
- #
- # begin
- # CreditAccount.transaction(requires_new: true) do
- # CreditAccount.find_or_create_by(user_id: user.id)
- # end
- # rescue ActiveRecord::RecordNotUnique
- # retry
- # end
- #
+ # If this might be a problem for your application, please see #create_or_find_by.
def find_or_create_by(attributes, &block)
find_by(attributes) || create(attributes, &block)
end
@@ -231,6 +176,51 @@ module ActiveRecord
find_by(attributes) || create!(attributes, &block)
end
+ # Attempts to create a record with the given attributes in a table that has a unique constraint
+ # on one or several of its columns. If a row already exists with one or several of these
+    # on one or several of its columns. If a row already exists that conflicts with one or several of
+    # these unique constraints, the exception such an insertion would normally raise is caught,
+ #
+ # This is similar to #find_or_create_by, but avoids the problem of stale reads between the SELECT
+ # and the INSERT, as that method needs to first query the table, then attempt to insert a row
+ # if none is found.
+ #
+ # There are several drawbacks to #create_or_find_by, though:
+ #
+ # * The underlying table must have the relevant columns defined with unique constraints.
+    # * A unique constraint violation may be triggered by only one, or by fewer than all,
+ # of the given attributes. This means that the subsequent #find_by! may fail to find a
+ # matching record, which will then raise an <tt>ActiveRecord::RecordNotFound</tt> exception,
+    #   rather than return a record with the given attributes.
+ # * While we avoid the race condition between SELECT -> INSERT from #find_or_create_by,
+ # we actually have another race condition between INSERT -> SELECT, which can be triggered
+ # if a DELETE between those two statements is run by another client. But for most applications,
+ # that's a significantly less likely condition to hit.
+ # * It relies on exception handling to handle control flow, which may be marginally slower.
+ # * The primary key may auto-increment on each create, even if it fails. This can accelerate
+ # the problem of running out of integers, if the underlying table is still stuck on a primary
+    #   key of type int (note: all Rails apps since 5.1 have defaulted to bigint, which is not liable
+ # to this problem).
+ #
+ # This method will return a record if all given attributes are covered by unique constraints
+ # (unless the INSERT -> DELETE -> SELECT race condition is triggered), but if creation was attempted
+    # and failed due to validation errors it won't be persisted; you get what #create returns in
+    # such a situation.
+ def create_or_find_by(attributes, &block)
+ transaction(requires_new: true) { create(attributes, &block) }
+ rescue ActiveRecord::RecordNotUnique
+ find_by!(attributes)
+ end
+
+ # Like #create_or_find_by, but calls
+ # {create!}[rdoc-ref:Persistence::ClassMethods#create!] so an exception
+ # is raised if the created record is invalid.
+ def create_or_find_by!(attributes, &block)
+ transaction(requires_new: true) { create!(attributes, &block) }
+ rescue ActiveRecord::RecordNotUnique
+ find_by!(attributes)
+ end
+
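A usage sketch for the methods documented above, assuming a User model with a unique index on users.email (model and column are illustrative, not from the patch):

    User.create_or_find_by(email: "david@example.com")
    # first call:  the INSERT succeeds and the new record is returned
    # second call: the INSERT violates the unique index, the resulting
    #              ActiveRecord::RecordNotUnique is rescued, and the existing
    #              row is loaded with find_by!(email: "david@example.com")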
# Like #find_or_create_by, but calls {new}[rdoc-ref:Core#new]
# instead of {create}[rdoc-ref:Persistence::ClassMethods#create].
def find_or_initialize_by(attributes, &block)
@@ -245,16 +235,16 @@ module ActiveRecord
# are needed by the next ones when eager loading is going on.
#
# Please see further details in the
- # {Active Record Query Interface guide}[http://guides.rubyonrails.org/active_record_querying.html#running-explain].
+ # {Active Record Query Interface guide}[https://guides.rubyonrails.org/active_record_querying.html#running-explain].
def explain
- #TODO: Fix for binds.
exec_explain(collecting_queries_for_explain { exec_queries })
end
# Converts relation objects to Array.
- def to_a
+ def to_ary
records.dup
end
+ alias to_a to_ary
def records # :nodoc:
load
@@ -266,10 +256,6 @@ module ActiveRecord
coder.represent_seq(nil, records)
end
- def as_json(options = nil) #:nodoc:
- records.as_json(options)
- end
-
# Returns size of the records.
def size
loaded? ? @records.length : count(:all)
@@ -278,13 +264,7 @@ module ActiveRecord
# Returns true if there are no records.
def empty?
return @records.empty? if loaded?
-
- if limit_value == 0
- true
- else
- c = count(:all)
- c.respond_to?(:zero?) ? c.zero? : c.empty?
- end
+ !exists?
end
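With empty? now delegating to exists?, an unloaded relation answers the question with a one-row probe instead of a COUNT. Roughly, and depending on the adapter (the SQL below is illustrative):

    Person.where(name: "David").empty?
    # before: SELECT COUNT(*) FROM "people" WHERE "people"."name" = 'David'
    # after:  SELECT 1 AS one FROM "people" WHERE "people"."name" = 'David' LIMIT 1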
# Returns true if there are no records.
@@ -347,10 +327,14 @@ module ActiveRecord
# Please check unscoped if you want to remove all previous scopes (including
# the default_scope) during the execution of a block.
def scoping
- previous, klass.current_scope = klass.current_scope, self
- yield
+ already_in_scope? ? yield : _scoping(self) { yield }
+ end
+
+ def _exec_scope(name, *args, &block) # :nodoc:
+ @delegate_to_klass = true
+ _scoping(_deprecated_spawn(name)) { instance_exec(*args, &block) || self }
ensure
- klass.current_scope = previous
+ @delegate_to_klass = false
end
# Updates all records in the current relation with details given. This method constructs a single SQL UPDATE
@@ -358,6 +342,8 @@ module ActiveRecord
# trigger Active Record callbacks or validations. However, values passed to #update_all will still go through
# Active Record's normal type casting and serialization.
#
+ # Note: As Active Record callbacks are not triggered, this method will not automatically update +updated_at+/+updated_on+ columns.
+ #
# ==== Parameters
#
# * +updates+ - A string, array, or hash representing the SET part of an SQL statement.
@@ -372,69 +358,91 @@ module ActiveRecord
#
# # Update all books that match conditions, but limit it to 5 ordered by date
# Book.where('title LIKE ?', '%Rails%').order(:created_at).limit(5).update_all(author: 'David')
+ #
+ # # Update all invoices and set the number column to its id value.
+ # Invoice.update_all('number = id')
def update_all(updates)
raise ArgumentError, "Empty list of attributes to change" if updates.blank?
+ if eager_loading?
+ relation = apply_join_dependency
+ return relation.update_all(updates)
+ end
+
stmt = Arel::UpdateManager.new
+ stmt.table(arel.join_sources.empty? ? table : arel.source)
+ stmt.key = arel_attribute(primary_key)
+ stmt.take(arel.limit)
+ stmt.offset(arel.offset)
+ stmt.order(*arel.orders)
+ stmt.wheres = arel.constraints
+
+ if updates.is_a?(Hash)
+ if klass.locking_enabled? &&
+ !updates.key?(klass.locking_column) &&
+ !updates.key?(klass.locking_column.to_sym)
+ attr = arel_attribute(klass.locking_column)
+ updates[attr.name] = _increment_attribute(attr)
+ end
+ stmt.set _substitute_values(updates)
+ else
+ stmt.set Arel.sql(klass.sanitize_sql_for_assignment(updates, table.name))
+ end
- stmt.set Arel.sql(@klass.send(:sanitize_sql_for_assignment, updates))
- stmt.table(table)
+ @klass.connection.update stmt, "#{@klass} Update All"
+ end
- if joins_values.any?
- @klass.connection.join_to_update(stmt, arel, arel_attribute(primary_key))
+ def update(id = :all, attributes) # :nodoc:
+ if id == :all
+ each { |record| record.update(attributes) }
else
- stmt.key = arel_attribute(primary_key)
- stmt.take(arel.limit)
- stmt.order(*arel.orders)
- stmt.wheres = arel.constraints
+ klass.update(id, attributes)
+ end
+ end
+
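Unlike update_all, the relation-level update above instantiates each matching record and calls update on it, so validations and callbacks still run. An illustrative call, reusing the Person example from the removed documentation:

    Person.where(group: "expert").update(group: "masters")
    # loads every matching Person and calls record.update(group: "masters") on each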
+ def update_counters(counters) # :nodoc:
+ touch = counters.delete(:touch)
+
+ updates = {}
+ counters.each do |counter_name, value|
+ attr = arel_attribute(counter_name)
+ updates[attr.name] = _increment_attribute(attr, value)
end
- @klass.connection.update stmt, 'SQL', bound_attributes
+ if touch
+ names = touch if touch != true
+ touch_updates = klass.touch_attributes_with_time(*names)
+ updates.merge!(touch_updates) unless touch_updates.empty?
+ end
+
+ update_all updates
end
- # Updates an object (or multiple objects) and saves it to the database, if validations pass.
- # The resulting object is returned whether the object was saved successfully to the database or not.
+    # Touches all records in the current relation, without instantiating them first,
+    # setting the +updated_at+/+updated_on+ attributes to the current time or the time specified.
+    # This method can be passed attribute names and an optional time argument.
+    # If attribute names are passed, they are updated along with +updated_at+/+updated_on+ attributes.
+    # If no time argument is passed, the current time is used by default.
#
- # ==== Parameters
+ # === Examples
#
- # * +id+ - This should be the id or an array of ids to be updated.
- # * +attributes+ - This should be a hash of attributes or an array of hashes.
+ # # Touch all records
+ # Person.all.touch_all
+ # # => "UPDATE \"people\" SET \"updated_at\" = '2018-01-04 22:55:23.132670'"
#
- # ==== Examples
+ # # Touch multiple records with a custom attribute
+ # Person.all.touch_all(:created_at)
+ # # => "UPDATE \"people\" SET \"updated_at\" = '2018-01-04 22:55:23.132670', \"created_at\" = '2018-01-04 22:55:23.132670'"
#
- # # Updates one record
- # Person.update(15, user_name: 'Samuel', group: 'expert')
- #
- # # Updates multiple records
- # people = { 1 => { "first_name" => "David" }, 2 => { "first_name" => "Jeremy" } }
- # Person.update(people.keys, people.values)
- #
- # # Updates multiple records from the result of a relation
- # people = Person.where(group: 'expert')
- # people.update(group: 'masters')
- #
- # Note: Updating a large number of records will run an
- # UPDATE query for each record, which may cause a performance
- # issue. So if it is not needed to run callbacks for each update, it is
- # preferred to use #update_all for updating all records using
- # a single query.
- def update(id = :all, attributes)
- if id.is_a?(Array)
- id.map.with_index { |one_id, idx| update(one_id, attributes[idx]) }
- elsif id == :all
- records.each { |record| record.update(attributes) }
- else
- if ActiveRecord::Base === id
- id = id.id
- ActiveSupport::Deprecation.warn(<<-MSG.squish)
- You are passing an instance of ActiveRecord::Base to `update`.
- Please pass the id of the object by calling `.id`
- MSG
- end
- object = find(id)
- object.update(attributes)
- object
- end
+ # # Touch multiple records with a specified time
+ # Person.all.touch_all(time: Time.new(2020, 5, 16, 0, 0, 0))
+ # # => "UPDATE \"people\" SET \"updated_at\" = '2020-05-16 00:00:00'"
+ #
+ # # Touch records with scope
+ # Person.where(name: 'David').touch_all
+ # # => "UPDATE \"people\" SET \"updated_at\" = '2018-01-04 22:55:23.132670' WHERE \"people\".\"name\" = 'David'"
+ def touch_all(*names, time: nil)
+ update_all klass.touch_attributes_with_time(*names, time: time)
end
# Destroys the records by instantiating each
@@ -453,43 +461,8 @@ module ActiveRecord
# ==== Examples
#
# Person.where(age: 0..18).destroy_all
- def destroy_all(conditions = nil)
- if conditions
- ActiveSupport::Deprecation.warn(<<-MESSAGE.squish)
- Passing conditions to destroy_all is deprecated and will be removed in Rails 5.1.
- To achieve the same use where(conditions).destroy_all
- MESSAGE
- where(conditions).destroy_all
- else
- records.each(&:destroy).tap { reset }
- end
- end
-
- # Destroy an object (or multiple objects) that has the given id. The object is instantiated first,
- # therefore all callbacks and filters are fired off before the object is deleted. This method is
- # less efficient than #delete but allows cleanup methods and other actions to be run.
- #
- # This essentially finds the object (or multiple objects) with the given id, creates a new object
- # from the attributes, and then calls destroy on it.
- #
- # ==== Parameters
- #
- # * +id+ - Can be either an Integer or an Array of Integers.
- #
- # ==== Examples
- #
- # # Destroy a single object
- # Todo.destroy(1)
- #
- # # Destroy multiple objects
- # todos = [1,2,3]
- # Todo.destroy(todos)
- def destroy(id)
- if id.is_a?(Array)
- id.map { |one_id| destroy(one_id) }
- else
- find(id).destroy
- end
+ def destroy_all
+ records.each(&:destroy).tap { reset }
end
# Deletes the records without instantiating the records
@@ -508,66 +481,60 @@ module ActiveRecord
#
# If an invalid method is supplied, #delete_all raises an ActiveRecordError:
#
- # Post.limit(100).delete_all
- # # => ActiveRecord::ActiveRecordError: delete_all doesn't support limit
- def delete_all(conditions = nil)
- invalid_methods = INVALID_METHODS_FOR_DELETE_ALL.select { |method|
- if MULTI_VALUE_METHODS.include?(method)
- send("#{method}_values").any?
- elsif SINGLE_VALUE_METHODS.include?(method)
- send("#{method}_value")
- elsif CLAUSE_METHODS.include?(method)
- send("#{method}_clause").any?
- end
- }
+ # Post.distinct.delete_all
+ # # => ActiveRecord::ActiveRecordError: delete_all doesn't support distinct
+ def delete_all
+ invalid_methods = INVALID_METHODS_FOR_DELETE_ALL.select do |method|
+ value = get_value(method)
+ SINGLE_VALUE_METHODS.include?(method) ? value : value.any?
+ end
if invalid_methods.any?
raise ActiveRecordError.new("delete_all doesn't support #{invalid_methods.join(', ')}")
end
- if conditions
- ActiveSupport::Deprecation.warn(<<-MESSAGE.squish)
- Passing conditions to delete_all is deprecated and will be removed in Rails 5.1.
- To achieve the same use where(conditions).delete_all
- MESSAGE
- where(conditions).delete_all
- else
- stmt = Arel::DeleteManager.new
- stmt.from(table)
+ if eager_loading?
+ relation = apply_join_dependency
+ return relation.delete_all
+ end
- if joins_values.any?
- @klass.connection.join_to_delete(stmt, arel, arel_attribute(primary_key))
- else
- stmt.wheres = arel.constraints
- end
+ stmt = Arel::DeleteManager.new
+ stmt.from(arel.join_sources.empty? ? table : arel.source)
+ stmt.key = arel_attribute(primary_key)
+ stmt.take(arel.limit)
+ stmt.offset(arel.offset)
+ stmt.order(*arel.orders)
+ stmt.wheres = arel.constraints
- affected = @klass.connection.delete(stmt, 'SQL', bound_attributes)
+ affected = @klass.connection.delete(stmt, "#{@klass} Destroy")
- reset
- affected
- end
+ reset
+ affected
end
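Since :limit and :offset were dropped from INVALID_METHODS_FOR_DELETE_ALL and the delete statement above now carries key/take/offset/order, a capped delete becomes possible. A sketch, with the exact SQL depending on the adapter:

    Post.order(:created_at).limit(100).delete_all
    # removes the 100 oldest posts; adapters without DELETE ... LIMIT apply the
    # limit through a primary-key subquery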
- # Deletes the row with a primary key matching the +id+ argument, using a
- # SQL +DELETE+ statement, and returns the number of rows deleted. Active
- # Record objects are not instantiated, so the object's callbacks are not
- # executed, including any <tt>:dependent</tt> association options.
- #
- # You can delete multiple rows at once by passing an Array of <tt>id</tt>s.
+ # Finds and destroys all records matching the specified conditions.
+ # This is short-hand for <tt>relation.where(condition).destroy_all</tt>.
+ # Returns the collection of objects that were destroyed.
#
- # Note: Although it is often much faster than the alternative,
- # #destroy, skipping callbacks might bypass business logic in
- # your application that ensures referential integrity or performs other
- # essential jobs.
+    # If no record is found, returns an empty array.
#
- # ==== Examples
+ # Person.destroy_by(id: 13)
+ # Person.destroy_by(name: 'Spartacus', rating: 4)
+ # Person.destroy_by("published_at < ?", 2.weeks.ago)
+ def destroy_by(*args)
+ where(*args).destroy_all
+ end
+
+ # Finds and deletes all records matching the specified conditions.
+ # This is short-hand for <tt>relation.where(condition).delete_all</tt>.
+ # Returns the number of rows affected.
#
- # # Delete a single row
- # Todo.delete(1)
+    # If no record is found, returns <tt>0</tt>, as no rows were affected.
#
- # # Delete multiple rows
- # Todo.delete([2,3,4])
- def delete(id_or_array)
- where(primary_key => id_or_array).delete_all
+ # Person.delete_by(id: 13)
+ # Person.delete_by(name: 'Spartacus', rating: 4)
+ # Person.delete_by("published_at < ?", 2.weeks.ago)
+ def delete_by(*args)
+ where(*args).delete_all
end
# Causes the records to be loaded from the database if they have not
@@ -576,8 +543,8 @@ module ActiveRecord
# return value is the relation itself, not the records.
#
# Post.where(published: true).load # => #<ActiveRecord::Relation>
- def load
- exec_queries unless loaded?
+ def load(&block)
+ exec_queries(&block) unless loaded?
self
end
@@ -589,8 +556,9 @@ module ActiveRecord
end
def reset
- @last = @to_sql = @order_clause = @scope_for_create = @arel = @loaded = nil
- @should_eager_load = @join_dependency = nil
+ @delegate_to_klass = false
+ @_deprecated_scope_source = nil
+ @to_sql = @arel = @loaded = @should_eager_load = nil
@records = [].freeze
@offsets = {}
self
@@ -602,32 +570,28 @@ module ActiveRecord
# # => SELECT "users".* FROM "users" WHERE "users"."name" = 'Oscar'
def to_sql
@to_sql ||= begin
- relation = self
- connection = klass.connection
- visitor = connection.visitor
-
- if eager_loading?
- find_with_associations { |rel| relation = rel }
- end
-
- binds = relation.bound_attributes
- binds = connection.prepare_binds_for_database(binds)
- binds.map! { |value| connection.quote(value) }
- collect = visitor.accept(relation.arel.ast, Arel::Collectors::Bind.new)
- collect.substitute_binds(binds).join
- end
+ if eager_loading?
+ apply_join_dependency do |relation, join_dependency|
+ relation = join_dependency.apply_column_aliases(relation)
+ relation.to_sql
+ end
+ else
+ conn = klass.connection
+ conn.unprepared_statement { conn.to_sql(arel) }
+ end
+ end
end
# Returns a hash of where conditions.
#
# User.where(name: 'Oscar').where_values_hash
# # => {name: "Oscar"}
- def where_values_hash(relation_table_name = table_name)
+ def where_values_hash(relation_table_name = klass.table_name)
where_clause.to_h(relation_table_name)
end
def scope_for_create
- @scope_for_create ||= where_values_hash.merge(create_with_value)
+ where_values_hash.merge!(create_with_value.stringify_keys)
end
# Returns true if relation needs eager loading.
@@ -645,15 +609,6 @@ module ActiveRecord
includes_values & joins_values
end
- # {#uniq}[rdoc-ref:QueryMethods#uniq] and
- # {#uniq!}[rdoc-ref:QueryMethods#uniq!] are silently deprecated.
- # #uniq_value delegates to #distinct_value to maintain backwards compatibility.
- # Use #distinct_value instead.
- def uniq_value
- distinct_value
- end
- deprecate uniq_value: :distinct_value
-
# Compares two relations for equality.
def ==(other)
case other
@@ -667,7 +622,7 @@ module ActiveRecord
end
def pretty_print(q)
- q.pp(self.records)
+ q.pp(records)
end
# Returns true if relation is blank.
@@ -676,17 +631,45 @@ module ActiveRecord
end
def values
- Hash[@values]
+ @values.dup
end
def inspect
- entries = records.take([limit_value, 11].compact.min).map!(&:inspect)
- entries[10] = '...' if entries.size == 11
+ subject = loaded? ? records : self
+ entries = subject.take([limit_value, 11].compact.min).map!(&:inspect)
+
+ entries[10] = "..." if entries.size == 11
"#<#{self.class.name} [#{entries.join(', ')}]>"
end
+ def empty_scope? # :nodoc:
+ @values == klass.unscoped.values
+ end
+
+ def has_limit_or_offset? # :nodoc:
+ limit_value || offset_value
+ end
+
+ def alias_tracker(joins = [], aliases = nil) # :nodoc:
+ joins += [aliases] if aliases
+ ActiveRecord::Associations::AliasTracker.create(connection, table.name, joins)
+ end
+
+ def preload_associations(records) # :nodoc:
+ preload = preload_values
+ preload += includes_values unless eager_loading?
+ preloader = nil
+ preload.each do |associations|
+ preloader ||= build_preloader
+ preloader.preload records, associations
+ end
+ end
+
+ attr_reader :_deprecated_scope_source # :nodoc:
+
protected
+ attr_writer :_deprecated_scope_source # :nodoc:
def load_records(records)
@records = records.freeze
@@ -694,49 +677,111 @@ module ActiveRecord
end
private
+ def already_in_scope?
+ @delegate_to_klass && begin
+ scope = klass.current_scope(true)
+ scope && !scope._deprecated_scope_source
+ end
+ end
- def exec_queries
- @records = eager_loading? ? find_with_associations.freeze : @klass.find_by_sql(arel, bound_attributes).freeze
+ def _deprecated_spawn(name)
+ spawn.tap { |scope| scope._deprecated_scope_source = name }
+ end
- preload = preload_values
- preload += includes_values unless eager_loading?
- preloader = build_preloader
- preload.each do |associations|
- preloader.preload @records, associations
+ def _deprecated_scope_block(name, &block)
+ -> record do
+ klass.current_scope = _deprecated_spawn(name)
+ yield record if block_given?
+ end
end
- @records.each(&:readonly!) if readonly_value
+ def _scoping(scope)
+ previous, klass.current_scope = klass.current_scope(true), scope
+ yield
+ ensure
+ klass.current_scope = previous
+ end
- @loaded = true
- @records
- end
+ def _substitute_values(values)
+ values.map do |name, value|
+ attr = arel_attribute(name)
+ unless Arel.arel_node?(value)
+ type = klass.type_for_attribute(attr.name)
+ value = predicate_builder.build_bind_attribute(attr.name, type.cast(value))
+ end
+ [attr, value]
+ end
+ end
- def build_preloader
- ActiveRecord::Associations::Preloader.new
- end
+ def _increment_attribute(attribute, value = 1)
+ bind = predicate_builder.build_bind_attribute(attribute.name, value.abs)
+ expr = table.coalesce(Arel::Nodes::UnqualifiedColumn.new(attribute), 0)
+ expr = value < 0 ? expr - bind : expr + bind
+ expr.expr
+ end
+
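The _increment_attribute helper above is what update_counters and the optimistic-locking branch of update_all feed into: it wraps the column in COALESCE so a NULL counter or lock column is treated as 0. The resulting statement looks roughly like this (table and column names assumed):

    Post.where(id: 1).update_counters(comments_count: 1)
    # UPDATE "posts"
    #   SET "comments_count" = COALESCE("comments_count", 0) + 1
    #   WHERE "posts"."id" = 1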
+ def exec_queries(&block)
+ skip_query_cache_if_necessary do
+ @records =
+ if eager_loading?
+ apply_join_dependency do |relation, join_dependency|
+ if ActiveRecord::NullRelation === relation
+ []
+ else
+ relation = join_dependency.apply_column_aliases(relation)
+ rows = connection.select_all(relation.arel, "SQL")
+ join_dependency.instantiate(rows, &block)
+ end.freeze
+ end
+ else
+ klass.find_by_sql(arel, &block).freeze
+ end
+
+ preload_associations(@records) unless skip_preloading_value
+
+ @records.each(&:readonly!) if readonly_value
+
+ @loaded = true
+ @records
+ end
+ end
- def references_eager_loaded_tables?
- joined_tables = arel.join_sources.map do |join|
- if join.is_a?(Arel::Nodes::StringJoin)
- tables_in_string(join.left)
+ def skip_query_cache_if_necessary
+ if skip_query_cache_value
+ uncached do
+ yield
+ end
else
- [join.left.table_name, join.left.table_alias]
+ yield
end
end
- joined_tables += [table.name, table.table_alias]
+ def build_preloader
+ ActiveRecord::Associations::Preloader.new
+ end
- # always convert table names to downcase as in Oracle quoted table names are in uppercase
- joined_tables = joined_tables.flatten.compact.map(&:downcase).uniq
+ def references_eager_loaded_tables?
+ joined_tables = arel.join_sources.map do |join|
+ if join.is_a?(Arel::Nodes::StringJoin)
+ tables_in_string(join.left)
+ else
+ [join.left.table_name, join.left.table_alias]
+ end
+ end
- (references_values - joined_tables).any?
- end
+ joined_tables += [table.name, table.table_alias]
- def tables_in_string(string)
- return [] if string.blank?
- # always convert table names to downcase as in Oracle quoted table names are in uppercase
- # ignore raw_sql_ that is used by Oracle adapter as alias for limit/offset subqueries
- string.scan(/([a-zA-Z_][.\w]+).?\./).flatten.map(&:downcase).uniq - ['raw_sql_']
- end
+ # always convert table names to downcase as in Oracle quoted table names are in uppercase
+ joined_tables = joined_tables.flatten.compact.map(&:downcase).uniq
+
+ (references_values - joined_tables).any?
+ end
+
+ def tables_in_string(string)
+ return [] if string.blank?
+ # always convert table names to downcase as in Oracle quoted table names are in uppercase
+ # ignore raw_sql_ that is used by Oracle adapter as alias for limit/offset subqueries
+ string.scan(/([a-zA-Z_][.\w]+).?\./).flatten.map(&:downcase).uniq - ["raw_sql_"]
+ end
end
end
diff --git a/activerecord/lib/active_record/relation/batches.rb b/activerecord/lib/active_record/relation/batches.rb
index b99807adf3..9c579843b1 100644
--- a/activerecord/lib/active_record/relation/batches.rb
+++ b/activerecord/lib/active_record/relation/batches.rb
@@ -1,8 +1,10 @@
+# frozen_string_literal: true
+
require "active_record/relation/batches/batch_enumerator"
module ActiveRecord
module Batches
- ORDER_OR_LIMIT_IGNORED_MESSAGE = "Scoped order and limit are ignored, it's forced to be batch order and batch size"
+ ORDER_IGNORE_MESSAGE = "Scoped order is ignored, it's forced to be batch order."
# Looping through a collection of records from the database
# (using the Scoping::Named::ClassMethods.all method, for example)
@@ -30,19 +32,28 @@ module ActiveRecord
# end
#
# ==== Options
- # * <tt>:batch_size</tt> - Specifies the size of the batch. Default to 1000.
+ # * <tt>:batch_size</tt> - Specifies the size of the batch. Defaults to 1000.
# * <tt>:start</tt> - Specifies the primary key value to start from, inclusive of the value.
# * <tt>:finish</tt> - Specifies the primary key value to end at, inclusive of the value.
# * <tt>:error_on_ignore</tt> - Overrides the application config to specify if an error should be raised when
- # the order and limit have to be ignored due to batching.
+ # an order is present in the relation.
+ #
+ # Limits are honored, and if present there is no requirement for the batch
+ # size: it can be less than, equal to, or greater than the limit.
+ #
+ # The options +start+ and +finish+ are especially useful if you want
+ # multiple workers dealing with the same processing queue. You can make
+ # worker 1 handle all the records between id 1 and 9999 and worker 2
+ # handle from 10000 and beyond by setting the +:start+ and +:finish+
+    # options on each worker.
#
- # This is especially useful if you want multiple workers dealing with
- # the same processing queue. You can make worker 1 handle all the records
- # between id 0 and 10,000 and worker 2 handle from 10,000 and beyond
- # (by setting the +:start+ and +:finish+ option on each worker).
+    #   # In worker 1, let's process up to record 9_999.
+ # Person.find_each(finish: 9_999) do |person|
+ # person.party_all_night!
+ # end
#
- # # Let's process for a batch of 2000 records, skipping the first 2000 rows
- # Person.find_each(start: 2000, batch_size: 2000) do |person|
+ # # In worker 2, let's process from record 10_000 and onwards.
+ # Person.find_each(start: 10_000) do |person|
# person.party_all_night!
# end
#
@@ -51,8 +62,8 @@ module ActiveRecord
# work. This also means that this method only works when the primary key is
# orderable (e.g. an integer or string).
#
- # NOTE: You can't set the limit either, that's used to control
- # the batch sizes.
+ # NOTE: By its nature, batch processing is subject to race conditions if
+ # other processes are modifying the database.
def find_each(start: nil, finish: nil, batch_size: 1000, error_on_ignore: nil)
if block_given?
find_in_batches(start: start, finish: finish, batch_size: batch_size, error_on_ignore: error_on_ignore) do |records|
@@ -85,19 +96,23 @@ module ActiveRecord
# To be yielded each record one by one, use #find_each instead.
#
# ==== Options
- # * <tt>:batch_size</tt> - Specifies the size of the batch. Default to 1000.
+ # * <tt>:batch_size</tt> - Specifies the size of the batch. Defaults to 1000.
# * <tt>:start</tt> - Specifies the primary key value to start from, inclusive of the value.
# * <tt>:finish</tt> - Specifies the primary key value to end at, inclusive of the value.
# * <tt>:error_on_ignore</tt> - Overrides the application config to specify if an error should be raised when
- # the order and limit have to be ignored due to batching.
+ # an order is present in the relation.
+ #
+ # Limits are honored, and if present there is no requirement for the batch
+ # size: it can be less than, equal to, or greater than the limit.
#
- # This is especially useful if you want multiple workers dealing with
- # the same processing queue. You can make worker 1 handle all the records
- # between id 0 and 10,000 and worker 2 handle from 10,000 and beyond
- # (by setting the +:start+ and +:finish+ option on each worker).
+ # The options +start+ and +finish+ are especially useful if you want
+ # multiple workers dealing with the same processing queue. You can make
+ # worker 1 handle all the records between id 1 and 9999 and worker 2
+ # handle from 10000 and beyond by setting the +:start+ and +:finish+
+    # options on each worker.
#
- # # Let's process the next 2000 records
- # Person.find_in_batches(start: 2000, batch_size: 2000) do |group|
+ # # Let's process from record 10_000 on.
+ # Person.find_in_batches(start: 10_000) do |group|
# group.each { |person| person.party_all_night! }
# end
#
@@ -106,8 +121,8 @@ module ActiveRecord
# work. This also means that this method only works when the primary key is
# orderable (e.g. an integer or string).
#
- # NOTE: You can't set the limit either, that's used to control
- # the batch sizes.
+ # NOTE: By its nature, batch processing is subject to race conditions if
+ # other processes are modifying the database.
def find_in_batches(start: nil, finish: nil, batch_size: 1000, error_on_ignore: nil)
relation = self
unless block_given?
@@ -132,9 +147,9 @@ module ActiveRecord
# If you do not provide a block to #in_batches, it will return a
# BatchEnumerator which is enumerable.
#
- # Person.in_batches.with_index do |relation, batch_index|
+ # Person.in_batches.each_with_index do |relation, batch_index|
# puts "Processing relation ##{batch_index}"
- # relation.each { |relation| relation.delete_all }
+ # relation.delete_all
# end
#
# Examples of calling methods on the returned BatchEnumerator object:
@@ -144,22 +159,24 @@ module ActiveRecord
# Person.in_batches.each_record(&:party_all_night!)
#
# ==== Options
- # * <tt>:of</tt> - Specifies the size of the batch. Default to 1000.
- # * <tt>:load</tt> - Specifies if the relation should be loaded. Default to false.
+ # * <tt>:of</tt> - Specifies the size of the batch. Defaults to 1000.
+ # * <tt>:load</tt> - Specifies if the relation should be loaded. Defaults to false.
# * <tt>:start</tt> - Specifies the primary key value to start from, inclusive of the value.
# * <tt>:finish</tt> - Specifies the primary key value to end at, inclusive of the value.
# * <tt>:error_on_ignore</tt> - Overrides the application config to specify if an error should be raised when
- # the order and limit have to be ignored due to batching.
+ # an order is present in the relation.
+ #
+ # Limits are honored, and if present there is no requirement for the batch
+    # size: it can be less than, equal to, or greater than the limit.
#
- # This is especially useful if you want to work with the
- # ActiveRecord::Relation object instead of the array of records, or if
- # you want multiple workers dealing with the same processing queue. You can
- # make worker 1 handle all the records between id 0 and 10,000 and worker 2
- # handle from 10,000 and beyond (by setting the +:start+ and +:finish+
- # option on each worker).
+ # The options +start+ and +finish+ are especially useful if you want
+ # multiple workers dealing with the same processing queue. You can make
+ # worker 1 handle all the records between id 1 and 9999 and worker 2
+ # handle from 10000 and beyond by setting the +:start+ and +:finish+
+    # options on each worker.
#
- # # Let's process the next 2000 records
- # Person.in_batches(of: 2000, start: 2000).update_all(awesome: true)
+ # # Let's process from record 10_000 on.
+ # Person.in_batches(start: 10_000).update_all(awesome: true)
#
# An example of calling where query method on the relation:
#
@@ -176,34 +193,41 @@ module ActiveRecord
#
# NOTE: It's not possible to set the order. That is automatically set to
# ascending on the primary key ("id ASC") to make the batch ordering
- # consistent. Therefore the primary key must be orderable, e.g an integer
+ # consistent. Therefore the primary key must be orderable, e.g. an integer
# or a string.
#
- # NOTE: You can't set the limit either, that's used to control the batch
- # sizes.
+ # NOTE: By its nature, batch processing is subject to race conditions if
+ # other processes are modifying the database.
def in_batches(of: 1000, start: nil, finish: nil, load: false, error_on_ignore: nil)
relation = self
unless block_given?
return BatchEnumerator.new(of: of, start: start, finish: finish, relation: self)
end
- if arel.orders.present? || arel.taken.present?
- act_on_order_or_limit_ignored(error_on_ignore)
+ if arel.orders.present?
+ act_on_ignored_order(error_on_ignore)
+ end
+
+ batch_limit = of
+ if limit_value
+ remaining = limit_value
+ batch_limit = remaining if remaining < batch_limit
end
- relation = relation.reorder(batch_order).limit(of)
+ relation = relation.reorder(batch_order).limit(batch_limit)
relation = apply_limits(relation, start, finish)
+ relation.skip_query_cache! # Retaining the results in the query cache would undermine the point of batching
batch_relation = relation
loop do
if load
records = batch_relation.records
ids = records.map(&:id)
- yielded_relation = self.where(primary_key => ids)
+ yielded_relation = where(primary_key => ids)
yielded_relation.load_records(records)
else
ids = batch_relation.pluck(primary_key)
- yielded_relation = self.where(primary_key => ids)
+ yielded_relation = where(primary_key => ids)
end
break if ids.empty?
@@ -213,31 +237,54 @@ module ActiveRecord
yield yielded_relation
- break if ids.length < of
- batch_relation = relation.where(arel_attribute(primary_key).gt(primary_key_offset))
+ break if ids.length < batch_limit
+
+ if limit_value
+ remaining -= ids.length
+
+ if remaining == 0
+ # Saves a useless iteration when the limit is a multiple of the
+ # batch size.
+ break
+ elsif remaining < batch_limit
+ relation = relation.limit(remaining)
+ end
+ end
+
+ batch_relation = relation.where(
+ bind_attribute(primary_key, primary_key_offset) { |attr, bind| attr.gt(bind) }
+ )
end
end
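The batch_limit bookkeeping above is what backs the "Limits are honored" note in the documentation: the relation's limit caps both the per-batch size and the total number of rows yielded. An illustrative run (Person model assumed):

    Person.limit(10_000).in_batches(of: 3_000) do |relation|
      relation.update_all(awesome: true)
    end
    # yields batches of 3_000, 3_000, 3_000 and finally 1_000 records, then stops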
private
- def apply_limits(relation, start, finish)
- relation = relation.where(arel_attribute(primary_key).gteq(start)) if start
- relation = relation.where(arel_attribute(primary_key).lteq(finish)) if finish
- relation
- end
+ def apply_limits(relation, start, finish)
+ relation = apply_start_limit(relation, start) if start
+ relation = apply_finish_limit(relation, finish) if finish
+ relation
+ end
- def batch_order
- "#{quoted_table_name}.#{quoted_primary_key} ASC"
- end
+ def apply_start_limit(relation, start)
+ relation.where(bind_attribute(primary_key, start) { |attr, bind| attr.gteq(bind) })
+ end
- def act_on_order_or_limit_ignored(error_on_ignore)
- raise_error = (error_on_ignore.nil? ? self.klass.error_on_ignored_order_or_limit : error_on_ignore)
+ def apply_finish_limit(relation, finish)
+ relation.where(bind_attribute(primary_key, finish) { |attr, bind| attr.lteq(bind) })
+ end
- if raise_error
- raise ArgumentError.new(ORDER_OR_LIMIT_IGNORED_MESSAGE)
- elsif logger
- logger.warn(ORDER_OR_LIMIT_IGNORED_MESSAGE)
+ def batch_order
+ arel_attribute(primary_key).asc
+ end
+
+ def act_on_ignored_order(error_on_ignore)
+ raise_error = (error_on_ignore.nil? ? klass.error_on_ignored_order : error_on_ignore)
+
+ if raise_error
+ raise ArgumentError.new(ORDER_IGNORE_MESSAGE)
+ elsif logger
+ logger.warn(ORDER_IGNORE_MESSAGE)
+ end
end
- end
end
end
diff --git a/activerecord/lib/active_record/relation/batches/batch_enumerator.rb b/activerecord/lib/active_record/relation/batches/batch_enumerator.rb
index 13393dc605..49697da3bf 100644
--- a/activerecord/lib/active_record/relation/batches/batch_enumerator.rb
+++ b/activerecord/lib/active_record/relation/batches/batch_enumerator.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
module ActiveRecord
module Batches
class BatchEnumerator
@@ -7,7 +9,7 @@ module ActiveRecord
@of = of
@relation = relation
@start = start
- @finish = finish
+ @finish = finish
end
# Looping through a collection of records from the database (using the
@@ -42,7 +44,7 @@ module ActiveRecord
# Delegates #delete_all, #update_all, #destroy_all methods to each batch.
#
# People.in_batches.delete_all
- # People.in_batches.destroy_all('age < 10')
+ # People.where('age < 10').in_batches.destroy_all
# People.in_batches.update_all('age = age + 1')
[:delete_all, :update_all, :destroy_all].each do |method|
define_method(method) do |*args, &block|
diff --git a/activerecord/lib/active_record/relation/calculations.rb b/activerecord/lib/active_record/relation/calculations.rb
index 54c9af4898..4f9ddf302e 100644
--- a/activerecord/lib/active_record/relation/calculations.rb
+++ b/activerecord/lib/active_record/relation/calculations.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
module ActiveRecord
module Calculations
# Count the records.
@@ -37,7 +39,15 @@ module ActiveRecord
# Note: not all valid {Relation#select}[rdoc-ref:QueryMethods#select] expressions are valid #count expressions. The specifics differ
# between databases. In invalid cases, an error from the database is thrown.
def count(column_name = nil)
- calculate(:count, column_name)
+ if block_given?
+ unless column_name.nil?
+ raise ArgumentError, "Column name argument is not supported when a block is passed."
+ end
+
+ super()
+ else
+ calculate(:count, column_name)
+ end
end
# Calculates the average value on a given column. Returns +nil+ if there's
@@ -71,9 +81,16 @@ module ActiveRecord
# #calculate for examples with options.
#
# Person.sum(:age) # => 4562
- def sum(column_name = nil, &block)
- return super(&block) if block_given?
- calculate(:sum, column_name)
+ def sum(column_name = nil)
+ if block_given?
+ unless column_name.nil?
+ raise ArgumentError, "Column name argument is not supported when a block is passed."
+ end
+
+ super()
+ else
+ calculate(:sum, column_name)
+ end
end
# This calculates aggregate values in the given column. Methods for #count, #sum, #average,
@@ -89,7 +106,7 @@ module ActiveRecord
#
# There are two basic forms of output:
#
- # * Single aggregate value: The single value is type cast to Fixnum for COUNT, Float
+ # * Single aggregate value: The single value is type cast to Integer for COUNT, Float
# for AVG, and the given column's type for everything else.
#
# * Grouped values: This returns an ordered hash of the values and groups them. It
@@ -108,12 +125,18 @@ module ActiveRecord
# ...
# end
def calculate(operation, column_name)
- if column_name.is_a?(Symbol) && attribute_alias?(column_name)
- column_name = attribute_alias(column_name)
- end
-
if has_include?(column_name)
- construct_relation_for_association_calculations.calculate(operation, column_name)
+ relation = apply_join_dependency
+
+ if operation.to_s.downcase == "count"
+ relation.distinct!
+ # PostgreSQL: ORDER BY expressions must appear in SELECT list when using DISTINCT
+ if (column_name == :all || column_name.nil?) && select_values.empty?
+ relation.order_values = []
+ end
+ end
+
+ relation.calculate(operation, column_name)
else
perform_calculation(operation, column_name)
end
@@ -156,21 +179,39 @@ module ActiveRecord
#
def pluck(*column_names)
if loaded? && (column_names.map(&:to_s) - @klass.attribute_names - @klass.attribute_aliases.keys).empty?
- return @records.pluck(*column_names)
+ return records.pluck(*column_names)
end
if has_include?(column_names.first)
- construct_relation_for_association_calculations.pluck(*column_names)
+ relation = apply_join_dependency
+ relation.pluck(*column_names)
else
+ klass.disallow_raw_sql!(column_names)
relation = spawn
- relation.select_values = column_names.map { |cn|
- @klass.has_attribute?(cn) || @klass.attribute_alias?(cn) ? arel_attribute(cn) : cn
- }
- result = klass.connection.select_all(relation.arel, nil, bound_attributes)
+ relation.select_values = column_names
+ result = skip_query_cache_if_necessary { klass.connection.select_all(relation.arel, nil) }
result.cast_values(klass.attribute_types)
end
end
+ # Pick the value(s) from the named column(s) in the current relation.
+ # This is short-hand for <tt>relation.limit(1).pluck(*column_names).first</tt>, and is primarily useful
+ # when you have a relation that's already narrowed down to a single row.
+ #
+ # Just like #pluck, #pick will only load the actual value, not the entire record object, so it's also
+ # more efficient. The value is, again like with pluck, typecast by the column type.
+ #
+ # Person.where(id: 1).pick(:name)
+ # # SELECT people.name FROM people WHERE id = 1 LIMIT 1
+ # # => 'David'
+ #
+ # Person.where(id: 1).pick(:name, :email_address)
+ # # SELECT people.name, people.email_address FROM people WHERE id = 1 LIMIT 1
+ # # => [ 'David', 'david@loudthinking.com' ]
+ def pick(*column_names)
+ limit(1).pluck(*column_names).first
+ end
+
# Pluck all the ID's for the relation using the table's primary key
#
# Person.ids # SELECT people.id FROM people
@@ -180,202 +221,211 @@ module ActiveRecord
end
private
+ def has_include?(column_name)
+ eager_loading? || (includes_values.present? && column_name && column_name != :all)
+ end
- def has_include?(column_name)
- eager_loading? || (includes_values.present? && column_name && column_name != :all)
- end
-
- def perform_calculation(operation, column_name)
- operation = operation.to_s.downcase
-
- # If #count is used with #distinct (i.e. `relation.distinct.count`) it is
- # considered distinct.
- distinct = self.distinct_value
-
- if operation == "count"
- column_name ||= select_for_count
-
- unless arel.ast.grep(Arel::Nodes::OuterJoin).empty?
- distinct = true
+ def perform_calculation(operation, column_name)
+ operation = operation.to_s.downcase
+
+ # If #count is used with #distinct (i.e. `relation.distinct.count`) it is
+ # considered distinct.
+ distinct = distinct_value
+
+ if operation == "count"
+ column_name ||= select_for_count
+ if column_name == :all
+ if !distinct
+ distinct = distinct_select?(select_for_count) if group_values.empty?
+ elsif group_values.any? || select_values.empty? && order_values.empty?
+ column_name = primary_key
+ end
+ elsif distinct_select?(column_name)
+ distinct = nil
+ end
end
- column_name = primary_key if column_name == :all && distinct
- distinct = nil if column_name =~ /\s*DISTINCT[\s(]+/i
+ if group_values.any?
+ execute_grouped_calculation(operation, column_name, distinct)
+ else
+ execute_simple_calculation(operation, column_name, distinct)
+ end
end
- if group_values.any?
- execute_grouped_calculation(operation, column_name, distinct)
- else
- execute_simple_calculation(operation, column_name, distinct)
+ def distinct_select?(column_name)
+ column_name.is_a?(::String) && /\bDISTINCT[\s(]/i.match?(column_name)
end
- end
- def aggregate_column(column_name)
- return column_name if Arel::Expressions === column_name
+ def aggregate_column(column_name)
+ return column_name if Arel::Expressions === column_name
- if @klass.column_names.include?(column_name.to_s)
- Arel::Attribute.new(@klass.unscoped.table, column_name)
- else
- Arel.sql(column_name == :all ? "*" : column_name.to_s)
+ if @klass.has_attribute?(column_name) || @klass.attribute_alias?(column_name)
+ @klass.arel_attribute(column_name)
+ else
+ Arel.sql(column_name == :all ? "*" : column_name.to_s)
+ end
end
- end
-
- def operation_over_aggregate_column(column, operation, distinct)
- operation == 'count' ? column.count(distinct) : column.send(operation)
- end
-
- def execute_simple_calculation(operation, column_name, distinct) #:nodoc:
- # PostgreSQL doesn't like ORDER BY when there are no GROUP BY
- relation = unscope(:order)
- column_alias = column_name
+ def operation_over_aggregate_column(column, operation, distinct)
+ operation == "count" ? column.count(distinct) : column.send(operation)
+ end
- if operation == "count" && (relation.limit_value || relation.offset_value)
- # Shortcut when limit is zero.
- return 0 if relation.limit_value == 0
+ def execute_simple_calculation(operation, column_name, distinct) #:nodoc:
+ column_alias = column_name
- query_builder = build_count_subquery(relation, column_name, distinct)
- else
- column = aggregate_column(column_name)
+ if operation == "count" && (column_name == :all && distinct || has_limit_or_offset?)
+ # Shortcut when limit is zero.
+ return 0 if limit_value == 0
- select_value = operation_over_aggregate_column(column, operation, distinct)
+ query_builder = build_count_subquery(spawn, column_name, distinct)
+ else
+ # PostgreSQL doesn't like ORDER BY when there are no GROUP BY
+ relation = unscope(:order).distinct!(false)
- column_alias = select_value.alias
- column_alias ||= @klass.connection.column_name_for_operation(operation, select_value)
- relation.select_values = [select_value]
+ column = aggregate_column(column_name)
- query_builder = relation.arel
- end
+ select_value = operation_over_aggregate_column(column, operation, distinct)
+ if operation == "sum" && distinct
+ select_value.distinct = true
+ end
- result = @klass.connection.select_all(query_builder, nil, bound_attributes)
- row = result.first
- value = row && row.values.first
- column = result.column_types.fetch(column_alias) do
- type_for(column_name)
- end
+ column_alias = select_value.alias
+ column_alias ||= @klass.connection.column_name_for_operation(operation, select_value)
+ relation.select_values = [select_value]
- type_cast_calculated_value(value, column, operation)
- end
+ query_builder = relation.arel
+ end
- def execute_grouped_calculation(operation, column_name, distinct) #:nodoc:
- group_attrs = group_values
+ result = skip_query_cache_if_necessary { @klass.connection.select_all(query_builder, nil) }
+ row = result.first
+ value = row && row.values.first
+ type = result.column_types.fetch(column_alias) do
+ type_for(column_name)
+ end
- if group_attrs.first.respond_to?(:to_sym)
- association = @klass._reflect_on_association(group_attrs.first)
- associated = group_attrs.size == 1 && association && association.belongs_to? # only count belongs_to associations
- group_fields = Array(associated ? association.foreign_key : group_attrs)
- else
- group_fields = group_attrs
+ type_cast_calculated_value(value, type, operation)
end
- group_fields = arel_columns(group_fields)
- group_aliases = group_fields.map { |field| column_alias_for(field) }
- group_columns = group_aliases.zip(group_fields)
+ def execute_grouped_calculation(operation, column_name, distinct) #:nodoc:
+ group_attrs = group_values
- if operation == 'count' && column_name == :all
- aggregate_alias = 'count_all'
- else
- aggregate_alias = column_alias_for([operation, column_name].join(' '))
- end
-
- select_values = [
- operation_over_aggregate_column(
- aggregate_column(column_name),
- operation,
- distinct).as(aggregate_alias)
- ]
- select_values += select_values unless having_clause.empty?
-
- select_values.concat group_columns.map { |aliaz, field|
- if field.respond_to?(:as)
- field.as(aliaz)
+ if group_attrs.first.respond_to?(:to_sym)
+ association = @klass._reflect_on_association(group_attrs.first)
+ associated = group_attrs.size == 1 && association && association.belongs_to? # only count belongs_to associations
+ group_fields = Array(associated ? association.foreign_key : group_attrs)
else
- "#{field} AS #{aliaz}"
+ group_fields = group_attrs
end
- }
+ group_fields = arel_columns(group_fields)
- relation = except(:group)
- relation.group_values = group_fields
- relation.select_values = select_values
+ group_aliases = group_fields.map { |field| column_alias_for(field) }
+ group_columns = group_aliases.zip(group_fields)
- calculated_data = @klass.connection.select_all(relation, nil, relation.bound_attributes)
-
- if association
- key_ids = calculated_data.collect { |row| row[group_aliases.first] }
- key_records = association.klass.base_class.where(association.klass.base_class.primary_key => key_ids)
- key_records = Hash[key_records.map { |r| [r.id, r] }]
- end
+ if operation == "count" && column_name == :all
+ aggregate_alias = "count_all"
+ else
+ aggregate_alias = column_alias_for([operation, column_name].join(" "))
+ end
- Hash[calculated_data.map do |row|
- key = group_columns.map { |aliaz, col_name|
- column = calculated_data.column_types.fetch(aliaz) do
- type_for(col_name)
+ select_values = [
+ operation_over_aggregate_column(
+ aggregate_column(column_name),
+ operation,
+ distinct).as(aggregate_alias)
+ ]
+ select_values += self.select_values unless having_clause.empty?
+
+ select_values.concat group_columns.map { |aliaz, field|
+ if field.respond_to?(:as)
+ field.as(aliaz)
+ else
+ "#{field} AS #{aliaz}"
end
- type_cast_calculated_value(row[aliaz], column)
}
- key = key.first if key.size == 1
- key = key_records[key] if associated
- column_type = calculated_data.column_types.fetch(aggregate_alias) { type_for(column_name) }
- [key, type_cast_calculated_value(row[aggregate_alias], column_type, operation)]
- end]
- end
+ relation = except(:group).distinct!(false)
+ relation.group_values = group_fields
+ relation.select_values = select_values
- # Converts the given keys to the value that the database adapter returns as
- # a usable column name:
- #
- # column_alias_for("users.id") # => "users_id"
- # column_alias_for("sum(id)") # => "sum_id"
- # column_alias_for("count(distinct users.id)") # => "count_distinct_users_id"
- # column_alias_for("count(*)") # => "count_all"
- def column_alias_for(keys)
- if keys.respond_to? :name
- keys = "#{keys.relation.name}.#{keys.name}"
+ calculated_data = skip_query_cache_if_necessary { @klass.connection.select_all(relation.arel, nil) }
+
+ if association
+ key_ids = calculated_data.collect { |row| row[group_aliases.first] }
+ key_records = association.klass.base_class.where(association.klass.base_class.primary_key => key_ids)
+ key_records = Hash[key_records.map { |r| [r.id, r] }]
+ end
+
+ Hash[calculated_data.map do |row|
+ key = group_columns.map { |aliaz, col_name|
+ type = type_for(col_name) do
+ calculated_data.column_types.fetch(aliaz, Type.default_value)
+ end
+ type_cast_calculated_value(row[aliaz], type)
+ }
+ key = key.first if key.size == 1
+ key = key_records[key] if associated
+
+ type = calculated_data.column_types.fetch(aggregate_alias) { type_for(column_name) }
+ [key, type_cast_calculated_value(row[aggregate_alias], type, operation)]
+ end]
end
- table_name = keys.to_s.downcase
- table_name.gsub!(/\*/, 'all')
- table_name.gsub!(/\W+/, ' ')
- table_name.strip!
- table_name.gsub!(/ +/, '_')
+ # Converts the given keys to the value that the database adapter returns as
+ # a usable column name:
+ #
+ # column_alias_for("users.id") # => "users_id"
+ # column_alias_for("sum(id)") # => "sum_id"
+ # column_alias_for("count(distinct users.id)") # => "count_distinct_users_id"
+ # column_alias_for("count(*)") # => "count_all"
+ def column_alias_for(keys)
+ if keys.respond_to? :name
+ keys = "#{keys.relation.name}.#{keys.name}"
+ end
- @klass.connection.table_alias_for(table_name)
- end
+ table_name = keys.to_s.downcase
+ table_name.gsub!(/\*/, "all")
+ table_name.gsub!(/\W+/, " ")
+ table_name.strip!
+ table_name.gsub!(/ +/, "_")
- def type_for(field)
- field_name = field.respond_to?(:name) ? field.name.to_s : field.to_s.split('.').last
- @klass.type_for_attribute(field_name)
- end
+ @klass.connection.table_alias_for(table_name)
+ end
- def type_cast_calculated_value(value, type, operation = nil)
- case operation
- when 'count' then value.to_i
- when 'sum' then type.deserialize(value || 0)
- when 'average' then value.respond_to?(:to_d) ? value.to_d : value
+ def type_for(field, &block)
+ field_name = field.respond_to?(:name) ? field.name.to_s : field.to_s.split(".").last
+ @klass.type_for_attribute(field_name, &block)
+ end
+
+ def type_cast_calculated_value(value, type, operation = nil)
+ case operation
+ when "count" then value.to_i
+ when "sum" then type.deserialize(value || 0)
+ when "average" then value&.respond_to?(:to_d) ? value.to_d : value
else type.deserialize(value)
+ end
end
- end
- def select_for_count
- if select_values.present?
- return select_values.first if select_values.one?
- select_values.join(", ")
- else
- :all
+ def select_for_count
+ if select_values.present?
+ return select_values.first if select_values.one?
+ select_values.join(", ")
+ else
+ :all
+ end
end
- end
- def build_count_subquery(relation, column_name, distinct)
- column_alias = Arel.sql('count_column')
- subquery_alias = Arel.sql('subquery_for_count')
+ def build_count_subquery(relation, column_name, distinct)
+ if column_name == :all
+ relation.select_values = [ Arel.sql(FinderMethods::ONE_AS_ONE) ] unless distinct
+ else
+ column_alias = Arel.sql("count_column")
+ relation.select_values = [ aggregate_column(column_name).as(column_alias) ]
+ end
- aliased_column = aggregate_column(column_name == :all ? 1 : column_name).as(column_alias)
- relation.select_values = [aliased_column]
- subquery = relation.arel.as(subquery_alias)
+ subquery = relation.arel.as(Arel.sql("subquery_for_count"))
+ select_value = operation_over_aggregate_column(column_alias || Arel.star, "count", false)
- sm = Arel::SelectManager.new relation.engine
- select_value = operation_over_aggregate_column(column_alias, 'count', distinct)
- sm.project(select_value).from(subquery)
- end
+ Arel::SelectManager.new(subquery).project(select_value)
+ end
end
end
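
A rough usage sketch of what the reworked simple-calculation path above does (Person is an assumed model; the SQL shapes are read off the hunk, not verified by running it):

  Person.count                 # COUNT(*) via execute_simple_calculation
  Person.limit(5).count        # has_limit_or_offset? is true, so the relation is
                               # wrapped by build_count_subquery before counting
  Person.distinct.count(:all)  # the new `column_name == :all && distinct` guard
                               # also routes through the count subquery
  Person.limit(0).count        # short-circuits to 0 without touching the database
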
diff --git a/activerecord/lib/active_record/relation/delegation.rb b/activerecord/lib/active_record/relation/delegation.rb
index f2578f5f96..7a53a9d1c7 100644
--- a/activerecord/lib/active_record/relation/delegation.rb
+++ b/activerecord/lib/active_record/relation/delegation.rb
@@ -1,5 +1,6 @@
-require 'set'
-require 'active_support/concern'
+# frozen_string_literal: true
+
+require "mutex_m"
module ActiveRecord
module Delegation # :nodoc:
@@ -18,7 +19,11 @@ module ActiveRecord
delegate = Class.new(klass) {
include ClassSpecificRelation
}
- const_set klass.name.gsub('::'.freeze, '_'.freeze), delegate
+ include_relation_methods(delegate)
+ mangled_name = klass.name.gsub("::", "_")
+ const_set mangled_name, delegate
+ private_constant mangled_name
+
cache[klass] = delegate
end
end
@@ -27,7 +32,45 @@ module ActiveRecord
child_class.initialize_relation_delegate_cache
super
end
+
+ def generate_relation_method(method)
+ generated_relation_methods.generate_method(method)
+ end
+
+ protected
+ def include_relation_methods(delegate)
+ superclass.include_relation_methods(delegate) unless base_class?
+ delegate.include generated_relation_methods
+ end
+
+ private
+ def generated_relation_methods
+ @generated_relation_methods ||= GeneratedRelationMethods.new
+ end
+ end
+
+ class GeneratedRelationMethods < Module # :nodoc:
+ include Mutex_m
+
+ def generate_method(method)
+ synchronize do
+ return if method_defined?(method)
+
+ if /\A[a-zA-Z_]\w*[!?]?\z/.match?(method)
+ module_eval <<-RUBY, __FILE__, __LINE__ + 1
+ def #{method}(*args, &block)
+ scoping { klass.#{method}(*args, &block) }
+ end
+ RUBY
+ else
+ define_method(method) do |*args, &block|
+ scoping { klass.public_send(method, *args, &block) }
+ end
+ end
+ end
+ end
end
+ private_constant :GeneratedRelationMethods
extend ActiveSupport::Concern
@@ -36,64 +79,32 @@ module ActiveRecord
# may vary depending on the klass of a relation, so we create a subclass of Relation
# for each different klass, and the delegations are compiled into that subclass only.
- delegate :to_xml, :to_yaml, :length, :collect, :map, :each, :all?, :include?, :to_ary, :join,
- :[], :&, :|, :+, :-, :sample, :reverse, :compact, :in_groups, :in_groups_of,
- :shuffle, :split, to: :records
+ delegate :to_xml, :encode_with, :length, :each, :join,
+ :[], :&, :|, :+, :-, :sample, :reverse, :rotate, :compact, :in_groups, :in_groups_of,
+ :to_sentence, :to_formatted_s, :as_json,
+ :shuffle, :split, :slice, :index, :rindex, to: :records
- delegate :table_name, :quoted_table_name, :primary_key, :quoted_primary_key,
- :connection, :columns_hash, :to => :klass
+ delegate :primary_key, :connection, to: :klass
module ClassSpecificRelation # :nodoc:
extend ActiveSupport::Concern
- included do
- @delegation_mutex = Mutex.new
- end
-
module ClassMethods # :nodoc:
def name
superclass.name
end
+ end
- def delegate_to_scoped_klass(method)
- @delegation_mutex.synchronize do
- return if method_defined?(method)
-
- if method.to_s =~ /\A[a-zA-Z_]\w*[!?]?\z/
- module_eval <<-RUBY, __FILE__, __LINE__ + 1
- def #{method}(*args, &block)
- scoping { @klass.#{method}(*args, &block) }
- end
- RUBY
- else
- define_method method do |*args, &block|
- scoping { @klass.public_send(method, *args, &block) }
- end
- end
- end
- end
+ private
- def delegate(method, opts = {})
- @delegation_mutex.synchronize do
- return if method_defined?(method)
+ def method_missing(method, *args, &block)
+ if @klass.respond_to?(method)
+ @klass.generate_relation_method(method)
+ scoping { @klass.public_send(method, *args, &block) }
+ else
super
end
end
- end
-
- protected
-
- def method_missing(method, *args, &block)
- if @klass.respond_to?(method)
- self.class.delegate_to_scoped_klass(method)
- scoping { @klass.public_send(method, *args, &block) }
- elsif arel.respond_to?(method)
- self.class.delegate method, :to => :arel
- arel.public_send(method, *args, &block)
- else
- super
- end
- end
end
module ClassMethods # :nodoc:
@@ -103,26 +114,14 @@ module ActiveRecord
private
- def relation_class_for(klass)
- klass.relation_delegate_class(self)
- end
- end
-
- def respond_to?(method, include_private = false)
- super || @klass.respond_to?(method, include_private) ||
- arel.respond_to?(method, include_private)
+ def relation_class_for(klass)
+ klass.relation_delegate_class(self)
+ end
end
- protected
-
- def method_missing(method, *args, &block)
- if @klass.respond_to?(method)
- scoping { @klass.public_send(method, *args, &block) }
- elsif arel.respond_to?(method)
- arel.public_send(method, *args, &block)
- else
- super
+ private
+ def respond_to_missing?(method, _)
+ super || @klass.respond_to?(method)
end
- end
end
end
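
A minimal sketch of how the new lazy delegation behaves for a class method that is not defined on Relation (Post and the trending method are assumptions for illustration):

  class Post < ActiveRecord::Base
    def self.trending
      where("comments_count > 10")
    end
  end

  Post.where(author_id: 1).trending
  # The first call hits Relation#method_missing, which asks the model to
  # generate_relation_method(:trending); GeneratedRelationMethods then compiles
  # a real method that runs `scoping { klass.trending(...) }`, so later calls
  # skip method_missing entirely.
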
diff --git a/activerecord/lib/active_record/relation/finder_methods.rb b/activerecord/lib/active_record/relation/finder_methods.rb
index 27dd0b4143..e2efd4aa0d 100644
--- a/activerecord/lib/active_record/relation/finder_methods.rb
+++ b/activerecord/lib/active_record/relation/finder_methods.rb
@@ -1,12 +1,14 @@
-require 'active_support/core_ext/string/filters'
+# frozen_string_literal: true
+
+require "active_support/core_ext/string/filters"
module ActiveRecord
module FinderMethods
- ONE_AS_ONE = '1 AS one'
+ ONE_AS_ONE = "1 AS one"
# Find by id - This can either be a specific id (1), a list of ids (1, 5, 6), or an array of ids ([5, 6, 10]).
- # If one or more records can not be found for the requested ids, then RecordNotFound will be raised. If the primary key
- # is an integer, find by id coerces its arguments using +to_i+.
+ # If one or more records cannot be found for the requested ids, then ActiveRecord::RecordNotFound will be raised.
+ # If the primary key is an integer, find by id coerces its arguments by using +to_i+.
#
# Person.find(1) # returns the object for ID = 1
# Person.find("1") # returns the object for ID = 1
@@ -16,9 +18,10 @@ module ActiveRecord
# Person.find([1]) # returns an array for the object with ID = 1
# Person.where("administrator = 1").order("created_on DESC").find(1)
#
- # NOTE: The returned records may not be in the same order as the ids you
- # provide since database rows are unordered. You'd need to provide an explicit QueryMethods#order
- # option if you want the results are sorted.
+ # NOTE: The returned records are in the same order as the ids you provide.
+ # If you want the results to be sorted by the database, use ActiveRecord::QueryMethods#where
+ # with an explicit ActiveRecord::QueryMethods#order option.
+ # Note that ActiveRecord::QueryMethods#where does not raise ActiveRecord::RecordNotFound.
#
# ==== Find with lock
#
@@ -76,17 +79,12 @@ module ActiveRecord
# Post.find_by "published_at < ?", 2.weeks.ago
def find_by(arg, *args)
where(arg, *args).take
- rescue RangeError
- nil
end
# Like #find_by, except that if no record is found, raises
# an ActiveRecord::RecordNotFound error.
def find_by!(arg, *args)
where(arg, *args).take!
- rescue RangeError
- raise RecordNotFound.new("Couldn't find #{@klass.name} with an out of range value",
- @klass.name)
end
# Gives a record (or N records if a parameter is supplied) without any implied
@@ -97,13 +95,13 @@ module ActiveRecord
# Person.take(5) # returns 5 objects fetched by SELECT * FROM people LIMIT 5
# Person.where(["name LIKE '%?'", name]).take
def take(limit = nil)
- limit ? limit(limit).to_a : find_take
+ limit ? find_take_with_limit(limit) : find_take
end
# Same as #take but raises ActiveRecord::RecordNotFound if no record
# is found. Note that #take! accepts no arguments.
def take!
- take or raise RecordNotFound.new("Couldn't find #{@klass.name} with [#{arel.where_sql(@klass.arel_engine)}]")
+ take || raise_record_not_found_exception!
end
# Find the first record (or first N records if a parameter is supplied).
@@ -117,7 +115,7 @@ module ActiveRecord
#
def first(limit = nil)
if limit
- find_nth_with_limit_and_offset(0, limit, offset: offset_index)
+ find_nth_with_limit(0, limit)
else
find_nth 0
end
@@ -126,7 +124,7 @@ module ActiveRecord
# Same as #first but raises ActiveRecord::RecordNotFound if no record
# is found. Note that #first! accepts no arguments.
def first!
- find_nth! 0
+ first || raise_record_not_found_exception!
end
# Find the last record (or last N records if a parameter is supplied).
@@ -145,27 +143,18 @@ module ActiveRecord
#
# [#<Person id:4>, #<Person id:3>, #<Person id:2>]
def last(limit = nil)
- return find_last(limit) if loaded? || limit_value
+ return find_last(limit) if loaded? || has_limit_or_offset?
- result = limit(limit || 1)
- result.order!(arel_attribute(primary_key)) if order_values.empty? && primary_key
+ result = ordered_relation.limit(limit)
result = result.reverse_order!
limit ? result.reverse : result.first
- rescue ActiveRecord::IrreversibleOrderError
- ActiveSupport::Deprecation.warn(<<-WARNING.squish)
- Finding a last element by loading the relation when SQL ORDER
- can not be reversed is deprecated.
- Rails 5.1 will raise ActiveRecord::IrreversibleOrderError in this case.
- Please call `to_a.last` if you still want to load the relation.
- WARNING
- find_last(limit)
end
# Same as #last but raises ActiveRecord::RecordNotFound if no record
# is found. Note that #last! accepts no arguments.
def last!
- last or raise RecordNotFound.new("Couldn't find #{@klass.name} with [#{arel.where_sql(@klass.arel_engine)}]")
+ last || raise_record_not_found_exception!
end
# Find the second record.
@@ -181,7 +170,7 @@ module ActiveRecord
# Same as #second but raises ActiveRecord::RecordNotFound if no record
# is found.
def second!
- find_nth! 1
+ second || raise_record_not_found_exception!
end
# Find the third record.
@@ -197,7 +186,7 @@ module ActiveRecord
# Same as #third but raises ActiveRecord::RecordNotFound if no record
# is found.
def third!
- find_nth! 2
+ third || raise_record_not_found_exception!
end
# Find the fourth record.
@@ -213,7 +202,7 @@ module ActiveRecord
# Same as #fourth but raises ActiveRecord::RecordNotFound if no record
# is found.
def fourth!
- find_nth! 3
+ fourth || raise_record_not_found_exception!
end
# Find the fifth record.
@@ -229,7 +218,7 @@ module ActiveRecord
# Same as #fifth but raises ActiveRecord::RecordNotFound if no record
# is found.
def fifth!
- find_nth! 4
+ fifth || raise_record_not_found_exception!
end
# Find the forty-second record. Also known as accessing "the reddit".
@@ -245,7 +234,7 @@ module ActiveRecord
# Same as #forty_two but raises ActiveRecord::RecordNotFound if no record
# is found.
def forty_two!
- find_nth! 41
+ forty_two || raise_record_not_found_exception!
end
# Find the third-to-last record.
@@ -261,7 +250,7 @@ module ActiveRecord
# Same as #third_to_last but raises ActiveRecord::RecordNotFound if no record
# is found.
def third_to_last!
- find_nth_from_last 3 or raise RecordNotFound.new("Couldn't find #{@klass.name} with [#{arel.where_sql(@klass.arel_engine)}]")
+ third_to_last || raise_record_not_found_exception!
end
# Find the second-to-last record.
@@ -277,7 +266,7 @@ module ActiveRecord
# Same as #second_to_last but raises ActiveRecord::RecordNotFound if no record
# is found.
def second_to_last!
- find_nth_from_last 2 or raise RecordNotFound.new("Couldn't find #{@klass.name} with [#{arel.where_sql(@klass.arel_engine)}]")
+ second_to_last || raise_record_not_found_exception!
end
# Returns true if a record exists in the table that matches the +id+ or
@@ -291,7 +280,7 @@ module ActiveRecord
# * Hash - Finds the record that matches these +find+-style conditions
# (such as <tt>{name: 'David'}</tt>).
# * +false+ - Returns always +false+.
- # * No args - Returns +false+ if the table is empty, +true+ otherwise.
+ # * No args - Returns +false+ if the relation is empty, +true+ otherwise.
#
# For more information about specifying conditions as a hash or array,
# see the Conditions section in the introduction to ActiveRecord::Base.
@@ -307,32 +296,25 @@ module ActiveRecord
# Person.exists?(name: 'David')
# Person.exists?(false)
# Person.exists?
+ # Person.where(name: 'Spartacus', rating: 4).exists?
def exists?(conditions = :none)
if Base === conditions
- conditions = conditions.id
- ActiveSupport::Deprecation.warn(<<-MSG.squish)
+ raise ArgumentError, <<-MSG.squish
You are passing an instance of ActiveRecord::Base to `exists?`.
- Please pass the id of the object by calling `.id`
+ Please pass the id of the object by calling `.id`.
MSG
end
- return false if !conditions
-
- relation = apply_join_dependency(self, construct_join_dependency)
- return false if ActiveRecord::NullRelation === relation
-
- relation = relation.except(:select, :order).select(ONE_AS_ONE).limit(1)
+ return false if !conditions || limit_value == 0
- case conditions
- when Array, Hash
- relation = relation.where(conditions)
- else
- unless conditions == :none
- relation = relation.where(primary_key => conditions)
- end
+ if eager_loading?
+ relation = apply_join_dependency(eager_loading: false)
+ return relation.exists?(conditions)
end
- connection.select_value(relation, "#{name} Exists", relation.bound_attributes) ? true : false
+ relation = construct_relation_for_exists(conditions)
+
+ skip_query_cache_if_necessary { connection.select_one(relation.arel, "#{name} Exists") } ? true : false
end
# This method is called whenever no records are found with either a single
@@ -343,256 +325,232 @@ module ActiveRecord
# of results obtained should be provided in the +result_size+ argument and
# the expected number of results should be provided in the +expected_size+
# argument.
- def raise_record_not_found_exception!(ids, result_size, expected_size) #:nodoc:
- conditions = arel.where_sql(@klass.arel_engine)
+ def raise_record_not_found_exception!(ids = nil, result_size = nil, expected_size = nil, key = primary_key, not_found_ids = nil) # :nodoc:
+ conditions = arel.where_sql(@klass)
conditions = " [#{conditions}]" if conditions
-
- if Array(ids).size == 1
- error = "Couldn't find #{@klass.name} with '#{primary_key}'=#{ids}#{conditions}"
+ name = @klass.name
+
+ if ids.nil?
+ error = +"Couldn't find #{name}"
+ error << " with#{conditions}" if conditions
+ raise RecordNotFound.new(error, name, key)
+ elsif Array(ids).size == 1
+ error = "Couldn't find #{name} with '#{key}'=#{ids}#{conditions}"
+ raise RecordNotFound.new(error, name, key, ids)
else
- error = "Couldn't find all #{@klass.name.pluralize} with '#{primary_key}': "
- error << "(#{ids.join(", ")})#{conditions} (found #{result_size} results, but was looking for #{expected_size})"
+ error = +"Couldn't find all #{name.pluralize} with '#{key}': "
+ error << "(#{ids.join(", ")})#{conditions} (found #{result_size} results, but was looking for #{expected_size})."
+ error << " Couldn't find #{name.pluralize(not_found_ids.size)} with #{key.to_s.pluralize(not_found_ids.size)} #{not_found_ids.join(', ')}." if not_found_ids
+ raise RecordNotFound.new(error, name, key, ids)
end
-
- raise RecordNotFound, error
end
private
- def offset_index
- offset_value || 0
- end
+ def offset_index
+ offset_value || 0
+ end
- def find_with_associations
- # NOTE: the JoinDependency constructed here needs to know about
- # any joins already present in `self`, so pass them in
- #
- # failing to do so means that in cases like activerecord/test/cases/associations/inner_join_association_test.rb:136
- # incorrect SQL is generated. In that case, the join dependency for
- # SpecialCategorizations is constructed without knowledge of the
- # preexisting join in joins_values to categorizations (by way of
- # the `has_many :through` for categories).
- #
- join_dependency = construct_join_dependency(joins_values)
-
- aliases = join_dependency.aliases
- relation = select aliases.columns
- relation = apply_join_dependency(relation, join_dependency)
-
- if block_given?
- yield relation
- else
- if ActiveRecord::NullRelation === relation
- []
+ def construct_relation_for_exists(conditions)
+ conditions = sanitize_forbidden_attributes(conditions)
+
+ if distinct_value && offset_value
+ relation = limit(1)
else
- arel = relation.arel
- rows = connection.select_all(arel, 'SQL', relation.bound_attributes)
- join_dependency.instantiate(rows, aliases)
+ relation = except(:select, :distinct, :order)._select!(ONE_AS_ONE).limit!(1)
end
- end
- end
- def construct_join_dependency(joins = [])
- including = eager_load_values + includes_values
- ActiveRecord::Associations::JoinDependency.new(@klass, including, joins)
- end
+ case conditions
+ when Array, Hash
+ relation.where!(conditions) unless conditions.empty?
+ else
+ relation.where!(primary_key => conditions) unless conditions == :none
+ end
- def construct_relation_for_association_calculations
- from = arel.froms.first
- if Arel::Table === from
- apply_join_dependency(self, construct_join_dependency(joins_values))
- else
- # FIXME: as far as I can tell, `from` will always be an Arel::Table.
- # There are no tests that test this branch, but presumably it's
- # possible for `from` to be a list?
- apply_join_dependency(self, construct_join_dependency(from))
+ relation
end
- end
- def apply_join_dependency(relation, join_dependency)
- relation = relation.except(:includes, :eager_load, :preload)
- relation = relation.joins join_dependency
+ def construct_join_dependency(associations)
+ ActiveRecord::Associations::JoinDependency.new(
+ klass, table, associations
+ )
+ end
- if using_limitable_reflections?(join_dependency.reflections)
- relation
- else
- if relation.limit_value
- limited_ids = limited_ids_for(relation)
- limited_ids.empty? ? relation.none! : relation.where!(primary_key => limited_ids)
+ def apply_join_dependency(eager_loading: group_values.empty?)
+ join_dependency = construct_join_dependency(eager_load_values + includes_values)
+ relation = except(:includes, :eager_load, :preload).joins!(join_dependency)
+
+ if eager_loading && !using_limitable_reflections?(join_dependency.reflections)
+ if has_limit_or_offset?
+ limited_ids = limited_ids_for(relation)
+ limited_ids.empty? ? relation.none! : relation.where!(primary_key => limited_ids)
+ end
+ relation.limit_value = relation.offset_value = nil
+ end
+
+ if block_given?
+ yield relation, join_dependency
+ else
+ relation
end
- relation.except(:limit, :offset)
end
- end
- def limited_ids_for(relation)
- values = @klass.connection.columns_for_distinct(
- "#{quoted_table_name}.#{quoted_primary_key}", relation.order_values)
+ def limited_ids_for(relation)
+ values = @klass.connection.columns_for_distinct(
+ connection.visitor.compile(arel_attribute(primary_key)),
+ relation.order_values
+ )
- relation = relation.except(:select).select(values).distinct!
- arel = relation.arel
+ relation = relation.except(:select).select(values).distinct!
- id_rows = @klass.connection.select_all(arel, 'SQL', relation.bound_attributes)
- id_rows.map {|row| row[primary_key]}
- end
+ id_rows = skip_query_cache_if_necessary { @klass.connection.select_all(relation.arel, "SQL") }
+ id_rows.map { |row| row[primary_key] }
+ end
- def using_limitable_reflections?(reflections)
- reflections.none?(&:collection?)
- end
+ def using_limitable_reflections?(reflections)
+ reflections.none?(&:collection?)
+ end
- protected
+ def find_with_ids(*ids)
+ raise UnknownPrimaryKey.new(@klass) if primary_key.nil?
- def find_with_ids(*ids)
- raise UnknownPrimaryKey.new(@klass) if primary_key.nil?
+ expects_array = ids.first.kind_of?(Array)
+ return [] if expects_array && ids.first.empty?
- expects_array = ids.first.kind_of?(Array)
- return ids.first if expects_array && ids.first.empty?
+ ids = ids.flatten.compact.uniq
- ids = ids.flatten.compact.uniq
+ model_name = @klass.name
- case ids.size
- when 0
- raise RecordNotFound, "Couldn't find #{@klass.name} without an ID"
- when 1
- result = find_one(ids.first)
- expects_array ? [ result ] : result
- else
- find_some(ids)
+ case ids.size
+ when 0
+ error_message = "Couldn't find #{model_name} without an ID"
+ raise RecordNotFound.new(error_message, model_name, primary_key)
+ when 1
+ result = find_one(ids.first)
+ expects_array ? [ result ] : result
+ else
+ find_some(ids)
+ end
end
- rescue RangeError
- raise RecordNotFound, "Couldn't find #{@klass.name} with an out of range ID"
- end
- def find_one(id)
- if ActiveRecord::Base === id
- id = id.id
- ActiveSupport::Deprecation.warn(<<-MSG.squish)
- You are passing an instance of ActiveRecord::Base to `find`.
- Please pass the id of the object by calling `.id`
- MSG
- end
+ def find_one(id)
+ if ActiveRecord::Base === id
+ raise ArgumentError, <<-MSG.squish
+ You are passing an instance of ActiveRecord::Base to `find`.
+ Please pass the id of the object by calling `.id`.
+ MSG
+ end
- relation = where(primary_key => id)
- record = relation.take
+ relation = where(primary_key => id)
+ record = relation.take
- raise_record_not_found_exception!(id, 0, 1) unless record
+ raise_record_not_found_exception!(id, 0, 1) unless record
- record
- end
+ record
+ end
- def find_some(ids)
- return find_some_ordered(ids) unless order_values.present?
+ def find_some(ids)
+ return find_some_ordered(ids) unless order_values.present?
- result = where(primary_key => ids).to_a
+ result = where(primary_key => ids).to_a
- expected_size =
- if limit_value && ids.size > limit_value
- limit_value
- else
- ids.size
- end
+ expected_size =
+ if limit_value && ids.size > limit_value
+ limit_value
+ else
+ ids.size
+ end
- # 11 ids with limit 3, offset 9 should give 2 results.
- if offset_value && (ids.size - offset_value < expected_size)
- expected_size = ids.size - offset_value
- end
+ # 11 ids with limit 3, offset 9 should give 2 results.
+ if offset_value && (ids.size - offset_value < expected_size)
+ expected_size = ids.size - offset_value
+ end
- if result.size == expected_size
- result
- else
- raise_record_not_found_exception!(ids, result.size, expected_size)
+ if result.size == expected_size
+ result
+ else
+ raise_record_not_found_exception!(ids, result.size, expected_size)
+ end
end
- end
- def find_some_ordered(ids)
- ids = ids.slice(offset_value || 0, limit_value || ids.size) || []
+ def find_some_ordered(ids)
+ ids = ids.slice(offset_value || 0, limit_value || ids.size) || []
- result = except(:limit, :offset).where(primary_key => ids).records
+ result = except(:limit, :offset).where(primary_key => ids).records
- if result.size == ids.size
- pk_type = @klass.type_for_attribute(primary_key)
+ if result.size == ids.size
+ pk_type = @klass.type_for_attribute(primary_key)
- records_by_id = result.index_by(&:id)
- ids.map { |id| records_by_id.fetch(pk_type.cast(id)) }
- else
- raise_record_not_found_exception!(ids, result.size, ids.size)
+ records_by_id = result.index_by(&:id)
+ ids.map { |id| records_by_id.fetch(pk_type.cast(id)) }
+ else
+ raise_record_not_found_exception!(ids, result.size, ids.size)
+ end
end
- end
- def find_take
- if loaded?
- @records.first
- else
- @take ||= limit(1).records.first
+ def find_take
+ if loaded?
+ records.first
+ else
+ @take ||= limit(1).records.first
+ end
end
- end
- def find_nth(index, offset = nil)
- # TODO: once the offset argument is removed we rely on offset_index
- # within find_nth_with_limit, rather than pass it in via
- # find_nth_with_limit_and_offset
- if offset
- ActiveSupport::Deprecation.warn(<<-MSG.squish)
- Passing an offset argument to find_nth is deprecated,
- please use Relation#offset instead.
- MSG
+ def find_take_with_limit(limit)
+ if loaded?
+ records.take(limit)
+ else
+ limit(limit).to_a
+ end
end
- if loaded?
- @records[index]
- else
- offset ||= offset_index
- @offsets[offset + index] ||= find_nth_with_limit_and_offset(index, 1, offset: offset).first
+
+ def find_nth(index)
+ @offsets[offset_index + index] ||= find_nth_with_limit(index, 1).first
end
- end
- def find_nth!(index)
- find_nth(index) or raise RecordNotFound.new("Couldn't find #{@klass.name} with [#{arel.where_sql(@klass.arel_engine)}]")
- end
+ def find_nth_with_limit(index, limit)
+ if loaded?
+ records[index, limit] || []
+ else
+ relation = ordered_relation
+
+ if limit_value
+ limit = [limit_value - index, limit].min
+ end
+
+ if limit > 0
+ relation = relation.offset(offset_index + index) unless index.zero?
+ relation.limit(limit).to_a
+ else
+ []
+ end
+ end
+ end
- def find_nth_with_limit(index, limit)
- # TODO: once the offset argument is removed from find_nth,
- # find_nth_with_limit_and_offset can be merged into this method
- relation = if order_values.empty? && primary_key
- order(arel_attribute(primary_key).asc)
- else
- self
- end
-
- relation = relation.offset(index) unless index.zero?
- relation.limit(limit).to_a
- end
+ def find_nth_from_last(index)
+ if loaded?
+ records[-index]
+ else
+ relation = ordered_relation
- def find_nth_from_last(index)
- if loaded?
- @records[-index]
- else
- relation = if order_values.empty? && primary_key
- order(arel_attribute(primary_key).asc)
- else
- self
- end
-
- relation.to_a[-index]
- # TODO: can be made more performant on large result sets by
- # for instance, last(index)[-index] (which would require
- # refactoring the last(n) finder method to make test suite pass),
- # or by using a combination of reverse_order, limit, and offset,
- # e.g., reverse_order.offset(index-1).first
+ if equal?(relation) || has_limit_or_offset?
+ relation.records[-index]
+ else
+ relation.last(index)[-index]
+ end
+ end
end
- end
-
- private
- def find_nth_with_limit_and_offset(index, limit, offset:) # :nodoc:
- if loaded?
- @records[index, limit]
- else
- index += offset
- find_nth_with_limit(index, limit)
+ def find_last(limit)
+ limit ? records.last(limit) : records.last
end
- end
- def find_last(limit)
- limit ? records.last(limit) : records.last
- end
+ def ordered_relation
+ if order_values.empty? && (implicit_order_column || primary_key)
+ order(arel_attribute(implicit_order_column || primary_key).asc)
+ else
+ self
+ end
+ end
end
end
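
A hedged sketch of the finder behaviour after this change (Person is assumed); each line maps onto one of the rewritten private helpers above:

  people = Person.where(active: true)

  people.take(2)    # find_take_with_limit: uses the loaded records if available,
                    # otherwise limit(2).to_a
  people.first!     # first || raise_record_not_found_exception! (no-args form)
  people.last(3)    # ordered_relation.limit(3), reverse_order!, then reversed in Ruby
  Person.exists?(name: "David")
                    # construct_relation_for_exists: SELECT 1 AS one ... LIMIT 1
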
diff --git a/activerecord/lib/active_record/relation/from_clause.rb b/activerecord/lib/active_record/relation/from_clause.rb
index 8945cb0cc5..c53a682aee 100644
--- a/activerecord/lib/active_record/relation/from_clause.rb
+++ b/activerecord/lib/active_record/relation/from_clause.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
module ActiveRecord
class Relation
class FromClause # :nodoc:
@@ -8,14 +10,6 @@ module ActiveRecord
@name = name
end
- def binds
- if value.is_a?(Relation)
- value.bound_attributes
- else
- []
- end
- end
-
def merge(other)
self
end
diff --git a/activerecord/lib/active_record/relation/merger.rb b/activerecord/lib/active_record/relation/merger.rb
index 396638d74d..4de7465128 100644
--- a/activerecord/lib/active_record/relation/merger.rb
+++ b/activerecord/lib/active_record/relation/merger.rb
@@ -1,4 +1,6 @@
-require 'active_support/core_ext/hash/keys'
+# frozen_string_literal: true
+
+require "active_support/core_ext/hash/keys"
module ActiveRecord
class Relation
@@ -21,7 +23,11 @@ module ActiveRecord
# build a relation to merge in rather than directly merging
# the values.
def other
- other = Relation.create(relation.klass, relation.table, relation.predicate_builder)
+ other = Relation.create(
+ relation.klass,
+ table: relation.table,
+ predicate_builder: relation.predicate_builder
+ )
hash.each { |k, v|
if k == :joins
if Hash === v
@@ -50,7 +56,7 @@ module ActiveRecord
NORMAL_VALUES = Relation::VALUE_METHODS -
Relation::CLAUSE_METHODS -
- [:includes, :preload, :joins, :order, :reverse_order, :lock, :create_with, :reordering] # :nodoc:
+ [:includes, :preload, :joins, :left_outer_joins, :order, :reverse_order, :lock, :create_with, :reordering] # :nodoc:
def normal_values
NORMAL_VALUES
@@ -77,91 +83,107 @@ module ActiveRecord
merge_clauses
merge_preloads
merge_joins
+ merge_outer_joins
relation
end
private
- def merge_preloads
- return if other.preload_values.empty? && other.includes_values.empty?
+ def merge_preloads
+ return if other.preload_values.empty? && other.includes_values.empty?
- if other.klass == relation.klass
- relation.preload!(*other.preload_values) unless other.preload_values.empty?
- relation.includes!(other.includes_values) unless other.includes_values.empty?
- else
- reflection = relation.klass.reflect_on_all_associations.find do |r|
- r.class_name == other.klass.name
- end || return
+ if other.klass == relation.klass
+ relation.preload!(*other.preload_values) unless other.preload_values.empty?
+ relation.includes!(other.includes_values) unless other.includes_values.empty?
+ else
+ reflection = relation.klass.reflect_on_all_associations.find do |r|
+ r.class_name == other.klass.name
+ end || return
- unless other.preload_values.empty?
- relation.preload! reflection.name => other.preload_values
- end
+ unless other.preload_values.empty?
+ relation.preload! reflection.name => other.preload_values
+ end
- unless other.includes_values.empty?
- relation.includes! reflection.name => other.includes_values
+ unless other.includes_values.empty?
+ relation.includes! reflection.name => other.includes_values
+ end
end
end
- end
- def merge_joins
- return if other.joins_values.blank?
+ def merge_joins
+ return if other.joins_values.blank?
- if other.klass == relation.klass
- relation.joins!(*other.joins_values)
- else
- joins_dependency, rest = other.joins_values.partition do |join|
- case join
- when Hash, Symbol, Array
- true
- else
- false
+ if other.klass == relation.klass
+ relation.joins!(*other.joins_values)
+ else
+ joins_dependency = other.joins_values.map do |join|
+ case join
+ when Hash, Symbol, Array
+ other.send(:construct_join_dependency, join)
+ else
+ join
+ end
end
+
+ relation.joins!(*joins_dependency)
end
+ end
- join_dependency = ActiveRecord::Associations::JoinDependency.new(other.klass,
- joins_dependency,
- [])
- relation.joins! rest
+ def merge_outer_joins
+ return if other.left_outer_joins_values.blank?
- @relation = relation.joins join_dependency
- end
- end
+ if other.klass == relation.klass
+ relation.left_outer_joins!(*other.left_outer_joins_values)
+ else
+ joins_dependency = other.left_outer_joins_values.map do |join|
+ case join
+ when Hash, Symbol, Array
+ other.send(:construct_join_dependency, join)
+ else
+ join
+ end
+ end
- def merge_multi_values
- if other.reordering_value
- # override any order specified in the original relation
- relation.reorder! other.order_values
- elsif other.order_values
- # merge in order_values from relation
- relation.order! other.order_values
+ relation.left_outer_joins!(*joins_dependency)
+ end
end
- relation.extend(*other.extending_values) unless other.extending_values.blank?
- end
+ def merge_multi_values
+ if other.reordering_value
+ # override any order specified in the original relation
+ relation.reorder!(*other.order_values)
+ elsif other.order_values.any?
+ # merge in order_values from relation
+ relation.order!(*other.order_values)
+ end
- def merge_single_values
- if relation.from_clause.empty?
- relation.from_clause = other.from_clause
+ extensions = other.extensions - relation.extensions
+ relation.extending!(*extensions) if extensions.any?
end
- relation.lock_value ||= other.lock_value
- unless other.create_with_value.blank?
- relation.create_with_value = (relation.create_with_value || {}).merge(other.create_with_value)
+ def merge_single_values
+ relation.lock_value ||= other.lock_value if other.lock_value
+
+ unless other.create_with_value.blank?
+ relation.create_with_value = (relation.create_with_value || {}).merge(other.create_with_value)
+ end
end
- end
- CLAUSE_METHOD_NAMES = CLAUSE_METHODS.map do |name|
- ["#{name}_clause", "#{name}_clause="]
- end
+ def merge_clauses
+ relation.from_clause = other.from_clause if replace_from_clause?
+
+ where_clause = relation.where_clause.merge(other.where_clause)
+ relation.where_clause = where_clause unless where_clause.empty?
- def merge_clauses
- CLAUSE_METHOD_NAMES.each do |(reader, writer)|
- clause = relation.send(reader)
- other_clause = other.send(reader)
- relation.send(writer, clause.merge(other_clause))
+ having_clause = relation.having_clause.merge(other.having_clause)
+ relation.having_clause = having_clause unless having_clause.empty?
+ end
+
+ def replace_from_clause?
+ relation.from_clause.empty? && !other.from_clause.empty? &&
+ relation.klass.base_class == other.klass.base_class
end
- end
end
end
end
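
A small sketch of the new merge_outer_joins step (Author and its posts association are assumed):

  with_posts = Author.left_outer_joins(:posts)
  Author.where(active: true).merge(with_posts)
  # merge_outer_joins now copies left_outer_joins_values across when both
  # relations are for the same class (and builds a join dependency when they
  # differ), mirroring what merge_joins does for INNER JOINs.
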
diff --git a/activerecord/lib/active_record/relation/predicate_builder.rb b/activerecord/lib/active_record/relation/predicate_builder.rb
index 550416238f..240de3bb69 100644
--- a/activerecord/lib/active_record/relation/predicate_builder.rb
+++ b/activerecord/lib/active_record/relation/predicate_builder.rb
@@ -1,14 +1,7 @@
+# frozen_string_literal: true
+
module ActiveRecord
class PredicateBuilder # :nodoc:
- require 'active_record/relation/predicate_builder/array_handler'
- require 'active_record/relation/predicate_builder/association_query_handler'
- require 'active_record/relation/predicate_builder/base_handler'
- require 'active_record/relation/predicate_builder/basic_object_handler'
- require 'active_record/relation/predicate_builder/class_handler'
- require 'active_record/relation/predicate_builder/polymorphic_array_handler'
- require 'active_record/relation/predicate_builder/range_handler'
- require 'active_record/relation/predicate_builder/relation_handler'
-
delegate :resolve_column_aliases, to: :table
def initialize(table)
@@ -16,14 +9,11 @@ module ActiveRecord
@handlers = []
register_handler(BasicObject, BasicObjectHandler.new(self))
- register_handler(Class, ClassHandler.new(self))
register_handler(Base, BaseHandler.new(self))
register_handler(Range, RangeHandler.new(self))
- register_handler(RangeHandler::RangeWithBinds, RangeHandler.new(self))
register_handler(Relation, RelationHandler.new)
register_handler(Array, ArrayHandler.new(self))
- register_handler(AssociationQueryValue, AssociationQueryHandler.new(self))
- register_handler(PolymorphicArrayValue, PolymorphicArrayHandler.new(self))
+ register_handler(Set, ArrayHandler.new(self))
end
def build_from_hash(attributes)
@@ -31,28 +21,13 @@ module ActiveRecord
expand_from_hash(attributes)
end
- def create_binds(attributes)
- attributes = convert_dot_notation_to_hash(attributes)
- create_binds_for_hash(attributes)
- end
-
- def expand(column, value)
- # Find the foreign key when using queries such as:
- # Post.where(author: author)
- #
- # For polymorphic relationships, find the foreign key and type:
- # PriceEstimate.where(estimate_of: treasure)
- value = AssociationQueryHandler.value_for(table, column, value) if table.associated_with?(column)
- build(table.arel_attribute(column), value)
- end
-
def self.references(attributes)
attributes.map do |key, value|
if value.is_a?(Hash)
key
else
key = key.to_s
- key.split('.'.freeze).first if key.include?('.'.freeze)
+ key.split(".").first if key.include?(".")
end
end.compact
end
@@ -73,97 +48,103 @@ module ActiveRecord
end
def build(attribute, value)
- handler_for(value).call(attribute, value)
- end
-
- protected
-
- attr_reader :table
-
- def expand_from_hash(attributes)
- return ["1=0"] if attributes.empty?
-
- attributes.flat_map do |key, value|
- key = key.to_s
- if value.is_a?(Hash)
- associated_predicate_builder(key).expand_from_hash(value)
- else
- expand(key, value)
- end
+ if table.type(attribute.name).force_equality?(value)
+ bind = build_bind_attribute(attribute.name, value)
+ attribute.eq(bind)
+ else
+ handler_for(value).call(attribute, value)
end
end
+ def build_bind_attribute(column_name, value)
+ attr = Relation::QueryAttribute.new(column_name.to_s, value, table.type(column_name))
+ Arel::Nodes::BindParam.new(attr)
+ end
- def create_binds_for_hash(attributes)
- result = attributes.dup
- binds = []
-
- attributes.each do |column_name, value|
- case value
- when Hash
- attrs, bvs = associated_predicate_builder(column_name).create_binds_for_hash(value)
- result[column_name] = attrs
- binds += bvs
- when Relation
- binds += value.bound_attributes
- when Range
- first = value.begin
- last = value.end
- unless first.respond_to?(:infinite?) && first.infinite?
- binds << build_bind_param(column_name, first)
- first = Arel::Nodes::BindParam.new
- end
- unless last.respond_to?(:infinite?) && last.infinite?
- binds << build_bind_param(column_name, last)
- last = Arel::Nodes::BindParam.new
- end
-
- result[column_name] = RangeHandler::RangeWithBinds.new(first, last, value.exclude_end?)
- else
- if can_be_bound?(column_name, value)
- result[column_name] = Arel::Nodes::BindParam.new
- binds << build_bind_param(column_name, value)
+ protected
+ def expand_from_hash(attributes)
+ return ["1=0"] if attributes.empty?
+
+ attributes.flat_map do |key, value|
+ if value.is_a?(Hash) && !table.has_column?(key)
+ associated_predicate_builder(key).expand_from_hash(value)
+ elsif table.associated_with?(key)
+ # Find the foreign key when using queries such as:
+ # Post.where(author: author)
+ #
+ # For polymorphic relationships, find the foreign key and type:
+ # PriceEstimate.where(estimate_of: treasure)
+ associated_table = table.associated_table(key)
+ if associated_table.polymorphic_association?
+ case value.is_a?(Array) ? value.first : value
+ when Base, Relation
+ value = [value] unless value.is_a?(Array)
+ klass = PolymorphicArrayValue
+ end
+ end
+
+ klass ||= AssociationQueryValue
+ queries = klass.new(associated_table, value).queries.map do |query|
+ expand_from_hash(query).reduce(&:and)
+ end
+ queries.reduce(&:or)
+ elsif table.aggregated_with?(key)
+ mapping = table.reflect_on_aggregation(key).mapping
+ values = value.nil? ? [nil] : Array.wrap(value)
+ if mapping.length == 1 || values.empty?
+ column_name, aggr_attr = mapping.first
+ values = values.map do |object|
+ object.respond_to?(aggr_attr) ? object.public_send(aggr_attr) : object
+ end
+ build(table.arel_attribute(column_name), values)
+ else
+ queries = values.map do |object|
+ mapping.map do |field_attr, aggregate_attr|
+ build(table.arel_attribute(field_attr), object.try!(aggregate_attr))
+ end.reduce(&:and)
+ end
+ queries.reduce(&:or)
+ end
+ else
+ build(table.arel_attribute(key), value)
end
end
end
- [result, binds]
- end
-
private
+ attr_reader :table
- def associated_predicate_builder(association_name)
- self.class.new(table.associated_table(association_name))
- end
-
- def convert_dot_notation_to_hash(attributes)
- dot_notation = attributes.keys.select do |s|
- s.respond_to?(:include?) && s.include?(".".freeze)
+ def associated_predicate_builder(association_name)
+ self.class.new(table.associated_table(association_name))
end
- dot_notation.each do |key|
- table_name, column_name = key.split(".".freeze)
- value = attributes.delete(key)
- attributes[table_name] ||= {}
+ def convert_dot_notation_to_hash(attributes)
+ dot_notation = attributes.select do |k, v|
+ k.include?(".") && !v.is_a?(Hash)
+ end
- attributes[table_name] = attributes[table_name].merge(column_name => value)
- end
+ dot_notation.each_key do |key|
+ table_name, column_name = key.split(".")
+ value = attributes.delete(key)
+ attributes[table_name] ||= {}
- attributes
- end
-
- def handler_for(object)
- @handlers.detect { |klass, _| klass === object }.last
- end
+ attributes[table_name] = attributes[table_name].merge(column_name => value)
+ end
- def can_be_bound?(column_name, value)
- !value.nil? &&
- handler_for(value).is_a?(BasicObjectHandler) &&
- !table.associated_with?(column_name)
- end
+ attributes
+ end
- def build_bind_param(column_name, value)
- Relation::QueryAttribute.new(column_name.to_s, value, table.type(column_name))
- end
+ def handler_for(object)
+ @handlers.detect { |klass, _| klass === object }.last
+ end
end
end
+
+require "active_record/relation/predicate_builder/array_handler"
+require "active_record/relation/predicate_builder/base_handler"
+require "active_record/relation/predicate_builder/basic_object_handler"
+require "active_record/relation/predicate_builder/range_handler"
+require "active_record/relation/predicate_builder/relation_handler"
+
+require "active_record/relation/predicate_builder/association_query_value"
+require "active_record/relation/predicate_builder/polymorphic_array_value"
diff --git a/activerecord/lib/active_record/relation/predicate_builder/array_handler.rb b/activerecord/lib/active_record/relation/predicate_builder/array_handler.rb
index 95dbd6a77f..ee2ece1560 100644
--- a/activerecord/lib/active_record/relation/predicate_builder/array_handler.rb
+++ b/activerecord/lib/active_record/relation/predicate_builder/array_handler.rb
@@ -1,3 +1,7 @@
+# frozen_string_literal: true
+
+require "active_support/core_ext/array/extract"
+
module ActiveRecord
class PredicateBuilder
class ArrayHandler # :nodoc:
@@ -6,18 +10,21 @@ module ActiveRecord
end
def call(attribute, value)
- values = value.map { |x| x.is_a?(Base) ? x.id : x }
- nils, values = values.partition(&:nil?)
-
- return attribute.in([]) if values.empty? && nils.empty?
+ return attribute.in([]) if value.empty?
- ranges, values = values.partition { |v| v.is_a?(Range) }
+ values = value.map { |x| x.is_a?(Base) ? x.id : x }
+ nils = values.extract!(&:nil?)
+ ranges = values.extract! { |v| v.is_a?(Range) }
values_predicate =
case values.length
when 0 then NullPredicate
when 1 then predicate_builder.build(attribute, values.first)
- else attribute.in(values)
+ else
+ values.map! do |v|
+ predicate_builder.build_bind_attribute(attribute.name, v)
+ end
+ values.empty? ? NullPredicate : attribute.in(values)
end
unless nils.empty?
@@ -26,18 +33,17 @@ module ActiveRecord
array_predicates = ranges.map { |range| predicate_builder.build(attribute, range) }
array_predicates.unshift(values_predicate)
- array_predicates.inject { |composite, predicate| composite.or(predicate) }
+ array_predicates.inject(&:or)
end
- protected
-
- attr_reader :predicate_builder
+ private
+ attr_reader :predicate_builder
- module NullPredicate # :nodoc:
- def self.or(other)
- other
+ module NullPredicate # :nodoc:
+ def self.or(other)
+ other
+ end
end
- end
end
end
end
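
A sketch of how the updated handler partitions an array value (Post assumed):

  Post.where(id: [1, 2, nil, 10..20])
  # ArrayHandler extracts the nils (handled as `id IS NULL`) and the ranges
  # (handed back to predicate_builder, i.e. the RangeHandler), binds each
  # remaining plain value via build_bind_attribute, builds `id IN (...)`,
  # and ORs the pieces together.
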
diff --git a/activerecord/lib/active_record/relation/predicate_builder/association_query_handler.rb b/activerecord/lib/active_record/relation/predicate_builder/association_query_handler.rb
deleted file mode 100644
index d7fd878265..0000000000
--- a/activerecord/lib/active_record/relation/predicate_builder/association_query_handler.rb
+++ /dev/null
@@ -1,88 +0,0 @@
-module ActiveRecord
- class PredicateBuilder
- class AssociationQueryHandler # :nodoc:
- def self.value_for(table, column, value)
- klass = if table.associated_table(column).polymorphic_association? && ::Array === value && value.first.is_a?(Base)
- PolymorphicArrayValue
- else
- AssociationQueryValue
- end
-
- klass.new(table.associated_table(column), value)
- end
-
- def initialize(predicate_builder)
- @predicate_builder = predicate_builder
- end
-
- def call(attribute, value)
- queries = {}
-
- table = value.associated_table
- if value.base_class
- queries[table.association_foreign_type.to_s] = value.base_class.name
- end
-
- queries[table.association_foreign_key.to_s] = value.ids
- predicate_builder.build_from_hash(queries)
- end
-
- protected
-
- attr_reader :predicate_builder
- end
-
- class AssociationQueryValue # :nodoc:
- attr_reader :associated_table, :value
-
- def initialize(associated_table, value)
- @associated_table = associated_table
- @value = value
- end
-
- def ids
- case value
- when Relation
- value.select(primary_key)
- when Array
- value.map { |v| convert_to_id(v) }
- else
- convert_to_id(value)
- end
- end
-
- def base_class
- if associated_table.polymorphic_association?
- @base_class ||= polymorphic_base_class_from_value
- end
- end
-
- private
-
- def primary_key
- associated_table.association_primary_key(base_class)
- end
-
- def polymorphic_base_class_from_value
- case value
- when Relation
- value.klass.base_class
- when Array
- val = value.compact.first
- val.class.base_class if val.is_a?(Base)
- when Base
- value.class.base_class
- end
- end
-
- def convert_to_id(value)
- case value
- when Base
- value._read_attribute(primary_key)
- else
- value
- end
- end
- end
- end
-end
diff --git a/activerecord/lib/active_record/relation/predicate_builder/association_query_value.rb b/activerecord/lib/active_record/relation/predicate_builder/association_query_value.rb
new file mode 100644
index 0000000000..88cd71cf69
--- /dev/null
+++ b/activerecord/lib/active_record/relation/predicate_builder/association_query_value.rb
@@ -0,0 +1,43 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ class PredicateBuilder
+ class AssociationQueryValue # :nodoc:
+ def initialize(associated_table, value)
+ @associated_table = associated_table
+ @value = value
+ end
+
+ def queries
+ [associated_table.association_join_foreign_key.to_s => ids]
+ end
+
+ private
+ attr_reader :associated_table, :value
+
+ def ids
+ case value
+ when Relation
+ value.select_values.empty? ? value.select(primary_key) : value
+ when Array
+ value.map { |v| convert_to_id(v) }
+ else
+ convert_to_id(value)
+ end
+ end
+
+ def primary_key
+ associated_table.association_join_primary_key
+ end
+
+ def convert_to_id(value)
+ case value
+ when Base
+ value._read_attribute(primary_key)
+ else
+ value
+ end
+ end
+ end
+ end
+end
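
An illustrative note on the new value object (Post belongs_to :author assumed):

  Post.where(author: Author.where(featured: true))
  # AssociationQueryValue#queries returns [{ "author_id" => <Author relation> }].
  # Because the inner relation has no explicit select, #ids re-selects its
  # primary key, so the final predicate becomes
  # `author_id IN (SELECT "authors"."id" FROM "authors" WHERE ...)`.
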
diff --git a/activerecord/lib/active_record/relation/predicate_builder/base_handler.rb b/activerecord/lib/active_record/relation/predicate_builder/base_handler.rb
index 6fa5b16f73..10c5c1a66a 100644
--- a/activerecord/lib/active_record/relation/predicate_builder/base_handler.rb
+++ b/activerecord/lib/active_record/relation/predicate_builder/base_handler.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
module ActiveRecord
class PredicateBuilder
class BaseHandler # :nodoc:
@@ -9,9 +11,8 @@ module ActiveRecord
predicate_builder.build(attribute, value.id)
end
- protected
-
- attr_reader :predicate_builder
+ private
+ attr_reader :predicate_builder
end
end
end
diff --git a/activerecord/lib/active_record/relation/predicate_builder/basic_object_handler.rb b/activerecord/lib/active_record/relation/predicate_builder/basic_object_handler.rb
index 6cec75dc0a..e8c9f60860 100644
--- a/activerecord/lib/active_record/relation/predicate_builder/basic_object_handler.rb
+++ b/activerecord/lib/active_record/relation/predicate_builder/basic_object_handler.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
module ActiveRecord
class PredicateBuilder
class BasicObjectHandler # :nodoc:
@@ -6,12 +8,12 @@ module ActiveRecord
end
def call(attribute, value)
- attribute.eq(value)
+ bind = predicate_builder.build_bind_attribute(attribute.name, value)
+ attribute.eq(bind)
end
- protected
-
- attr_reader :predicate_builder
+ private
+ attr_reader :predicate_builder
end
end
end
diff --git a/activerecord/lib/active_record/relation/predicate_builder/class_handler.rb b/activerecord/lib/active_record/relation/predicate_builder/class_handler.rb
deleted file mode 100644
index ed313fc9d4..0000000000
--- a/activerecord/lib/active_record/relation/predicate_builder/class_handler.rb
+++ /dev/null
@@ -1,27 +0,0 @@
-module ActiveRecord
- class PredicateBuilder
- class ClassHandler # :nodoc:
- def initialize(predicate_builder)
- @predicate_builder = predicate_builder
- end
-
- def call(attribute, value)
- print_deprecation_warning
- predicate_builder.build(attribute, value.name)
- end
-
- protected
-
- attr_reader :predicate_builder
-
- private
-
- def print_deprecation_warning
- ActiveSupport::Deprecation.warn(<<-MSG.squish)
- Passing a class as a value in an Active Record query is deprecated and
- will be removed. Pass a string instead.
- MSG
- end
- end
- end
-end
diff --git a/activerecord/lib/active_record/relation/predicate_builder/polymorphic_array_handler.rb b/activerecord/lib/active_record/relation/predicate_builder/polymorphic_array_handler.rb
deleted file mode 100644
index b6c6240343..0000000000
--- a/activerecord/lib/active_record/relation/predicate_builder/polymorphic_array_handler.rb
+++ /dev/null
@@ -1,57 +0,0 @@
-module ActiveRecord
- class PredicateBuilder
- class PolymorphicArrayHandler # :nodoc:
- def initialize(predicate_builder)
- @predicate_builder = predicate_builder
- end
-
- def call(attribute, value)
- table = value.associated_table
- queries = value.type_to_ids_mapping.map do |type, ids|
- { table.association_foreign_type.to_s => type, table.association_foreign_key.to_s => ids }
- end
-
- predicates = queries.map { |query| predicate_builder.build_from_hash(query) }
-
- if predicates.size > 1
- type_and_ids_predicates = predicates.map { |type_predicate, id_predicate| Arel::Nodes::Grouping.new(type_predicate.and(id_predicate)) }
- type_and_ids_predicates.inject(&:or)
- else
- predicates.first
- end
- end
-
- protected
-
- attr_reader :predicate_builder
- end
-
- class PolymorphicArrayValue # :nodoc:
- attr_reader :associated_table, :values
-
- def initialize(associated_table, values)
- @associated_table = associated_table
- @values = values
- end
-
- def type_to_ids_mapping
- default_hash = Hash.new { |hsh, key| hsh[key] = [] }
- values.each_with_object(default_hash) { |value, hash| hash[base_class(value).name] << convert_to_id(value) }
- end
-
- private
-
- def primary_key(value)
- associated_table.association_primary_key(base_class(value))
- end
-
- def base_class(value)
- value.class.base_class
- end
-
- def convert_to_id(value)
- value._read_attribute(primary_key(value))
- end
- end
- end
-end
diff --git a/activerecord/lib/active_record/relation/predicate_builder/polymorphic_array_value.rb b/activerecord/lib/active_record/relation/predicate_builder/polymorphic_array_value.rb
new file mode 100644
index 0000000000..aae04d9348
--- /dev/null
+++ b/activerecord/lib/active_record/relation/predicate_builder/polymorphic_array_value.rb
@@ -0,0 +1,53 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ class PredicateBuilder
+ class PolymorphicArrayValue # :nodoc:
+ def initialize(associated_table, values)
+ @associated_table = associated_table
+ @values = values
+ end
+
+ def queries
+ type_to_ids_mapping.map do |type, ids|
+ {
+ associated_table.association_foreign_type.to_s => type,
+ associated_table.association_foreign_key.to_s => ids
+ }
+ end
+ end
+
+ private
+ attr_reader :associated_table, :values
+
+ def type_to_ids_mapping
+ default_hash = Hash.new { |hsh, key| hsh[key] = [] }
+ values.each_with_object(default_hash) do |value, hash|
+ hash[klass(value).polymorphic_name] << convert_to_id(value)
+ end
+ end
+
+ def primary_key(value)
+ associated_table.association_join_primary_key(klass(value))
+ end
+
+ def klass(value)
+ case value
+ when Base
+ value.class
+ when Relation
+ value.klass
+ end
+ end
+
+ def convert_to_id(value)
+ case value
+ when Base
+ value._read_attribute(primary_key(value))
+ when Relation
+ value.select(primary_key(value))
+ end
+ end
+ end
+ end
+end
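PolymorphicArrayValue now only exposes the per-type hash conditions via #queries and leaves predicate construction to the caller; it also accepts relations as values, selecting their join primary key as a subquery. A sketch of the kind of query it backs, assuming hypothetical Post, Image and Comment models where Comment belongs_to :commentable, polymorphic: true:

    post  = Post.first
    image = Image.first

    Comment.where(commentable: [post, image])
    # #queries groups the values by polymorphic type, producing roughly:
    #   { "commentable_type" => "Post",  "commentable_id" => [post.id]  }
    #   { "commentable_type" => "Image", "commentable_id" => [image.id] }
    # which the predicate builder turns into grouped conditions joined with OR.
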
diff --git a/activerecord/lib/active_record/relation/predicate_builder/range_handler.rb b/activerecord/lib/active_record/relation/predicate_builder/range_handler.rb
index 306d4694ae..2ea27c8490 100644
--- a/activerecord/lib/active_record/relation/predicate_builder/range_handler.rb
+++ b/activerecord/lib/active_record/relation/predicate_builder/range_handler.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
module ActiveRecord
class PredicateBuilder
class RangeHandler # :nodoc:
@@ -8,26 +10,13 @@ module ActiveRecord
end
def call(attribute, value)
- if value.begin.respond_to?(:infinite?) && value.begin.infinite?
- if value.end.respond_to?(:infinite?) && value.end.infinite?
- attribute.not_in([])
- elsif value.exclude_end?
- attribute.lt(value.end)
- else
- attribute.lteq(value.end)
- end
- elsif value.end.respond_to?(:infinite?) && value.end.infinite?
- attribute.gteq(value.begin)
- elsif value.exclude_end?
- attribute.gteq(value.begin).and(attribute.lt(value.end))
- else
- attribute.between(value)
- end
+ begin_bind = predicate_builder.build_bind_attribute(attribute.name, value.begin)
+ end_bind = predicate_builder.build_bind_attribute(attribute.name, value.end)
+ attribute.between(RangeWithBinds.new(begin_bind, end_bind, value.exclude_end?))
end
- protected
-
- attr_reader :predicate_builder
+ private
+ attr_reader :predicate_builder
end
end
end
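RangeHandler no longer special-cases infinite endpoints itself; both endpoints become bind attributes and a single BETWEEN over a RangeWithBinds is emitted, with the open/closed-endpoint logic driven by the binds' own predicates (see QueryAttribute below). A sketch with a hypothetical Post model, SQL shown approximately in prepared-statement form:

    Post.where(created_at: 1.week.ago..Time.current)
    # => ... WHERE "posts"."created_at" BETWEEN $1 AND $2

    Post.where(comments_count: 10..Float::INFINITY)
    # An infinite endpoint is detected via the bind's #infinite?, so this
    # collapses to a one-sided comparison: ... WHERE "posts"."comments_count" >= $1
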
diff --git a/activerecord/lib/active_record/relation/predicate_builder/relation_handler.rb b/activerecord/lib/active_record/relation/predicate_builder/relation_handler.rb
index 8a910a82fe..c8bbfa5051 100644
--- a/activerecord/lib/active_record/relation/predicate_builder/relation_handler.rb
+++ b/activerecord/lib/active_record/relation/predicate_builder/relation_handler.rb
@@ -1,7 +1,13 @@
+# frozen_string_literal: true
+
module ActiveRecord
class PredicateBuilder
class RelationHandler # :nodoc:
def call(attribute, value)
+ if value.eager_loading?
+ value = value.send(:apply_join_dependency)
+ end
+
if value.select_values.empty?
value = value.select(value.arel_attribute(value.klass.primary_key))
end
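RelationHandler still turns a relation value into a subquery on its primary key when no explicit select is present, and now flattens an eager-loading inner relation into its join query first. A sketch with hypothetical Post and Author models, SQL shown approximately:

    Post.where(author_id: Author.where(name: "Jane"))
    # => SELECT "posts".* FROM "posts"
    #    WHERE "posts"."author_id" IN (SELECT "authors"."id" FROM "authors" WHERE "authors"."name" = ?)
    #
    # If the inner relation eager-loads (e.g. Author.eager_load(:posts)...),
    # apply_join_dependency runs first so the subquery carries the needed JOINs.
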
diff --git a/activerecord/lib/active_record/relation/query_attribute.rb b/activerecord/lib/active_record/relation/query_attribute.rb
index 7ba964e802..cd18f27330 100644
--- a/activerecord/lib/active_record/relation/query_attribute.rb
+++ b/activerecord/lib/active_record/relation/query_attribute.rb
@@ -1,8 +1,10 @@
-require 'active_record/attribute'
+# frozen_string_literal: true
+
+require "active_model/attribute"
module ActiveRecord
class Relation
- class QueryAttribute < Attribute # :nodoc:
+ class QueryAttribute < ActiveModel::Attribute # :nodoc:
def type_cast(value)
value
end
@@ -14,6 +16,35 @@ module ActiveRecord
def with_cast_value(value)
QueryAttribute.new(name, value, type)
end
+
+ def nil?
+ unless value_before_type_cast.is_a?(StatementCache::Substitute)
+ value_before_type_cast.nil? ||
+ type.respond_to?(:subtype, true) && value_for_database.nil?
+ end
+ rescue ::RangeError
+ end
+
+ def infinite?
+ infinity?(value_before_type_cast) || infinity?(value_for_database)
+ rescue ::RangeError
+ end
+
+ def unboundable?
+ if defined?(@_unboundable)
+ @_unboundable
+ else
+ value_for_database unless value_before_type_cast.is_a?(StatementCache::Substitute)
+ @_unboundable = nil
+ end
+ rescue ::RangeError
+ @_unboundable = type.cast(value_before_type_cast) <=> 0
+ end
+
+ private
+ def infinity?(value)
+ value.respond_to?(:infinite?) && value.infinite?
+ end
end
end
end
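The new nil?/infinite?/unboundable? predicates let the range handler and the Arel visitors interrogate a bind without serializing it twice; unboundable? memoizes its answer and reports which side of the type's range overflowed. A rough sketch, run in a Rails console; the column name, values and limit are purely illustrative:

    int4 = ActiveModel::Type::Integer.new(limit: 4)
    qa   = ActiveRecord::Relation::QueryAttribute

    qa.new("id", nil, int4).nil?                    # => true
    qa.new("id", Float::INFINITY, int4).infinite?   # => 1 (truthy)
    qa.new("id", 2**40, int4).unboundable?          # => 1 (overflows a 4-byte integer column)
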
diff --git a/activerecord/lib/active_record/relation/query_methods.rb b/activerecord/lib/active_record/relation/query_methods.rb
index 4533f3263f..b0535cfff5 100644
--- a/activerecord/lib/active_record/relation/query_methods.rb
+++ b/activerecord/lib/active_record/relation/query_methods.rb
@@ -1,9 +1,10 @@
+# frozen_string_literal: true
+
require "active_record/relation/from_clause"
require "active_record/relation/query_attribute"
require "active_record/relation/where_clause"
require "active_record/relation/where_clause_factory"
-require 'active_model/forbidden_attributes_protection'
-require 'active_support/core_ext/string/filters'
+require "active_model/forbidden_attributes_protection"
module ActiveRecord
module QueryMethods
@@ -55,73 +56,26 @@ module ActiveRecord
end
FROZEN_EMPTY_ARRAY = [].freeze
- Relation::MULTI_VALUE_METHODS.each do |name|
- class_eval <<-CODE, __FILE__, __LINE__ + 1
- def #{name}_values
- @values[:#{name}] || FROZEN_EMPTY_ARRAY
- end
+ FROZEN_EMPTY_HASH = {}.freeze
- def #{name}_values=(values)
- assert_mutability!
- @values[:#{name}] = values
+ Relation::VALUE_METHODS.each do |name|
+ method_name = \
+ case name
+ when *Relation::MULTI_VALUE_METHODS then "#{name}_values"
+ when *Relation::SINGLE_VALUE_METHODS then "#{name}_value"
+ when *Relation::CLAUSE_METHODS then "#{name}_clause"
end
- CODE
- end
-
- (Relation::SINGLE_VALUE_METHODS - [:create_with]).each do |name|
class_eval <<-CODE, __FILE__, __LINE__ + 1
- def #{name}_value # def readonly_value
- @values[:#{name}] # @values[:readonly]
+ def #{method_name} # def includes_values
+ get_value(#{name.inspect}) # get_value(:includes)
end # end
- CODE
- end
- Relation::SINGLE_VALUE_METHODS.each do |name|
- class_eval <<-CODE, __FILE__, __LINE__ + 1
- def #{name}_value=(value) # def readonly_value=(value)
- assert_mutability! # assert_mutability!
- @values[:#{name}] = value # @values[:readonly] = value
+ def #{method_name}=(value) # def includes_values=(value)
+ set_value(#{name.inspect}, value) # set_value(:includes, value)
end # end
CODE
end
- Relation::CLAUSE_METHODS.each do |name|
- class_eval <<-CODE, __FILE__, __LINE__ + 1
- def #{name}_clause # def where_clause
- @values[:#{name}] || new_#{name}_clause # @values[:where] || new_where_clause
- end # end
- #
- def #{name}_clause=(value) # def where_clause=(value)
- assert_mutability! # assert_mutability!
- @values[:#{name}] = value # @values[:where] = value
- end # end
- CODE
- end
-
- def bound_attributes
- result = from_clause.binds + arel.bind_values + where_clause.binds + having_clause.binds
- if limit_value && !string_containing_comma?(limit_value)
- result << Attribute.with_cast_value(
- "LIMIT".freeze,
- connection.sanitize_limit(limit_value),
- Type::Value.new,
- )
- end
- if offset_value
- result << Attribute.with_cast_value(
- "OFFSET".freeze,
- offset_value.to_i,
- Type::Value.new,
- )
- end
- result
- end
-
- FROZEN_EMPTY_HASH = {}.freeze
- def create_with_value # :nodoc:
- @values[:create_with] || FROZEN_EMPTY_HASH
- end
-
alias extensions extending_values
# Specify relationships to be included in the result set. For
@@ -146,7 +100,7 @@ module ActiveRecord
#
# === conditions
#
- # If you want to add conditions to your included models you'll have
+ # If you want to add string conditions to your included models, you'll have
# to explicitly reference them. For example:
#
# User.includes(:posts).where('posts.name = ?', 'example')
@@ -157,6 +111,12 @@ module ActiveRecord
#
# Note that #includes works with association names while #references needs
# the actual table name.
+ #
+ # If you pass the conditions via hash, you don't need to call #references
+ # explicitly, as #where references the tables for you. For example, this
+ # will work correctly:
+ #
+ # User.includes(:posts).where(posts: { name: 'example' })
def includes(*args)
check_if_method_has_arguments!(:includes, args)
spawn.includes!(*args)
@@ -200,6 +160,19 @@ module ActiveRecord
self
end
+ # Extracts a named +association+ from the relation. The named association is first preloaded,
+ # then the individual association records are collected from the relation. Like so:
+ #
+ # account.memberships.extract_associated(:user)
+ # # => Returns collection of User records
+ #
+ # This is short-hand for:
+ #
+ # account.memberships.preload(:user).collect(&:user)
+ def extract_associated(association)
+ preload(association).collect(&association)
+ end
+
# Use to indicate that the given +table_names+ are referenced by an SQL string,
# and should therefore be JOINed in any query rather than loaded separately.
# This method only works in conjunction with #includes.
@@ -225,12 +198,13 @@ module ActiveRecord
# Works in two unique ways.
#
- # First: takes a block so it can be used just like +Array#select+.
+ # First: takes a block so it can be used just like <tt>Array#select</tt>.
#
# Model.all.select { |m| m.field == value }
#
# This will build an array of objects from the database for the scope,
- # converting them into an array and iterating through them using +Array#select+.
+ # converting them into an array and iterating through them using
+ # <tt>Array#select</tt>.
#
# Second: Modifies the SELECT statement for the query so that only certain
# fields are retrieved:
@@ -263,12 +237,20 @@ module ActiveRecord
# Model.select(:field).first.other_field
# # => ActiveModel::MissingAttributeError: missing attribute: other_field
def select(*fields)
- return super if block_given?
- raise ArgumentError, 'Call this with at least one field' if fields.empty?
+ if block_given?
+ if fields.any?
+ raise ArgumentError, "`select' with block doesn't take arguments."
+ end
+
+ return super()
+ end
+
+ raise ArgumentError, "Call `select' with at least one field" if fields.empty?
spawn._select!(*fields)
end
def _select!(*fields) # :nodoc:
+ fields.reject!(&:blank?)
fields.flatten!
fields.map! do |field|
klass.attribute_alias?(field) ? klass.attribute_alias(field).to_sym : field
@@ -277,6 +259,27 @@ module ActiveRecord
self
end
+ # Allows you to change a previously set select statement.
+ #
+ # Post.select(:title, :body)
+ # # SELECT `posts`.`title`, `posts`.`body` FROM `posts`
+ #
+ # Post.select(:title, :body).reselect(:created_at)
+ # # SELECT `posts`.`created_at` FROM `posts`
+ #
+ # This is short-hand for <tt>unscope(:select).select(fields)</tt>.
+ # Note that we're unscoping the entire select statement.
+ def reselect(*args)
+ check_if_method_has_arguments!(:reselect, args)
+ spawn.reselect!(*args)
+ end
+
+ # Same as #reselect but operates on relation in-place instead of copying.
+ def reselect!(*args) # :nodoc:
+ self.select_values = args
+ self
+ end
+
# Allows to specify a group attribute:
#
# User.group(:name)
@@ -333,6 +336,7 @@ module ActiveRecord
spawn.order!(*args)
end
+ # Same as #order but operates on relation in-place instead of copying.
def order!(*args) # :nodoc:
preprocess_order_args(args)
@@ -354,6 +358,7 @@ module ActiveRecord
spawn.reorder!(*args)
end
+ # Same as #reorder but operates on relation in-place instead of copying.
def reorder!(*args) # :nodoc:
preprocess_order_args(args)
@@ -363,8 +368,8 @@ module ActiveRecord
end
VALID_UNSCOPING_VALUES = Set.new([:where, :select, :group, :order, :lock,
- :limit, :offset, :joins, :includes, :from,
- :readonly, :having])
+ :limit, :offset, :joins, :left_outer_joins, :annotate,
+ :includes, :from, :readonly, :having, :optimizer_hints])
# Removes an unwanted relation that is already defined on a chain of relations.
# This is useful when passing around chains of relations and would like to
@@ -411,7 +416,11 @@ module ActiveRecord
args.each do |scope|
case scope
when Symbol
- symbol_unscoping(scope)
+ scope = :left_outer_joins if scope == :left_joins
+ if !VALID_UNSCOPING_VALUES.include?(scope)
+ raise ArgumentError, "Called unscope() with invalid unscoping argument ':#{scope}'. Valid arguments are :#{VALID_UNSCOPING_VALUES.to_a.join(", :")}."
+ end
+ set_value(scope, DEFAULT_VALUES[scope])
when Hash
scope.each do |key, target_value|
if key != :where
@@ -476,20 +485,17 @@ module ActiveRecord
# => SELECT "users".* FROM "users" LEFT OUTER JOIN "posts" ON "posts"."user_id" = "users"."id"
#
def left_outer_joins(*args)
- check_if_method_has_arguments!(:left_outer_joins, args)
-
- args.compact!
- args.flatten!
-
+ check_if_method_has_arguments!(__callee__, args)
spawn.left_outer_joins!(*args)
end
alias :left_joins :left_outer_joins
def left_outer_joins!(*args) # :nodoc:
+ args.compact!
+ args.flatten!
self.left_outer_joins_values += args
self
end
- alias :left_joins! :left_outer_joins!
# Returns a new relation, which is the result of filtering the current relation
# according to the conditions in the arguments.
@@ -652,7 +658,7 @@ module ActiveRecord
# present). Neither relation may have a #limit, #offset, or #distinct set.
#
# Post.where("id = 1").or(Post.where("author_id = 3"))
- # # SELECT `posts`.* FROM `posts` WHERE (('id = 1' OR 'author_id = 3'))
+ # # SELECT `posts`.* FROM `posts` WHERE ((id = 1) OR (author_id = 3))
#
def or(other)
unless other.is_a? Relation
@@ -670,7 +676,8 @@ module ActiveRecord
end
self.where_clause = self.where_clause.or(other.where_clause)
- self.having_clause = self.having_clause.or(other.having_clause)
+ self.having_clause = having_clause.or(other.having_clause)
+ self.references_values += other.references_values
self
end
@@ -701,13 +708,6 @@ module ActiveRecord
end
def limit!(value) # :nodoc:
- if string_containing_comma?(value)
- # Remove `string_containing_comma?` when removing this deprecation
- ActiveSupport::Deprecation.warn(<<-WARNING.squish)
- Passing a string to limit in the form "1,2" is deprecated and will be
- removed in Rails 5.1. Please call `offset` explicitly instead.
- WARNING
- end
self.limit_value = value
self
end
@@ -774,7 +774,7 @@ module ActiveRecord
# end
#
def none
- where("1=0").extending!(NullRelation)
+ spawn.none!
end
def none! # :nodoc:
@@ -818,7 +818,7 @@ module ActiveRecord
value = sanitize_forbidden_attributes(value)
self.create_with_value = create_with_value.merge(value)
else
- self.create_with_value = {}
+ self.create_with_value = FROZEN_EMPTY_HASH
end
self
@@ -859,16 +859,12 @@ module ActiveRecord
def distinct(value = true)
spawn.distinct!(value)
end
- alias uniq distinct
- deprecate uniq: :distinct
# Like #distinct, but modifies relation in place.
def distinct!(value = true) # :nodoc:
self.distinct_value = value
self
end
- alias uniq! distinct!
- deprecate uniq!: :distinct!
# Used to extend a scope with additional methods, either through
# a module or through a block provided.
@@ -924,6 +920,29 @@ module ActiveRecord
self
end
+ # Specify optimizer hints to be used in the SELECT statement.
+ #
+ # Example (for MySQL):
+ #
+ # Topic.optimizer_hints("MAX_EXECUTION_TIME(50000)", "NO_INDEX_MERGE(topics)")
+ # # SELECT /*+ MAX_EXECUTION_TIME(50000) NO_INDEX_MERGE(topics) */ `topics`.* FROM `topics`
+ #
+ # Example (for PostgreSQL with pg_hint_plan):
+ #
+ # Topic.optimizer_hints("SeqScan(topics)", "Parallel(topics 8)")
+ # # SELECT /*+ SeqScan(topics) Parallel(topics 8) */ "topics".* FROM "topics"
+ def optimizer_hints(*args)
+ check_if_method_has_arguments!(:optimizer_hints, args)
+ spawn.optimizer_hints!(*args)
+ end
+
+ def optimizer_hints!(*args) # :nodoc:
+ args.flatten!
+
+ self.optimizer_hints_values += args
+ self
+ end
+
# Reverse the existing order clause on the relation.
#
# User.order('name ASC').reverse_order # generated SQL has 'ORDER BY name DESC'
@@ -938,299 +957,362 @@ module ActiveRecord
self
end
- # Returns the Arel object associated with the relation.
- def arel # :nodoc:
- @arel ||= build_arel
+ def skip_query_cache!(value = true) # :nodoc:
+ self.skip_query_cache_value = value
+ self
end
- private
-
- def assert_mutability!
- raise ImmutableRelation if @loaded
- raise ImmutableRelation if defined?(@arel) && @arel
+ def skip_preloading! # :nodoc:
+ self.skip_preloading_value = true
+ self
end
- def build_arel
- arel = Arel::SelectManager.new(table)
-
- build_joins(arel, joins_values.flatten) unless joins_values.empty?
- build_left_outer_joins(arel, left_outer_joins_values.flatten) unless left_outer_joins_values.empty?
-
- arel.where(where_clause.ast) unless where_clause.empty?
- arel.having(having_clause.ast) unless having_clause.empty?
- if limit_value
- if string_containing_comma?(limit_value)
- arel.take(connection.sanitize_limit(limit_value))
- else
- arel.take(Arel::Nodes::BindParam.new)
- end
- end
- arel.skip(Arel::Nodes::BindParam.new) if offset_value
- arel.group(*arel_columns(group_values.uniq.reject(&:blank?))) unless group_values.empty?
-
- build_order(arel)
-
- build_select(arel)
+ # Adds an SQL comment to queries generated from this relation. For example:
+ #
+ # User.annotate("selecting user names").select(:name)
+ # # SELECT "users"."name" FROM "users" /* selecting user names */
+ #
+ # User.annotate("selecting", "user", "names").select(:name)
+ # # SELECT "users"."name" FROM "users" /* selecting */ /* user */ /* names */
+ #
+ # The SQL block comment delimiters, "/*" and "*/", will be added automatically.
+ def annotate(*args)
+ check_if_method_has_arguments!(:annotate, args)
+ spawn.annotate!(*args)
+ end
- arel.distinct(distinct_value)
- arel.from(build_from) unless from_clause.empty?
- arel.lock(lock_value) if lock_value
+ # Like #annotate, but modifies relation in place.
+ def annotate!(*args) # :nodoc:
+ self.annotate_values += args
+ self
+ end
- arel
+ # Returns the Arel object associated with the relation.
+ def arel(aliases = nil) # :nodoc:
+ @arel ||= build_arel(aliases)
end
- def symbol_unscoping(scope)
- if !VALID_UNSCOPING_VALUES.include?(scope)
- raise ArgumentError, "Called unscope() with invalid unscoping argument ':#{scope}'. Valid arguments are :#{VALID_UNSCOPING_VALUES.to_a.join(", :")}."
+ private
+ # Returns a relation value with a given name
+ def get_value(name)
+ @values.fetch(name, DEFAULT_VALUES[name])
end
- clause_method = Relation::CLAUSE_METHODS.include?(scope)
- multi_val_method = Relation::MULTI_VALUE_METHODS.include?(scope)
- if clause_method
- unscope_code = "#{scope}_clause="
- else
- unscope_code = "#{scope}_value#{'s' if multi_val_method}="
+ # Sets the relation value with the given name
+ def set_value(name, value)
+ assert_mutability!
+ @values[name] = value
end
- case scope
- when :order
- result = []
- else
- result = [] if multi_val_method
+ def assert_mutability!
+ raise ImmutableRelation if @loaded
+ raise ImmutableRelation if defined?(@arel) && @arel
end
- self.send(unscope_code, result)
- end
+ def build_arel(aliases)
+ arel = Arel::SelectManager.new(table)
+
+ aliases = build_joins(arel, joins_values.flatten, aliases) unless joins_values.empty?
+ build_left_outer_joins(arel, left_outer_joins_values.flatten, aliases) unless left_outer_joins_values.empty?
+
+ arel.where(where_clause.ast) unless where_clause.empty?
+ arel.having(having_clause.ast) unless having_clause.empty?
+ if limit_value
+ limit_attribute = ActiveModel::Attribute.with_cast_value(
+ "LIMIT",
+ connection.sanitize_limit(limit_value),
+ Type.default_value,
+ )
+ arel.take(Arel::Nodes::BindParam.new(limit_attribute))
+ end
+ if offset_value
+ offset_attribute = ActiveModel::Attribute.with_cast_value(
+ "OFFSET",
+ offset_value.to_i,
+ Type.default_value,
+ )
+ arel.skip(Arel::Nodes::BindParam.new(offset_attribute))
+ end
+ arel.group(*arel_columns(group_values.uniq.reject(&:blank?))) unless group_values.empty?
- def association_for_table(table_name)
- table_name = table_name.to_s
- @klass._reflect_on_association(table_name) ||
- @klass._reflect_on_association(table_name.singularize)
- end
+ build_order(arel)
- def build_from
- opts = from_clause.value
- name = from_clause.name
- case opts
- when Relation
- name ||= 'subquery'
- opts.arel.as(name.to_s)
- else
- opts
+ build_select(arel)
+
+ arel.optimizer_hints(*optimizer_hints_values) unless optimizer_hints_values.empty?
+ arel.distinct(distinct_value)
+ arel.from(build_from) unless from_clause.empty?
+ arel.lock(lock_value) if lock_value
+ arel.comment(*annotate_values) unless annotate_values.empty?
+
+ arel
end
- end
- def build_left_outer_joins(manager, outer_joins)
- buckets = outer_joins.group_by do |join|
- case join
- when Hash, Symbol, Array
- :association_join
+ def build_from
+ opts = from_clause.value
+ name = from_clause.name
+ case opts
+ when Relation
+ if opts.eager_loading?
+ opts = opts.send(:apply_join_dependency)
+ end
+ name ||= "subquery"
+ opts.arel.as(name.to_s)
else
- raise ArgumentError, 'only Hash, Symbol and Array are allowed'
+ opts
end
end
- build_join_query(manager, buckets, Arel::Nodes::OuterJoin)
- end
+ def build_left_outer_joins(manager, outer_joins, aliases)
+ buckets = outer_joins.group_by do |join|
+ case join
+ when Hash, Symbol, Array
+ :association_join
+ when ActiveRecord::Associations::JoinDependency
+ :stashed_join
+ else
+ raise ArgumentError, "only Hash, Symbol and Array are allowed"
+ end
+ end
- def build_joins(manager, joins)
- buckets = joins.group_by do |join|
- case join
- when String
- :string_join
- when Hash, Symbol, Array
- :association_join
- when ActiveRecord::Associations::JoinDependency
- :stashed_join
- when Arel::Nodes::Join
- :join_node
- else
- raise 'unknown class: %s' % join.class.name
+ build_join_query(manager, buckets, Arel::Nodes::OuterJoin, aliases)
+ end
+
+ def build_joins(manager, joins, aliases)
+ buckets = joins.group_by do |join|
+ case join
+ when String
+ :string_join
+ when Hash, Symbol, Array
+ :association_join
+ when ActiveRecord::Associations::JoinDependency
+ :stashed_join
+ when Arel::Nodes::Join
+ :join_node
+ else
+ raise "unknown class: %s" % join.class.name
+ end
end
+
+ build_join_query(manager, buckets, Arel::Nodes::InnerJoin, aliases)
end
- build_join_query(manager, buckets, Arel::Nodes::InnerJoin)
- end
+ def build_join_query(manager, buckets, join_type, aliases)
+ buckets.default = []
- def build_join_query(manager, buckets, join_type)
- buckets.default = []
+ association_joins = buckets[:association_join]
+ stashed_joins = buckets[:stashed_join]
+ join_nodes = buckets[:join_node].uniq
+ string_joins = buckets[:string_join].map(&:strip).uniq
- association_joins = buckets[:association_join]
- stashed_association_joins = buckets[:stashed_join]
- join_nodes = buckets[:join_node].uniq
- string_joins = buckets[:string_join].map(&:strip).uniq
+ join_list = join_nodes + convert_join_strings_to_ast(string_joins)
+ alias_tracker = alias_tracker(join_list, aliases)
- join_list = join_nodes + convert_join_strings_to_ast(manager, string_joins)
+ join_dependency = construct_join_dependency(association_joins)
- join_dependency = ActiveRecord::Associations::JoinDependency.new(
- @klass,
- association_joins,
- join_list
- )
+ joins = join_dependency.join_constraints(stashed_joins, join_type, alias_tracker)
+ joins.each { |join| manager.from(join) }
- join_infos = join_dependency.join_constraints stashed_association_joins, join_type
+ manager.join_sources.concat(join_list)
- join_infos.each do |info|
- info.joins.each { |join| manager.from(join) }
- manager.bind_values.concat info.binds
+ alias_tracker.aliases
end
- manager.join_sources.concat(join_list)
-
- manager
- end
+ def convert_join_strings_to_ast(joins)
+ joins
+ .flatten
+ .reject(&:blank?)
+ .map { |join| table.create_string_join(Arel.sql(join)) }
+ end
- def convert_join_strings_to_ast(table, joins)
- joins
- .flatten
- .reject(&:blank?)
- .map { |join| table.create_string_join(Arel.sql(join)) }
- end
+ def build_select(arel)
+ if select_values.any?
+ arel.project(*arel_columns(select_values.uniq))
+ elsif klass.ignored_columns.any?
+ arel.project(*klass.column_names.map { |field| arel_attribute(field) })
+ else
+ arel.project(table[Arel.star])
+ end
+ end
- def build_select(arel)
- if select_values.any?
- arel.project(*arel_columns(select_values.uniq))
- else
- arel.project(@klass.arel_table[Arel.star])
+ def arel_columns(columns)
+ columns.flat_map do |field|
+ case field
+ when Symbol
+ field = field.to_s
+ arel_column(field) { connection.quote_table_name(field) }
+ when String
+ arel_column(field) { field }
+ when Proc
+ field.call
+ else
+ field
+ end
+ end
end
- end
- def arel_columns(columns)
- columns.map do |field|
- if (Symbol === field || String === field) && (klass.has_attribute?(field) || klass.attribute_alias?(field)) && !from_clause.value
+ def arel_column(field)
+ field = klass.attribute_alias(field) if klass.attribute_alias?(field)
+ from = from_clause.name || from_clause.value
+
+ if klass.columns_hash.key?(field) && (!from || table_name_matches?(from))
arel_attribute(field)
- elsif Symbol === field
- connection.quote_table_name(field.to_s)
else
- field
+ yield
end
end
- end
- def reverse_sql_order(order_query)
- if order_query.empty?
- return [arel_attribute(primary_key).desc] if primary_key
- raise IrreversibleOrderError,
- "Relation has no current order and table has no primary key to be used as default order"
+ def table_name_matches?(from)
+ /(?:\A|(?<!FROM)\s)(?:\b#{table.name}\b|#{connection.quote_table_name(table.name)})(?!\.)/i.match?(from.to_s)
end
- order_query.flat_map do |o|
- case o
- when Arel::Attribute
- o.desc
- when Arel::Nodes::Ordering
- o.reverse
- when String
- if does_not_support_reverse?(o)
- raise IrreversibleOrderError, "Order #{o.inspect} can not be reversed automatically"
- end
- o.split(',').map! do |s|
- s.strip!
- s.gsub!(/\sasc\Z/i, ' DESC') || s.gsub!(/\sdesc\Z/i, ' ASC') || s.concat(' DESC')
+ def reverse_sql_order(order_query)
+ if order_query.empty?
+ return [arel_attribute(primary_key).desc] if primary_key
+ raise IrreversibleOrderError,
+ "Relation has no current order and table has no primary key to be used as default order"
+ end
+
+ order_query.flat_map do |o|
+ case o
+ when Arel::Attribute
+ o.desc
+ when Arel::Nodes::Ordering
+ o.reverse
+ when String
+ if does_not_support_reverse?(o)
+ raise IrreversibleOrderError, "Order #{o.inspect} cannot be reversed automatically"
+ end
+ o.split(",").map! do |s|
+ s.strip!
+ s.gsub!(/\sasc\Z/i, " DESC") || s.gsub!(/\sdesc\Z/i, " ASC") || (s << " DESC")
+ end
+ else
+ o
end
- else
- o
end
end
- end
- def does_not_support_reverse?(order)
- #uses sql function with multiple arguments
- order =~ /\([^()]*,[^()]*\)/ ||
- # uses "nulls first" like construction
- order =~ /nulls (first|last)\Z/i
- end
+ def does_not_support_reverse?(order)
+ # Account for String subclasses like Arel::Nodes::SqlLiteral that
+ # override methods like #count.
+ order = String.new(order) unless order.instance_of?(String)
- def build_order(arel)
- orders = order_values.uniq
- orders.reject!(&:blank?)
-
- arel.order(*orders) unless orders.empty?
- end
+ # Uses SQL function with multiple arguments.
+ (order.include?(",") && order.split(",").find { |section| section.count("(") != section.count(")") }) ||
+ # Uses "nulls first" like construction.
+ /\bnulls\s+(?:first|last)\b/i.match?(order)
+ end
- VALID_DIRECTIONS = [:asc, :desc, :ASC, :DESC,
- 'asc', 'desc', 'ASC', 'DESC'] # :nodoc:
+ def build_order(arel)
+ orders = order_values.uniq
+ orders.reject!(&:blank?)
- def validate_order_args(args)
- args.each do |arg|
- next unless arg.is_a?(Hash)
- arg.each do |_key, value|
- raise ArgumentError, "Direction \"#{value}\" is invalid. Valid " \
- "directions are: #{VALID_DIRECTIONS.inspect}" unless VALID_DIRECTIONS.include?(value)
- end
+ arel.order(*orders) unless orders.empty?
end
- end
- def preprocess_order_args(order_args)
- order_args.map! do |arg|
- klass.send(:sanitize_sql_for_order, arg)
- end
- order_args.flatten!
- validate_order_args(order_args)
+ VALID_DIRECTIONS = [:asc, :desc, :ASC, :DESC,
+ "asc", "desc", "ASC", "DESC"].to_set # :nodoc:
- references = order_args.grep(String)
- references.map! { |arg| arg =~ /^([a-zA-Z]\w*)\.(\w+)/ && $1 }.compact!
- references!(references) if references.any?
+ def validate_order_args(args)
+ args.each do |arg|
+ next unless arg.is_a?(Hash)
+ arg.each do |_key, value|
+ unless VALID_DIRECTIONS.include?(value)
+ raise ArgumentError,
+ "Direction \"#{value}\" is invalid. Valid directions are: #{VALID_DIRECTIONS.to_a.inspect}"
+ end
+ end
+ end
+ end
- # if a symbol is given we prepend the quoted table name
- order_args.map! do |arg|
- case arg
- when Symbol
- arel_attribute(arg).asc
- when Hash
- arg.map { |field, dir|
- arel_attribute(field).send(dir.downcase)
- }
- else
- arg
+ def preprocess_order_args(order_args)
+ order_args.map! do |arg|
+ klass.sanitize_sql_for_order(arg)
end
- end.flatten!
- end
+ order_args.flatten!
- # Checks to make sure that the arguments are not blank. Note that if some
- # blank-like object were initially passed into the query method, then this
- # method will not raise an error.
- #
- # Example:
- #
- # Post.references() # raises an error
- # Post.references([]) # does not raise an error
- #
- # This particular method should be called with a method_name and the args
- # passed into that method as an input. For example:
- #
- # def references(*args)
- # check_if_method_has_arguments!("references", args)
- # ...
- # end
- def check_if_method_has_arguments!(method_name, args)
- if args.blank?
- raise ArgumentError, "The method .#{method_name}() must contain arguments."
+ @klass.disallow_raw_sql!(
+ order_args.flat_map { |a| a.is_a?(Hash) ? a.keys : a },
+ permit: AttributeMethods::ClassMethods::COLUMN_NAME_WITH_ORDER
+ )
+
+ validate_order_args(order_args)
+
+ references = order_args.grep(String)
+ references.map! { |arg| arg =~ /^\W?(\w+)\W?\./ && $1 }.compact!
+ references!(references) if references.any?
+
+ # if a symbol is given we prepend the quoted table name
+ order_args.map! do |arg|
+ case arg
+ when Symbol
+ arg = arg.to_s
+ arel_column(arg) {
+ Arel.sql(connection.quote_table_name(arg))
+ }.asc
+ when Hash
+ arg.map { |field, dir|
+ case field
+ when Arel::Nodes::SqlLiteral
+ field.send(dir.downcase)
+ else
+ field = field.to_s
+ arel_column(field) {
+ Arel.sql(connection.quote_table_name(field))
+ }.send(dir.downcase)
+ end
+ }
+ else
+ arg
+ end
+ end.flatten!
end
- end
- def structurally_incompatible_values_for_or(other)
- Relation::SINGLE_VALUE_METHODS.reject { |m| send("#{m}_value") == other.send("#{m}_value") } +
- (Relation::MULTI_VALUE_METHODS - [:extending]).reject { |m| send("#{m}_values") == other.send("#{m}_values") } +
- (Relation::CLAUSE_METHODS - [:having, :where]).reject { |m| send("#{m}_clause") == other.send("#{m}_clause") }
- end
+ # Checks to make sure that the arguments are not blank. Note that if some
+ # blank-like object were initially passed into the query method, then this
+ # method will not raise an error.
+ #
+ # Example:
+ #
+ # Post.references() # raises an error
+ # Post.references([]) # does not raise an error
+ #
+ # This particular method should be called with a method_name and the args
+ # passed into that method as an input. For example:
+ #
+ # def references(*args)
+ # check_if_method_has_arguments!("references", args)
+ # ...
+ # end
+ def check_if_method_has_arguments!(method_name, args)
+ if args.blank?
+ raise ArgumentError, "The method .#{method_name}() must contain arguments."
+ end
+ end
- def new_where_clause
- Relation::WhereClause.empty
- end
- alias new_having_clause new_where_clause
+ STRUCTURAL_OR_METHODS = Relation::VALUE_METHODS - [:extending, :where, :having, :unscope, :references]
+ def structurally_incompatible_values_for_or(other)
+ values = other.values
+ STRUCTURAL_OR_METHODS.reject do |method|
+ get_value(method) == values.fetch(method, DEFAULT_VALUES[method])
+ end
+ end
- def where_clause_factory
- @where_clause_factory ||= Relation::WhereClauseFactory.new(klass, predicate_builder)
- end
- alias having_clause_factory where_clause_factory
+ def where_clause_factory
+ @where_clause_factory ||= Relation::WhereClauseFactory.new(klass, predicate_builder)
+ end
+ alias having_clause_factory where_clause_factory
- def new_from_clause
- Relation::FromClause.empty
- end
+ DEFAULT_VALUES = {
+ create_with: FROZEN_EMPTY_HASH,
+ where: Relation::WhereClause.empty,
+ having: Relation::WhereClause.empty,
+ from: Relation::FromClause.empty
+ }
- def string_containing_comma?(value)
- ::String === value && value.include?(",")
- end
+ Relation::MULTI_VALUE_METHODS.each do |value|
+ DEFAULT_VALUES[value] ||= FROZEN_EMPTY_ARRAY
+ end
end
end
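All of the generated readers and writers above now funnel through get_value/set_value, with defaults looked up in DEFAULT_VALUES instead of per-method fallbacks, and unscope simply resets a value back to its default. A short sketch of the externally visible behaviour, assuming a hypothetical Post model with a comments association; SQL shown approximately:

    Post.all.limit_value                      # => nil   (no DEFAULT_VALUES entry, fetch falls back to nil)
    Post.all.includes_values                  # => []    (FROZEN_EMPTY_ARRAY)
    Post.includes(:comments).includes_values  # => [:comments]
    Post.all.where_clause == ActiveRecord::Relation::WhereClause.empty  # => true

    Post.left_joins(:comments).unscope(:left_joins).to_sql
    # => SELECT "posts".* FROM "posts"        (:left_joins is mapped to :left_outer_joins and reset)
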
diff --git a/activerecord/lib/active_record/relation/record_fetch_warning.rb b/activerecord/lib/active_record/relation/record_fetch_warning.rb
index dbd08811fa..a7d07d23e1 100644
--- a/activerecord/lib/active_record/relation/record_fetch_warning.rb
+++ b/activerecord/lib/active_record/relation/record_fetch_warning.rb
@@ -1,16 +1,18 @@
+# frozen_string_literal: true
+
module ActiveRecord
class Relation
module RecordFetchWarning
# When this module is prepended to ActiveRecord::Relation and
- # `config.active_record.warn_on_records_fetched_greater_than` is
+ # +config.active_record.warn_on_records_fetched_greater_than+ is
# set to an integer, if the number of records a query returns is
- # greater than the value of `warn_on_records_fetched_greater_than`,
+ # greater than the value of +warn_on_records_fetched_greater_than+,
# a warning is logged. This allows for the detection of queries that
# return a large number of records, which could cause memory bloat.
#
# In most cases, fetching large number of records can be performed
# efficiently using the ActiveRecord::Batches methods.
- # See active_record/lib/relation/batches.rb for more information.
+ # See ActiveRecord::Batches for more information.
def exec_queries
QueryRegistry.reset
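For reference, the setting the comment describes lives in the application configuration; a minimal sketch:

    # config/application.rb (or an environment file)
    config.active_record.warn_on_records_fetched_greater_than = 1500
    # Queries returning more than 1500 records will then log a warning.
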
diff --git a/activerecord/lib/active_record/relation/spawn_methods.rb b/activerecord/lib/active_record/relation/spawn_methods.rb
index d5c18a2a4a..efc4b447aa 100644
--- a/activerecord/lib/active_record/relation/spawn_methods.rb
+++ b/activerecord/lib/active_record/relation/spawn_methods.rb
@@ -1,13 +1,14 @@
-require 'active_support/core_ext/hash/except'
-require 'active_support/core_ext/hash/slice'
-require 'active_record/relation/merger'
+# frozen_string_literal: true
+
+require "active_support/core_ext/hash/except"
+require "active_support/core_ext/hash/slice"
+require "active_record/relation/merger"
module ActiveRecord
module SpawnMethods
-
# This is overridden by Associations::CollectionProxy
def spawn #:nodoc:
- clone
+ already_in_scope? ? klass.all : clone
end
# Merges in the conditions from <tt>other</tt>, if <tt>other</tt> is an ActiveRecord::Relation.
@@ -67,8 +68,8 @@ module ActiveRecord
private
- def relation_with(values) # :nodoc:
- result = Relation.create(klass, table, predicate_builder, values)
+ def relation_with(values)
+ result = Relation.create(klass, values: values)
result.extend(*extending_values) if extending_values.any?
result
end
diff --git a/activerecord/lib/active_record/relation/where_clause.rb b/activerecord/lib/active_record/relation/where_clause.rb
index 2c2d6cfa47..47728aac30 100644
--- a/activerecord/lib/active_record/relation/where_clause.rb
+++ b/activerecord/lib/active_record/relation/where_clause.rb
@@ -1,68 +1,63 @@
+# frozen_string_literal: true
+
module ActiveRecord
class Relation
class WhereClause # :nodoc:
- attr_reader :binds
-
delegate :any?, :empty?, to: :predicates
- def initialize(predicates, binds)
+ def initialize(predicates)
@predicates = predicates
- @binds = binds
end
def +(other)
WhereClause.new(
predicates + other.predicates,
- binds + other.binds,
+ )
+ end
+
+ def -(other)
+ WhereClause.new(
+ predicates - other.predicates,
)
end
def merge(other)
WhereClause.new(
predicates_unreferenced_by(other) + other.predicates,
- non_conflicting_binds(other) + other.binds,
)
end
def except(*columns)
- WhereClause.new(
- predicates_except(columns),
- binds_except(columns),
- )
+ WhereClause.new(except_predicates(columns))
end
def or(other)
- if empty?
- self
- elsif other.empty?
- other
+ left = self - other
+ common = self - left
+ right = other - common
+
+ if left.empty? || right.empty?
+ common
else
- WhereClause.new(
- [ast.or(other.ast)],
- binds + other.binds
+ or_clause = WhereClause.new(
+ [left.ast.or(right.ast)],
)
+ common + or_clause
end
end
def to_h(table_name = nil)
- equalities = predicates.grep(Arel::Nodes::Equality)
+ equalities = equalities(predicates)
if table_name
equalities = equalities.select do |node|
node.left.relation.name == table_name
end
end
- binds = self.binds.map { |attr| [attr.name, attr.value] }.to_h
-
equalities.map { |node|
- name = node.left.name
- [name, binds.fetch(name.to_s) {
- case node.right
- when Array then node.right.map(&:val)
- when Arel::Nodes::Casted, Arel::Nodes::Quoted
- node.right.val
- end
- }]
+ name = node.left.name.to_s
+ value = extract_node_value(node.right)
+ [name, value]
}.to_h
end
@@ -72,102 +67,120 @@ module ActiveRecord
def ==(other)
other.is_a?(WhereClause) &&
- predicates == other.predicates &&
- binds == other.binds
+ predicates == other.predicates
end
def invert
- WhereClause.new(inverted_predicates, binds)
+ WhereClause.new(inverted_predicates)
end
def self.empty
- @empty ||= new([], [])
+ @empty ||= new([])
end
protected
- attr_reader :predicates
+ attr_reader :predicates
- def referenced_columns
- @referenced_columns ||= begin
- equality_nodes = predicates.select { |n| equality_node?(n) }
- Set.new(equality_nodes, &:left)
+ def referenced_columns
+ @referenced_columns ||= begin
+ equality_nodes = predicates.select { |n| equality_node?(n) }
+ Set.new(equality_nodes, &:left)
+ end
end
- end
private
+ def equalities(predicates)
+ equalities = []
+
+ predicates.each do |node|
+ case node
+ when Arel::Nodes::Equality
+ equalities << node
+ when Arel::Nodes::And
+ equalities.concat equalities(node.children)
+ end
+ end
- def predicates_unreferenced_by(other)
- predicates.reject do |n|
- equality_node?(n) && other.referenced_columns.include?(n.left)
+ equalities
end
- end
- def equality_node?(node)
- node.respond_to?(:operator) && node.operator == :==
- end
-
- def non_conflicting_binds(other)
- conflicts = referenced_columns & other.referenced_columns
- conflicts.map! { |node| node.name.to_s }
- binds.reject { |attr| conflicts.include?(attr.name) }
- end
+ def predicates_unreferenced_by(other)
+ predicates.reject do |n|
+ equality_node?(n) && other.referenced_columns.include?(n.left)
+ end
+ end
- def inverted_predicates
- predicates.map { |node| invert_predicate(node) }
- end
+ def equality_node?(node)
+ node.respond_to?(:operator) && node.operator == :==
+ end
- def invert_predicate(node)
- case node
- when NilClass
- raise ArgumentError, 'Invalid argument for .where.not(), got nil.'
- when Arel::Nodes::In
- Arel::Nodes::NotIn.new(node.left, node.right)
- when Arel::Nodes::Equality
- Arel::Nodes::NotEqual.new(node.left, node.right)
- when String
- Arel::Nodes::Not.new(Arel::Nodes::SqlLiteral.new(node))
- else
- Arel::Nodes::Not.new(node)
+ def inverted_predicates
+ predicates.map { |node| invert_predicate(node) }
end
- end
- def predicates_except(columns)
- predicates.reject do |node|
+ def invert_predicate(node)
case node
- when Arel::Nodes::Between, Arel::Nodes::In, Arel::Nodes::NotIn, Arel::Nodes::Equality, Arel::Nodes::NotEqual, Arel::Nodes::LessThan, Arel::Nodes::LessThanOrEqual, Arel::Nodes::GreaterThan, Arel::Nodes::GreaterThanOrEqual
- subrelation = (node.left.kind_of?(Arel::Attributes::Attribute) ? node.left : node.right)
- columns.include?(subrelation.name.to_s)
+ when NilClass
+ raise ArgumentError, "Invalid argument for .where.not(), got nil."
+ when Arel::Nodes::In
+ Arel::Nodes::NotIn.new(node.left, node.right)
+ when Arel::Nodes::IsNotDistinctFrom
+ Arel::Nodes::IsDistinctFrom.new(node.left, node.right)
+ when Arel::Nodes::IsDistinctFrom
+ Arel::Nodes::IsNotDistinctFrom.new(node.left, node.right)
+ when Arel::Nodes::Equality
+ Arel::Nodes::NotEqual.new(node.left, node.right)
+ when String
+ Arel::Nodes::Not.new(Arel::Nodes::SqlLiteral.new(node))
+ else
+ Arel::Nodes::Not.new(node)
end
end
- end
- def binds_except(columns)
- binds.reject do |attr|
- columns.include?(attr.name)
+ def except_predicates(columns)
+ predicates.reject do |node|
+ Arel.fetch_attribute(node) { |attr| columns.include?(attr.name.to_s) }
+ end
end
- end
- def predicates_with_wrapped_sql_literals
- non_empty_predicates.map do |node|
- if Arel::Nodes::Equality === node
- node
- else
- wrap_sql_literal(node)
+ def predicates_with_wrapped_sql_literals
+ non_empty_predicates.map do |node|
+ case node
+ when Arel::Nodes::SqlLiteral, ::String
+ wrap_sql_literal(node)
+ else node
+ end
end
end
- end
- def non_empty_predicates
- predicates - ['']
- end
+ ARRAY_WITH_EMPTY_STRING = [""]
+ def non_empty_predicates
+ predicates - ARRAY_WITH_EMPTY_STRING
+ end
- def wrap_sql_literal(node)
- if ::String === node
- node = Arel.sql(node)
+ def wrap_sql_literal(node)
+ if ::String === node
+ node = Arel.sql(node)
+ end
+ Arel::Nodes::Grouping.new(node)
+ end
+
+ def extract_node_value(node)
+ case node
+ when Array
+ node.map { |v| extract_node_value(v) }
+ when Arel::Nodes::Casted, Arel::Nodes::Quoted
+ node.val
+ when Arel::Nodes::BindParam
+ value = node.value
+ if value.respond_to?(:value_before_type_cast)
+ value.value_before_type_cast
+ else
+ value
+ end
+ end
end
- Arel::Nodes::Grouping.new(node)
- end
end
end
end
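The reworked WhereClause#or subtracts the predicates both sides share and only ORs what remains, so shared conditions stay ANDed at the top level rather than being duplicated inside the grouping. A sketch with a hypothetical Post model, SQL shown approximately:

    Post.where(draft: false, author_id: 1)
        .or(Post.where(draft: false, author_id: 2))
    # old behaviour: WHERE ((draft = FALSE AND author_id = 1) OR (draft = FALSE AND author_id = 2))
    # new behaviour: WHERE draft = FALSE AND (author_id = 1 OR author_id = 2)
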
diff --git a/activerecord/lib/active_record/relation/where_clause_factory.rb b/activerecord/lib/active_record/relation/where_clause_factory.rb
index c0ccb00b6f..c1b3eea9df 100644
--- a/activerecord/lib/active_record/relation/where_clause_factory.rb
+++ b/activerecord/lib/active_record/relation/where_clause_factory.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
module ActiveRecord
class Relation
class WhereClauseFactory # :nodoc:
@@ -7,31 +9,25 @@ module ActiveRecord
end
def build(opts, other)
- binds = []
-
case opts
when String, Array
- parts = [klass.send(:sanitize_sql, other.empty? ? opts : ([opts] + other))]
+ parts = [klass.sanitize_sql(other.empty? ? opts : ([opts] + other))]
when Hash
attributes = predicate_builder.resolve_column_aliases(opts)
- attributes = klass.send(:expand_hash_conditions_for_aggregates, attributes)
-
- attributes, binds = predicate_builder.create_binds(attributes)
+ attributes.stringify_keys!
parts = predicate_builder.build_from_hash(attributes)
when Arel::Nodes::Node
parts = [opts]
- binds = other
else
raise ArgumentError, "Unsupported argument type: #{opts} (#{opts.class})"
end
- WhereClause.new(parts, binds)
+ WhereClause.new(parts)
end
- protected
-
- attr_reader :klass, :predicate_builder
+ private
+ attr_reader :klass, :predicate_builder
end
end
end
diff --git a/activerecord/lib/active_record/result.rb b/activerecord/lib/active_record/result.rb
index 8e6cd6c82f..da6d10b6ec 100644
--- a/activerecord/lib/active_record/result.rb
+++ b/activerecord/lib/active_record/result.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
module ActiveRecord
###
# This class encapsulates a result returned from calling
@@ -19,7 +21,7 @@ module ActiveRecord
# ]
#
# # Get an array of hashes representing the result (column => value):
- # result.to_hash
+ # result.to_a
# # => [{"id" => 1, "title" => "title_1", "body" => "body_1"},
# {"id" => 2, "title" => "title_2", "body" => "body_2"},
# ...
@@ -32,8 +34,6 @@ module ActiveRecord
class Result
include Enumerable
- IDENTITY_TYPE = Type::Value.new # :nodoc:
-
attr_reader :columns, :rows, :column_types
def initialize(columns, rows, column_types = {})
@@ -43,10 +43,20 @@ module ActiveRecord
@column_types = column_types
end
+ # Returns true if this result set includes the column named +name+
+ def includes_column?(name)
+ @columns.include? name
+ end
+
+ # Returns the number of elements in the rows array.
def length
@rows.length
end
+ # Calls the given block once for each element in the rows collection,
+ # passing the row as a parameter.
+ #
+ # Returns an +Enumerator+ if no block is given.
def each
if block_given?
hash_rows.each { |row| yield row }
@@ -56,36 +66,62 @@ module ActiveRecord
end
def to_hash
- hash_rows
+ ActiveSupport::Deprecation.warn(<<-MSG.squish)
+ `ActiveRecord::Result#to_hash` has been renamed to `to_a`.
+ `to_hash` is deprecated and will be removed in Rails 6.1.
+ MSG
+ to_a
end
alias :map! :map
alias :collect! :map
- # Returns true if there are no records.
+ # Returns true if there are no records, otherwise false.
def empty?
rows.empty?
end
+ # Returns an array of hashes representing each row record.
def to_ary
hash_rows
end
+ alias :to_a :to_ary
+
def [](idx)
hash_rows[idx]
end
+ # Returns the first record from the rows collection.
+ # If the rows collection is empty, returns +nil+.
+ def first
+ return nil if @rows.empty?
+ Hash[@columns.zip(@rows.first)]
+ end
+
+ # Returns the last record from the rows collection.
+ # If the rows collection is empty, returns +nil+.
def last
- hash_rows.last
+ return nil if @rows.empty?
+ Hash[@columns.zip(@rows.last)]
end
def cast_values(type_overrides = {}) # :nodoc:
- types = columns.map { |name| column_type(name, type_overrides) }
- result = rows.map do |values|
- types.zip(values).map { |type, value| type.deserialize(value) }
- end
+ if columns.one?
+ # Separated to avoid allocating an array per row
+
+ type = column_type(columns.first, type_overrides)
- columns.one? ? result.map!(&:first) : result
+ rows.map do |(value)|
+ type.deserialize(value)
+ end
+ else
+ types = columns.map { |name| column_type(name, type_overrides) }
+
+ rows.map do |values|
+ Array.new(values.size) { |i| types[i].deserialize(values[i]) }
+ end
+ end
end
def initialize_copy(other)
@@ -97,36 +133,36 @@ module ActiveRecord
private
- def column_type(name, type_overrides = {})
- type_overrides.fetch(name) do
- column_types.fetch(name, IDENTITY_TYPE)
+ def column_type(name, type_overrides = {})
+ type_overrides.fetch(name) do
+ column_types.fetch(name, Type.default_value)
+ end
end
- end
- def hash_rows
- @hash_rows ||=
- begin
- # We freeze the strings to prevent them getting duped when
- # used as keys in ActiveRecord::Base's @attributes hash
- columns = @columns.map { |c| c.dup.freeze }
- @rows.map { |row|
- # In the past we used Hash[columns.zip(row)]
- # though elegant, the verbose way is much more efficient
- # both time and memory wise cause it avoids a big array allocation
- # this method is called a lot and needs to be micro optimised
- hash = {}
-
- index = 0
- length = columns.length
-
- while index < length
- hash[columns[index]] = row[index]
- index += 1
- end
-
- hash
- }
- end
- end
+ def hash_rows
+ @hash_rows ||=
+ begin
+ # We freeze the strings to prevent them getting duped when
+ # used as keys in ActiveRecord::Base's @attributes hash
+ columns = @columns.map(&:-@)
+ length = columns.length
+
+ @rows.map { |row|
+ # In the past we used Hash[columns.zip(row)]
+ # though elegant, the verbose way is much more efficient
+ # both time and memory wise because it avoids a big array allocation
+ # this method is called a lot and needs to be micro optimised
+ hash = {}
+
+ index = 0
+ while index < length
+ hash[columns[index]] = row[index]
+ index += 1
+ end
+
+ hash
+ }
+ end
+ end
end
end
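ActiveRecord::Result keeps its hash-like interface but gains cheaper accessors and deprecates #to_hash in favour of #to_a. A sketch; the table and columns are illustrative:

    result = ActiveRecord::Base.connection.select_all("SELECT id, title FROM posts")

    result.columns                    # => ["id", "title"]
    result.includes_column?("title")  # => true
    result.first                      # => { "id" => 1, "title" => "..." }  (no full hash_rows materialization)
    result.to_a                       # => array of row hashes; #to_hash still works but warns until its removal in Rails 6.1
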
diff --git a/activerecord/lib/active_record/runtime_registry.rb b/activerecord/lib/active_record/runtime_registry.rb
index 56e88bc661..4975cb8967 100644
--- a/activerecord/lib/active_record/runtime_registry.rb
+++ b/activerecord/lib/active_record/runtime_registry.rb
@@ -1,4 +1,6 @@
-require 'active_support/per_thread_registry'
+# frozen_string_literal: true
+
+require "active_support/per_thread_registry"
module ActiveRecord
# This is a thread locals registry for Active Record. For example:
@@ -12,9 +14,9 @@ module ActiveRecord
class RuntimeRegistry # :nodoc:
extend ActiveSupport::PerThreadRegistry
- attr_accessor :connection_handler, :sql_runtime, :connection_id
+ attr_accessor :connection_handler, :sql_runtime
- [:connection_handler, :sql_runtime, :connection_id].each do |val|
+ [:connection_handler, :sql_runtime].each do |val|
class_eval %{ def self.#{val}; instance.#{val}; end }, __FILE__, __LINE__
class_eval %{ def self.#{val}=(x); instance.#{val}=x; end }, __FILE__, __LINE__
end
diff --git a/activerecord/lib/active_record/sanitization.rb b/activerecord/lib/active_record/sanitization.rb
index a9e1fd0dad..750766714d 100644
--- a/activerecord/lib/active_record/sanitization.rb
+++ b/activerecord/lib/active_record/sanitization.rb
@@ -1,17 +1,10 @@
+# frozen_string_literal: true
+
module ActiveRecord
module Sanitization
extend ActiveSupport::Concern
module ClassMethods
- # Used to sanitize objects before they're used in an SQL SELECT statement.
- # Delegates to {connection.quote}[rdoc-ref:ConnectionAdapters::Quoting#quote].
- def sanitize(object) # :nodoc:
- connection.quote(object)
- end
- alias_method :quote_value, :sanitize
-
- protected
-
# Accepts an array or string of SQL conditions and sanitizes
# them into a valid SQL fragment for a WHERE clause.
#
@@ -34,8 +27,7 @@ module ActiveRecord
else condition
end
end
- alias_method :sanitize_sql, :sanitize_sql_for_conditions
- alias_method :sanitize_conditions, :sanitize_sql
+ alias :sanitize_sql :sanitize_sql_for_conditions
# Accepts an array, hash, or string of SQL conditions and sanitizes
# them into a valid SQL fragment for a SET clause.
@@ -46,12 +38,12 @@ module ActiveRecord
# sanitize_sql_for_assignment(["name=:name and group_id=:group_id", name: nil, group_id: 4])
# # => "name=NULL and group_id=4"
#
- # Post.send(:sanitize_sql_for_assignment, { name: nil, group_id: 4 })
+ # Post.sanitize_sql_for_assignment({ name: nil, group_id: 4 })
# # => "`posts`.`name` = NULL, `posts`.`group_id` = 4"
#
# sanitize_sql_for_assignment("name=NULL and group_id='4'")
# # => "name=NULL and group_id='4'"
- def sanitize_sql_for_assignment(assignments, default_table_name = self.table_name)
+ def sanitize_sql_for_assignment(assignments, default_table_name = table_name)
case assignments
when Array; sanitize_sql_array(assignments)
when Hash; sanitize_sql_hash_for_assignment(assignments, default_table_name)
@@ -68,47 +60,23 @@ module ActiveRecord
# sanitize_sql_for_order("id ASC")
# # => "id ASC"
def sanitize_sql_for_order(condition)
- if condition.is_a?(Array) && condition.first.to_s.include?('?')
- sanitize_sql_array(condition)
+ if condition.is_a?(Array) && condition.first.to_s.include?("?")
+ disallow_raw_sql!([condition.first],
+ permit: AttributeMethods::ClassMethods::COLUMN_NAME_WITH_ORDER
+ )
+
+ # Ensure we aren't dealing with a subclass of String that might
+ # override methods we use (eg. Arel::Nodes::SqlLiteral).
+ if condition.first.kind_of?(String) && !condition.first.instance_of?(String)
+ condition = [String.new(condition.first), *condition[1..-1]]
+ end
+
+ Arel.sql(sanitize_sql_array(condition))
else
condition
end
end
- # Accepts a hash of SQL conditions and replaces those attributes
- # that correspond to a {#composed_of}[rdoc-ref:Aggregations::ClassMethods#composed_of]
- # relationship with their expanded aggregate attribute values.
- #
- # Given:
- #
- # class Person < ActiveRecord::Base
- # composed_of :address, class_name: "Address",
- # mapping: [%w(address_street street), %w(address_city city)]
- # end
- #
- # Then:
- #
- # { address: Address.new("813 abc st.", "chicago") }
- # # => { address_street: "813 abc st.", address_city: "chicago" }
- def expand_hash_conditions_for_aggregates(attrs)
- expanded_attrs = {}
- attrs.each do |attr, value|
- if aggregation = reflect_on_aggregation(attr.to_sym)
- mapping = aggregation.mapping
- mapping.each do |field_attr, aggregate_attr|
- if mapping.size == 1 && !value.respond_to?(aggregate_attr)
- expanded_attrs[field_attr] = value
- else
- expanded_attrs[field_attr] = value.send(aggregate_attr)
- end
- end
- else
- expanded_attrs[attr] = value
- end
- end
- expanded_attrs
- end
-
# Sanitizes a hash of attribute/value pairs into SQL conditions for a SET clause.
#
# sanitize_sql_hash_for_assignment({ status: nil, group_id: 1 }, "posts")
@@ -116,9 +84,10 @@ module ActiveRecord
def sanitize_sql_hash_for_assignment(attrs, table)
c = connection
attrs.map do |attr, value|
- value = type_for_attribute(attr.to_s).serialize(value)
+ type = type_for_attribute(attr)
+ value = type.serialize(type.cast(value))
"#{c.quote_table_name_for_assignment(table, attr)} = #{c.quote(value)}"
- end.join(', ')
+ end.join(", ")
end
# Sanitizes a +string+ so that it is safe to use within an SQL
@@ -153,9 +122,9 @@ module ActiveRecord
# # => "name='foo''bar' and group_id='4'"
def sanitize_sql_array(ary)
statement, *values = ary
- if values.first.is_a?(Hash) && statement =~ /:\w+/
+ if values.first.is_a?(Hash) && /:\w+/.match?(statement)
replace_named_bind_variables(statement, values.first)
- elsif statement.include?('?')
+ elsif statement.include?("?")
replace_bind_variables(statement, values)
elsif statement.blank?
statement
@@ -164,57 +133,54 @@ module ActiveRecord
end
end
- def replace_bind_variables(statement, values) # :nodoc:
- raise_if_bind_arity_mismatch(statement, statement.count('?'), values.size)
- bound = values.dup
- c = connection
- statement.gsub(/\?/) do
- replace_bind_variable(bound.shift, c)
+ private
+ def replace_bind_variables(statement, values)
+ raise_if_bind_arity_mismatch(statement, statement.count("?"), values.size)
+ bound = values.dup
+ c = connection
+ statement.gsub(/\?/) do
+ replace_bind_variable(bound.shift, c)
+ end
end
- end
- def replace_bind_variable(value, c = connection) # :nodoc:
- if ActiveRecord::Relation === value
- value.to_sql
- else
- quote_bound_value(value, c)
+ def replace_bind_variable(value, c = connection)
+ if ActiveRecord::Relation === value
+ value.to_sql
+ else
+ quote_bound_value(value, c)
+ end
end
- end
- def replace_named_bind_variables(statement, bind_vars) # :nodoc:
- statement.gsub(/(:?):([a-zA-Z]\w*)/) do |match|
- if $1 == ':' # skip postgresql casts
- match # return the whole match
- elsif bind_vars.include?(match = $2.to_sym)
- replace_bind_variable(bind_vars[match])
- else
- raise PreparedStatementInvalid, "missing value for :#{match} in #{statement}"
+ def replace_named_bind_variables(statement, bind_vars)
+ statement.gsub(/(:?):([a-zA-Z]\w*)/) do |match|
+ if $1 == ":" # skip postgresql casts
+ match # return the whole match
+ elsif bind_vars.include?(match = $2.to_sym)
+ replace_bind_variable(bind_vars[match])
+ else
+ raise PreparedStatementInvalid, "missing value for :#{match} in #{statement}"
+ end
end
end
- end
- def quote_bound_value(value, c = connection) # :nodoc:
- if value.respond_to?(:map) && !value.acts_like?(:string)
- if value.respond_to?(:empty?) && value.empty?
- c.quote(nil)
+ def quote_bound_value(value, c = connection)
+ if value.respond_to?(:map) && !value.acts_like?(:string)
+ quoted = value.map { |v| c.quote(v) }
+ if quoted.empty?
+ c.quote(nil)
+ else
+ quoted.join(",")
+ end
else
- value.map { |v| c.quote(v) }.join(',')
+ c.quote(value)
end
- else
- c.quote(value)
end
- end
- def raise_if_bind_arity_mismatch(statement, expected, provided) # :nodoc:
- unless expected == provided
- raise PreparedStatementInvalid, "wrong number of bind variables (#{provided} for #{expected}) in: #{statement}"
+ def raise_if_bind_arity_mismatch(statement, expected, provided)
+ unless expected == provided
+ raise PreparedStatementInvalid, "wrong number of bind variables (#{provided} for #{expected}) in: #{statement}"
+ end
end
- end
- end
-
- # TODO: Deprecate this
- def quoted_id # :nodoc:
- self.class.quote_value(@attributes[self.class.primary_key].value_for_database)
end
end
end
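With the protected marker gone, the sanitizers are plain public class methods, so application code can call them directly instead of going through send. A short sketch with a hypothetical Post model:

    user_input = "O'Reilly"

    Post.sanitize_sql(["title = ?", user_input])
    # => "title = 'O''Reilly'"

    Post.sanitize_sql_for_assignment({ name: nil, group_id: 4 })
    # => "`posts`.`name` = NULL, `posts`.`group_id` = 4"   (quoting varies by adapter)
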
diff --git a/activerecord/lib/active_record/schema.rb b/activerecord/lib/active_record/schema.rb
index 784a02d2c3..76bf53387d 100644
--- a/activerecord/lib/active_record/schema.rb
+++ b/activerecord/lib/active_record/schema.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
module ActiveRecord
# = Active Record \Schema
#
@@ -37,10 +39,10 @@ module ActiveRecord
# The +info+ hash is optional, and if given is used to define metadata
# about the current schema (currently, only the schema's version):
#
- # ActiveRecord::Schema.define(version: 20380119000001) do
+ # ActiveRecord::Schema.define(version: 2038_01_19_000001) do
# ...
# end
- def self.define(info={}, &block)
+ def self.define(info = {}, &block)
new.define(info, &block)
end
@@ -48,21 +50,12 @@ module ActiveRecord
instance_eval(&block)
if info[:version].present?
- initialize_schema_migrations_table
- connection.assume_migrated_upto_version(info[:version], migrations_paths)
+ ActiveRecord::SchemaMigration.create_table
+ connection.assume_migrated_upto_version(info[:version])
end
ActiveRecord::InternalMetadata.create_table
- ActiveRecord::InternalMetadata[:environment] = ActiveRecord::Migrator.current_environment
+ ActiveRecord::InternalMetadata[:environment] = connection.migration_context.current_environment
end
-
- private
- # Returns the migrations paths.
- #
- # ActiveRecord::Schema.new.migrations_paths
- # # => ["db/migrate"] # Rails migration path by default.
- def migrations_paths # :nodoc:
- ActiveRecord::Migrator.migrations_paths
- end
end
end
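
The updated doc example relies on Ruby's underscore separators in integer literals, so the annotated version number is the same value as the plain form:

    2038_01_19_000001 == 20380119000001 # => true

    ActiveRecord::Schema.define(version: 2038_01_19_000001) do
      # ...
    end
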
diff --git a/activerecord/lib/active_record/schema_dumper.rb b/activerecord/lib/active_record/schema_dumper.rb
index affcd9aed1..d475e77444 100644
--- a/activerecord/lib/active_record/schema_dumper.rb
+++ b/activerecord/lib/active_record/schema_dumper.rb
@@ -1,4 +1,6 @@
-require 'stringio'
+# frozen_string_literal: true
+
+require "stringio"
module ActiveRecord
# = Active Record Schema Dumper
@@ -11,14 +13,19 @@ module ActiveRecord
##
# :singleton-method:
# A list of tables which should not be dumped to the schema.
- # Acceptable values are strings as well as regexp.
- # This setting is only used if ActiveRecord::Base.schema_format == :ruby
- cattr_accessor :ignore_tables
- @@ignore_tables = []
+ # Acceptable values are strings as well as regexp if ActiveRecord::Base.schema_format == :ruby.
+ # Only strings are accepted if ActiveRecord::Base.schema_format == :sql.
+ cattr_accessor :ignore_tables, default: []
+
+ ##
+ # :singleton-method:
+    # Specify a custom regular expression matching foreign keys whose names
+    # should not be dumped to db/schema.rb.
+ cattr_accessor :fk_ignore_pattern, default: /^fk_rails_[0-9a-f]{10}$/
class << self
- def dump(connection=ActiveRecord::Base.connection, stream=STDOUT, config = ActiveRecord::Base)
- new(connection, generate_options(config)).dump(stream)
+ def dump(connection = ActiveRecord::Base.connection, stream = STDOUT, config = ActiveRecord::Base)
+ connection.create_schema_dumper(generate_options(config)).dump(stream)
stream
end
@@ -43,27 +50,32 @@ module ActiveRecord
def initialize(connection, options = {})
@connection = connection
- @version = Migrator::current_version rescue nil
+ @version = connection.migration_context.current_version rescue nil
@options = options
end
- def header(stream)
- define_params = @version ? "version: #{@version}" : ""
+ # turns 20170404131909 into "2017_04_04_131909"
+ def formatted_version
+ stringified = @version.to_s
+ return stringified unless stringified.length == 14
+ stringified.insert(4, "_").insert(7, "_").insert(10, "_")
+ end
- if stream.respond_to?(:external_encoding) && stream.external_encoding
- stream.puts "# encoding: #{stream.external_encoding.name}"
- end
+ def define_params
+ @version ? "version: #{formatted_version}" : ""
+ end
+ def header(stream)
stream.puts <<HEADER
# This file is auto-generated from the current state of the database. Instead
# of editing this file, please use the migrations feature of Active Record to
# incrementally modify your database, and then regenerate this schema definition.
#
-# Note that this schema.rb definition is the authoritative source for your
-# database schema. If you need to create the application database on another
-# system, you should be using db:schema:load, not running all the migrations
-# from scratch. The latter is a flawed and unsustainable approach (the more migrations
-# you'll amass, the slower it'll run and the greater likelihood for issues).
+# This file is the source Rails uses to define your schema when running `rails
+# db:schema:load`. When creating a new database, `rails db:schema:load` tends to
+# be faster and is potentially less error prone than running all of your
+# migrations from scratch. Old migrations may fail to apply correctly if those
+# migrations use external dependencies or application code.
#
# It's strongly recommended that you check this file into your version control system.
@@ -76,20 +88,12 @@ HEADER
stream.puts "end"
end
+ # extensions are only supported by PostgreSQL
def extensions(stream)
- return unless @connection.supports_extensions?
- extensions = @connection.extensions
- if extensions.any?
- stream.puts " # These are extensions that must be enabled in order to support this database"
- extensions.each do |extension|
- stream.puts " enable_extension #{extension.inspect}"
- end
- stream.puts
- end
end
def tables(stream)
- sorted_tables = @connection.data_sources.sort - @connection.views
+ sorted_tables = @connection.tables.sort
sorted_tables.each do |table_name|
table(table_name, stream) unless ignored?(table_name)
@@ -109,77 +113,46 @@ HEADER
tbl = StringIO.new
# first dump primary key column
- if @connection.respond_to?(:primary_keys)
- pk = @connection.primary_keys(table)
- pk = pk.first unless pk.size > 1
- else
- pk = @connection.primary_key(table)
- end
+ pk = @connection.primary_key(table)
tbl.print " create_table #{remove_prefix_and_suffix(table).inspect}"
case pk
when String
- tbl.print ", primary_key: #{pk.inspect}" unless pk == 'id'
+ tbl.print ", primary_key: #{pk.inspect}" unless pk == "id"
pkcol = columns.detect { |c| c.name == pk }
- pkcolspec = @connection.column_spec_for_primary_key(pkcol)
+ pkcolspec = column_spec_for_primary_key(pkcol)
if pkcolspec.present?
- pkcolspec.each do |key, value|
- tbl.print ", #{key}: #{value}"
- end
+ tbl.print ", #{format_colspec(pkcolspec)}"
end
when Array
tbl.print ", primary_key: #{pk.inspect}"
else
tbl.print ", id: false"
end
- tbl.print ", force: :cascade"
table_options = @connection.table_options(table)
- tbl.print ", options: #{table_options.inspect}" unless table_options.blank?
+ if table_options.present?
+ tbl.print ", #{format_options(table_options)}"
+ end
- tbl.puts " do |t|"
+ tbl.puts ", force: :cascade do |t|"
# then dump all non-primary key columns
- column_specs = columns.map do |column|
+ columns.each do |column|
raise StandardError, "Unknown type '#{column.sql_type}' for column '#{column.name}'" unless @connection.valid_type?(column.type)
next if column.name == pk
- @connection.column_spec(column)
- end.compact
-
- # find all migration keys used in this table
- keys = @connection.migration_keys
-
- # figure out the lengths for each column based on above keys
- lengths = keys.map { |key|
- column_specs.map { |spec|
- spec[key] ? spec[key].length + 2 : 0
- }.max
- }
-
- # the string we're going to sprintf our values against, with standardized column widths
- format_string = lengths.map{ |len| "%-#{len}s" }
-
- # find the max length for the 'type' column, which is special
- type_length = column_specs.map{ |column| column[:type].length }.max
-
- # add column type definition to our format string
- format_string.unshift " t.%-#{type_length}s "
-
- format_string *= ''
-
- column_specs.each do |colspec|
- values = keys.zip(lengths).map{ |key, len| colspec.key?(key) ? colspec[key] + ", " : " " * len }
- values.unshift colspec[:type]
- tbl.print((format_string % values).gsub(/,\s*$/, ''))
+ type, colspec = column_spec(column)
+ tbl.print " t.#{type} #{column.name.inspect}"
+ tbl.print ", #{format_colspec(colspec)}" if colspec.present?
tbl.puts
end
+ indexes_in_create(table, tbl)
+
tbl.puts " end"
tbl.puts
- indexes(table, tbl)
-
tbl.rewind
stream.print tbl.read
rescue => e
@@ -187,30 +160,14 @@ HEADER
stream.puts "# #{e.message}"
stream.puts
end
-
- stream
end
+ # Keep it for indexing materialized views
def indexes(table, stream)
if (indexes = @connection.indexes(table)).any?
add_index_statements = indexes.map do |index|
- statement_parts = [
- "add_index #{remove_prefix_and_suffix(index.table).inspect}",
- index.columns.inspect,
- "name: #{index.name.inspect}",
- ]
- statement_parts << 'unique: true' if index.unique
-
- index_lengths = (index.lengths || []).compact
- statement_parts << "length: #{Hash[index.columns.zip(index.lengths)].inspect}" if index_lengths.any?
-
- index_orders = index.orders || {}
- statement_parts << "order: #{index.orders.inspect}" if index_orders.any?
- statement_parts << "where: #{index.where.inspect}" if index.where
- statement_parts << "using: #{index.using.inspect}" if index.using
- statement_parts << "type: #{index.type.inspect}" if index.type
-
- " #{statement_parts.join(', ')}"
+ table_name = remove_prefix_and_suffix(index.table).inspect
+ " add_index #{([table_name] + index_parts(index)).join(', ')}"
end
stream.puts add_index_statements.sort.join("\n")
@@ -218,6 +175,31 @@ HEADER
end
end
+ def indexes_in_create(table, stream)
+ if (indexes = @connection.indexes(table)).any?
+ index_statements = indexes.map do |index|
+ " t.index #{index_parts(index).join(', ')}"
+ end
+ stream.puts index_statements.sort.join("\n")
+ end
+ end
+
+ def index_parts(index)
+ index_parts = [
+ index.columns.inspect,
+ "name: #{index.name.inspect}",
+ ]
+ index_parts << "unique: true" if index.unique
+ index_parts << "length: #{format_index_parts(index.lengths)}" if index.lengths.present?
+ index_parts << "order: #{format_index_parts(index.orders)}" if index.orders.present?
+ index_parts << "opclass: #{format_index_parts(index.opclasses)}" if index.opclasses.present?
+ index_parts << "where: #{index.where.inspect}" if index.where
+ index_parts << "using: #{index.using.inspect}" if !@connection.default_index_type?(index)
+ index_parts << "type: #{index.type.inspect}" if index.type
+ index_parts << "comment: #{index.comment.inspect}" if index.comment
+ index_parts
+ end
+
def foreign_keys(table, stream)
if (foreign_keys = @connection.foreign_keys(table)).any?
add_foreign_key_statements = foreign_keys.map do |foreign_key|
@@ -234,7 +216,7 @@ HEADER
parts << "primary_key: #{foreign_key.primary_key.inspect}"
end
- if foreign_key.name !~ /^fk_rails_[0-9a-f]{10}$/
+ if foreign_key.export_name_on_schema_dump?
parts << "name: #{foreign_key.name.inspect}"
end
@@ -248,8 +230,26 @@ HEADER
end
end
+ def format_colspec(colspec)
+ colspec.map { |key, value| "#{key}: #{value}" }.join(", ")
+ end
+
+ def format_options(options)
+ options.map { |key, value| "#{key}: #{value.inspect}" }.join(", ")
+ end
+
+ def format_index_parts(options)
+ if options.is_a?(Hash)
+ "{ #{format_options(options)} }"
+ else
+ options.inspect
+ end
+ end
+
def remove_prefix_and_suffix(table)
- table.gsub(/^(#{@options[:table_name_prefix]})(.+)(#{@options[:table_name_suffix]})$/, "\\2")
+ prefix = Regexp.escape(@options[:table_name_prefix].to_s)
+ suffix = Regexp.escape(@options[:table_name_suffix].to_s)
+ table.sub(/\A#{prefix}(.+)#{suffix}\z/, "\\1")
end
def ignored?(table_name)
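
Both class attributes touched above are typically set from application configuration. A possible initializer (file path and values are illustrative, not taken from this patch):

    # config/initializers/schema_dumper.rb
    ActiveRecord::SchemaDumper.ignore_tables = ["legacy_rows", /^tmp_/] # regexps apply to the :ruby format only
    ActiveRecord::SchemaDumper.fk_ignore_pattern = /^fk_custom_/
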
diff --git a/activerecord/lib/active_record/schema_migration.rb b/activerecord/lib/active_record/schema_migration.rb
index b6cb233e03..1fca1a18f6 100644
--- a/activerecord/lib/active_record/schema_migration.rb
+++ b/activerecord/lib/active_record/schema_migration.rb
@@ -1,5 +1,7 @@
-require 'active_record/scoping/default'
-require 'active_record/scoping/named'
+# frozen_string_literal: true
+
+require "active_record/scoping/default"
+require "active_record/scoping/named"
module ActiveRecord
# This class is used to create a table that keeps track of which migrations
@@ -8,6 +10,10 @@ module ActiveRecord
# to be executed the next time.
class SchemaMigration < ActiveRecord::Base # :nodoc:
class << self
+ def _internal?
+ true
+ end
+
def primary_key
"version"
end
@@ -17,7 +23,7 @@ module ActiveRecord
end
def table_exists?
- ActiveSupport::Deprecation.silence { connection.table_exists?(table_name) }
+ connection.table_exists?(table_name)
end
def create_table
@@ -39,7 +45,11 @@ module ActiveRecord
end
def normalized_versions
- pluck(:version).map { |v| normalize_migration_number v }
+ all_versions.map { |v| normalize_migration_number v }
+ end
+
+ def all_versions
+ order(:version).pluck(:version)
end
end
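
Since versions are stored as strings in the schema_migrations table, the new `all_versions` returns ordered strings; for example (values made up):

    ActiveRecord::SchemaMigration.all_versions
    # => ["20170404131909", "20380119000001"]
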
diff --git a/activerecord/lib/active_record/scoping.rb b/activerecord/lib/active_record/scoping.rb
index 7794af8ca4..35e9dcbffc 100644
--- a/activerecord/lib/active_record/scoping.rb
+++ b/activerecord/lib/active_record/scoping.rb
@@ -1,4 +1,6 @@
-require 'active_support/per_thread_registry'
+# frozen_string_literal: true
+
+require "active_support/per_thread_registry"
module ActiveRecord
module Scoping
@@ -9,33 +11,32 @@ module ActiveRecord
include Named
end
- module ClassMethods
- def current_scope #:nodoc:
- ScopeRegistry.value_for(:current_scope, self)
- end
-
- def current_scope=(scope) #:nodoc:
- ScopeRegistry.set_value_for(:current_scope, self, scope)
- end
-
+ module ClassMethods # :nodoc:
# Collects attributes from scopes that should be applied when creating
# an AR instance for the particular class this is called on.
- def scope_attributes # :nodoc:
+ def scope_attributes
all.scope_for_create
end
# Are there attributes associated with this scope?
- def scope_attributes? # :nodoc:
+ def scope_attributes?
current_scope
end
+
+ def current_scope(skip_inherited_scope = false)
+ ScopeRegistry.value_for(:current_scope, self, skip_inherited_scope)
+ end
+
+ def current_scope=(scope)
+ ScopeRegistry.set_value_for(:current_scope, self, scope)
+ end
end
def populate_with_current_scope_attributes # :nodoc:
return unless self.class.scope_attributes?
- self.class.scope_attributes.each do |att,value|
- send("#{att}=", value) if respond_to?("#{att}=")
- end
+ attributes = self.class.scope_attributes
+ _assign_attributes(attributes) if attributes.any?
end
def initialize_internals_callback # :nodoc:
@@ -75,8 +76,9 @@ module ActiveRecord
end
# Obtains the value for a given +scope_type+ and +model+.
- def value_for(scope_type, model)
+ def value_for(scope_type, model, skip_inherited_scope = false)
raise_invalid_scope_type!(scope_type)
+ return @registry[scope_type][model.name] if skip_inherited_scope
klass = model
base = model.base_class
while klass <= base
@@ -94,11 +96,11 @@ module ActiveRecord
private
- def raise_invalid_scope_type!(scope_type)
- if !VALID_SCOPE_TYPES.include?(scope_type)
- raise ArgumentError, "Invalid scope type '#{scope_type}' sent to the registry. Scope types must be included in VALID_SCOPE_TYPES"
+ def raise_invalid_scope_type!(scope_type)
+ if !VALID_SCOPE_TYPES.include?(scope_type)
+ raise ArgumentError, "Invalid scope type '#{scope_type}' sent to the registry. Scope types must be included in VALID_SCOPE_TYPES"
+ end
end
- end
end
end
end
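
To illustrate what `skip_inherited_scope` changes: the registry lookup normally walks from the receiving class up to its base class, while the exact-class lookup does not. A sketch using made-up STI models (`current_scope` is internal API, set directly here only to show the lookup):

    class Vehicle < ActiveRecord::Base; end
    class Car < Vehicle; end

    Vehicle.current_scope = Vehicle.where(active: true)
    Car.current_scope        # walks up to Vehicle and returns its scope
    Car.current_scope(true)  # => nil, exact-class lookup only
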
diff --git a/activerecord/lib/active_record/scoping/default.rb b/activerecord/lib/active_record/scoping/default.rb
index 9eab59ac78..87bcfd5181 100644
--- a/activerecord/lib/active_record/scoping/default.rb
+++ b/activerecord/lib/active_record/scoping/default.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
module ActiveRecord
module Scoping
module Default
@@ -5,11 +7,8 @@ module ActiveRecord
included do
# Stores the default scope for the class.
- class_attribute :default_scopes, instance_writer: false, instance_predicate: false
- class_attribute :default_scope_override, instance_writer: false, instance_predicate: false
-
- self.default_scopes = []
- self.default_scope_override = nil
+ class_attribute :default_scopes, instance_writer: false, instance_predicate: false, default: []
+ class_attribute :default_scope_override, instance_writer: false, instance_predicate: false, default: nil
end
module ClassMethods
@@ -44,105 +43,108 @@ module ActiveRecord
self.current_scope = nil
end
- protected
+ private
+
+ # Use this macro in your model to set a default scope for all operations on
+ # the model.
+ #
+ # class Article < ActiveRecord::Base
+ # default_scope { where(published: true) }
+ # end
+ #
+ # Article.all # => SELECT * FROM articles WHERE published = true
+ #
+ # The #default_scope is also applied while creating/building a record.
+ # It is not applied while updating a record.
+ #
+ # Article.new.published # => true
+ # Article.create.published # => true
+ #
+ # (You can also pass any object which responds to +call+ to the
+ # +default_scope+ macro, and it will be called when building the
+ # default scope.)
+ #
+ # If you use multiple #default_scope declarations in your model then
+ # they will be merged together:
+ #
+ # class Article < ActiveRecord::Base
+ # default_scope { where(published: true) }
+ # default_scope { where(rating: 'G') }
+ # end
+ #
+ # Article.all # => SELECT * FROM articles WHERE published = true AND rating = 'G'
+ #
+ # This is also the case with inheritance and module includes where the
+ # parent or module defines a #default_scope and the child or including
+ # class defines a second one.
+ #
+ # If you need to do more complex things with a default scope, you can
+ # alternatively define it as a class method:
+ #
+ # class Article < ActiveRecord::Base
+ # def self.default_scope
+ # # Should return a scope, you can call 'super' here etc.
+ # end
+ # end
+ def default_scope(scope = nil, &block) # :doc:
+ scope = block if block_given?
+
+ if scope.is_a?(Relation) || !scope.respond_to?(:call)
+ raise ArgumentError,
+ "Support for calling #default_scope without a block is removed. For example instead " \
+ "of `default_scope where(color: 'red')`, please use " \
+ "`default_scope { where(color: 'red') }`. (Alternatively you can just redefine " \
+ "self.default_scope.)"
+ end
- # Use this macro in your model to set a default scope for all operations on
- # the model.
- #
- # class Article < ActiveRecord::Base
- # default_scope { where(published: true) }
- # end
- #
- # Article.all # => SELECT * FROM articles WHERE published = true
- #
- # The #default_scope is also applied while creating/building a record.
- # It is not applied while updating a record.
- #
- # Article.new.published # => true
- # Article.create.published # => true
- #
- # (You can also pass any object which responds to +call+ to the
- # +default_scope+ macro, and it will be called when building the
- # default scope.)
- #
- # If you use multiple #default_scope declarations in your model then
- # they will be merged together:
- #
- # class Article < ActiveRecord::Base
- # default_scope { where(published: true) }
- # default_scope { where(rating: 'G') }
- # end
- #
- # Article.all # => SELECT * FROM articles WHERE published = true AND rating = 'G'
- #
- # This is also the case with inheritance and module includes where the
- # parent or module defines a #default_scope and the child or including
- # class defines a second one.
- #
- # If you need to do more complex things with a default scope, you can
- # alternatively define it as a class method:
- #
- # class Article < ActiveRecord::Base
- # def self.default_scope
- # # Should return a scope, you can call 'super' here etc.
- # end
- # end
- def default_scope(scope = nil)
- scope = Proc.new if block_given?
-
- if scope.is_a?(Relation) || !scope.respond_to?(:call)
- raise ArgumentError,
- "Support for calling #default_scope without a block is removed. For example instead " \
- "of `default_scope where(color: 'red')`, please use " \
- "`default_scope { where(color: 'red') }`. (Alternatively you can just redefine " \
- "self.default_scope.)"
+ self.default_scopes += [scope]
end
- self.default_scopes += [scope]
- end
-
- def build_default_scope(base_rel = nil) # :nodoc:
- return if abstract_class?
+ def build_default_scope(relation = relation())
+ return if abstract_class?
- if self.default_scope_override.nil?
- self.default_scope_override = !Base.is_a?(method(:default_scope).owner)
- end
+ if default_scope_override.nil?
+ self.default_scope_override = !Base.is_a?(method(:default_scope).owner)
+ end
- if self.default_scope_override
- # The user has defined their own default scope method, so call that
- evaluate_default_scope { default_scope }
- elsif default_scopes.any?
- base_rel ||= relation
- evaluate_default_scope do
- default_scopes.inject(base_rel) do |default_scope, scope|
- scope = scope.respond_to?(:to_proc) ? scope : scope.method(:call)
- default_scope.merge(base_rel.instance_exec(&scope))
+ if default_scope_override
+ # The user has defined their own default scope method, so call that
+ evaluate_default_scope do
+ if scope = default_scope
+ relation.merge!(scope)
+ end
+ end
+ elsif default_scopes.any?
+ evaluate_default_scope do
+ default_scopes.inject(relation) do |default_scope, scope|
+ scope = scope.respond_to?(:to_proc) ? scope : scope.method(:call)
+ default_scope.instance_exec(&scope) || default_scope
+ end
end
end
end
- end
- def ignore_default_scope? # :nodoc:
- ScopeRegistry.value_for(:ignore_default_scope, base_class)
- end
+ def ignore_default_scope?
+ ScopeRegistry.value_for(:ignore_default_scope, base_class)
+ end
- def ignore_default_scope=(ignore) # :nodoc:
- ScopeRegistry.set_value_for(:ignore_default_scope, base_class, ignore)
- end
+ def ignore_default_scope=(ignore)
+ ScopeRegistry.set_value_for(:ignore_default_scope, base_class, ignore)
+ end
- # The ignore_default_scope flag is used to prevent an infinite recursion
- # situation where a default scope references a scope which has a default
- # scope which references a scope...
- def evaluate_default_scope # :nodoc:
- return if ignore_default_scope?
-
- begin
- self.ignore_default_scope = true
- yield
- ensure
- self.ignore_default_scope = false
+ # The ignore_default_scope flag is used to prevent an infinite recursion
+ # situation where a default scope references a scope which has a default
+ # scope which references a scope...
+ def evaluate_default_scope
+ return if ignore_default_scope?
+
+ begin
+ self.ignore_default_scope = true
+ yield
+ ensure
+ self.ignore_default_scope = false
+ end
end
- end
end
end
end
diff --git a/activerecord/lib/active_record/scoping/named.rb b/activerecord/lib/active_record/scoping/named.rb
index 5395bd6076..681a5c6250 100644
--- a/activerecord/lib/active_record/scoping/named.rb
+++ b/activerecord/lib/active_record/scoping/named.rb
@@ -1,6 +1,8 @@
-require 'active_support/core_ext/array'
-require 'active_support/core_ext/hash/except'
-require 'active_support/core_ext/kernel/singleton_class'
+# frozen_string_literal: true
+
+require "active_support/core_ext/array"
+require "active_support/core_ext/hash/except"
+require "active_support/core_ext/kernel/singleton_class"
module ActiveRecord
# = Active Record \Named \Scopes
@@ -22,27 +24,51 @@ module ActiveRecord
# You can define a scope that applies to all finders using
# {default_scope}[rdoc-ref:Scoping::Default::ClassMethods#default_scope].
def all
- if current_scope
- current_scope.clone
+ scope = current_scope
+
+ if scope
+ if scope._deprecated_scope_source
+ ActiveSupport::Deprecation.warn(<<~MSG.squish)
+ Class level methods will no longer inherit scoping from `#{scope._deprecated_scope_source}`
+ in Rails 6.1. To continue using the scoped relation, pass it into the block directly.
+ To instead access the full set of models, as Rails 6.1 will, use `#{name}.unscoped`.
+ MSG
+ end
+
+ if self == scope.klass
+ scope.clone
+ else
+ relation.merge!(scope)
+ end
else
default_scoped
end
end
- def default_scoped # :nodoc:
- scope = build_default_scope
+ def scope_for_association(scope = relation) # :nodoc:
+ if current_scope&.empty_scope?
+ scope
+ else
+ default_scoped(scope)
+ end
+ end
- if scope
- relation.spawn.merge!(scope)
+ def default_scoped(scope = relation) # :nodoc:
+ build_default_scope(scope) || scope
+ end
+
+ def default_extensions # :nodoc:
+ if scope = current_scope || build_default_scope
+ scope.extensions
else
- relation
+ []
end
end
# Adds a class method for retrieving and querying objects.
# The method is intended to return an ActiveRecord::Relation
# object, which is composable with other scopes.
- # If it returns nil or false, an
+ # If it returns +nil+ or +false+, an
# {all}[rdoc-ref:Scoping::Named::ClassMethods#all] scope is returned instead.
#
# A \scope represents a narrowing of a database query, such as
@@ -142,7 +168,7 @@ module ActiveRecord
# Article.featured.titles
def scope(name, body, &block)
unless body.respond_to?(:call)
- raise ArgumentError, 'The scope body needs to be callable.'
+ raise ArgumentError, "The scope body needs to be callable."
end
if dangerous_class_method?(name)
@@ -151,34 +177,40 @@ module ActiveRecord
"a class method with the same name."
end
+ if method_defined_within?(name, Relation)
+ raise ArgumentError, "You tried to define a scope named \"#{name}\" " \
+ "on the model \"#{self.name}\", but ActiveRecord::Relation already defined " \
+ "an instance method with the same name."
+ end
+
valid_scope_name?(name)
extension = Module.new(&block) if block
if body.respond_to?(:to_proc)
- singleton_class.send(:define_method, name) do |*args|
- scope = all.scoping { instance_exec(*args, &body) }
+ singleton_class.define_method(name) do |*args|
+ scope = all._exec_scope(name, *args, &body)
scope = scope.extending(extension) if extension
-
- scope || all
+ scope
end
else
- singleton_class.send(:define_method, name) do |*args|
- scope = all.scoping { body.call(*args) }
+ singleton_class.define_method(name) do |*args|
+ scope = body.call(*args) || all
scope = scope.extending(extension) if extension
-
- scope || all
+ scope
end
end
+
+ generate_relation_method(name)
end
- protected
+ private
- def valid_scope_name?(name)
- if respond_to?(name, true)
- logger.warn "Creating scope :#{name}. " \
- "Overwriting existing method #{self.name}.#{name}."
+ def valid_scope_name?(name)
+ if respond_to?(name, true) && logger
+ logger.warn "Creating scope :#{name}. " \
+ "Overwriting existing method #{self.name}.#{name}."
+ end
end
- end
end
end
end
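
One practical effect of the new `method_defined_within?(name, Relation)` guard: a scope whose name collides with an `ActiveRecord::Relation` instance method now raises instead of silently shadowing it. Model and scope names below are made up:

    class Post < ActiveRecord::Base
      scope :recent, -> { where("created_at > ?", 1.week.ago) } # fine

      # ActiveRecord::Relation already defines #values, so this would now
      # raise ArgumentError rather than overwrite it:
      # scope :values, -> { where(archived: false) }
    end
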
diff --git a/activerecord/lib/active_record/secure_token.rb b/activerecord/lib/active_record/secure_token.rb
index 8abda2ac49..bcdb33901b 100644
--- a/activerecord/lib/active_record/secure_token.rb
+++ b/activerecord/lib/active_record/secure_token.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
module ActiveRecord
module SecureToken
extend ActiveSupport::Concern
@@ -25,9 +27,9 @@ module ActiveRecord
# You're encouraged to add a unique index in the database to deal with this even more unlikely scenario.
def has_secure_token(attribute = :token)
# Load securerandom only when has_secure_token is used.
- require 'active_support/core_ext/securerandom'
+ require "active_support/core_ext/securerandom"
define_method("regenerate_#{attribute}") { update! attribute => self.class.generate_unique_secure_token }
- before_create { self.send("#{attribute}=", self.class.generate_unique_secure_token) unless self.send("#{attribute}?")}
+ before_create { send("#{attribute}=", self.class.generate_unique_secure_token) unless send("#{attribute}?") }
end
def generate_unique_secure_token
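
For context, typical usage of the macro whose callback was tidied above (assumes the model has the corresponding token columns):

    class User < ActiveRecord::Base
      has_secure_token              # fills #token on create, adds #regenerate_token
      has_secure_token :auth_token  # adds #regenerate_auth_token
    end

    user = User.create!
    user.token                 # set by the before_create callback shown above
    user.regenerate_auth_token
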
diff --git a/activerecord/lib/active_record/serialization.rb b/activerecord/lib/active_record/serialization.rb
index 5a408e7b8e..741fea43ce 100644
--- a/activerecord/lib/active_record/serialization.rb
+++ b/activerecord/lib/active_record/serialization.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
module ActiveRecord #:nodoc:
# = Active Record \Serialization
module Serialization
@@ -9,7 +11,7 @@ module ActiveRecord #:nodoc:
end
def serializable_hash(options = nil)
- options = options.try(:clone) || {}
+ options = options.try(:dup) || {}
options[:except] = Array(options[:except]).map(&:to_s)
options[:except] |= Array(self.class.inheritance_column)
diff --git a/activerecord/lib/active_record/statement_cache.rb b/activerecord/lib/active_record/statement_cache.rb
index 6c896ccea6..93bce15230 100644
--- a/activerecord/lib/active_record/statement_cache.rb
+++ b/activerecord/lib/active_record/statement_cache.rb
@@ -1,5 +1,6 @@
-module ActiveRecord
+# frozen_string_literal: true
+module ActiveRecord
# Statement cache is used to cache a single statement in order to avoid creating the AST again.
# Initializing the cache is done by passing the statement in the create block:
#
@@ -8,12 +9,12 @@ module ActiveRecord
# end
#
# The cached statement is executed by using the
- # [connection.execute]{rdoc-ref:ConnectionAdapters::DatabaseStatements#execute} method:
+ # {connection.execute}[rdoc-ref:ConnectionAdapters::DatabaseStatements#execute] method:
#
- # cache.execute([], Book, Book.connection)
+ # cache.execute([], Book.connection)
#
# The relation returned by the block is cached, and for each
- # [execute]{rdoc-ref:ConnectionAdapters::DatabaseStatements#execute}
+ # {execute}[rdoc-ref:ConnectionAdapters::DatabaseStatements#execute]
# call the cached relation gets duped. Database is queried when +to_a+ is called on the relation.
#
# If you want to cache the statement without the values you can use the +bind+ method of the
@@ -25,7 +26,7 @@ module ActiveRecord
#
# And pass the bind values as the first argument of +execute+ call.
#
- # cache.execute(["my book"], Book, Book.connection)
+ # cache.execute(["my book"], Book.connection)
class StatementCache # :nodoc:
class Substitute; end # :nodoc:
@@ -40,28 +41,53 @@ module ActiveRecord
end
class PartialQuery < Query # :nodoc:
- def initialize values
+ def initialize(values)
@values = values
- @indexes = values.each_with_index.find_all { |thing,i|
- Arel::Nodes::BindParam === thing
+ @indexes = values.each_with_index.find_all { |thing, i|
+ Substitute === thing
}.map(&:last)
end
def sql_for(binds, connection)
val = @values.dup
- binds = connection.prepare_binds_for_database(binds)
- @indexes.each { |i| val[i] = connection.quote(binds.shift) }
+ casted_binds = binds.map(&:value_for_database)
+ @indexes.each { |i| val[i] = connection.quote(casted_binds.shift) }
val.join
end
end
- def self.query(visitor, ast)
- Query.new visitor.accept(ast, Arel::Collectors::SQLString.new).value
+ class PartialQueryCollector
+ def initialize
+ @parts = []
+ @binds = []
+ end
+
+ def <<(str)
+ @parts << str
+ self
+ end
+
+ def add_bind(obj)
+ @binds << obj
+ @parts << Substitute.new
+ self
+ end
+
+ def value
+ [@parts, @binds]
+ end
+ end
+
+ def self.query(sql)
+ Query.new(sql)
end
- def self.partial_query(visitor, ast, collector)
- collected = visitor.accept(ast, collector).value
- PartialQuery.new collected
+ def self.partial_query(values)
+ PartialQuery.new(values)
+ end
+
+ def self.partial_query_collector
+ PartialQueryCollector.new
end
class Params # :nodoc:
@@ -70,7 +96,7 @@ module ActiveRecord
class BindMap # :nodoc:
def initialize(bound_attributes)
- @indexes = []
+ @indexes = []
@bound_attributes = bound_attributes
bound_attributes.each_with_index do |attr, i|
@@ -82,32 +108,41 @@ module ActiveRecord
def bind(values)
bas = @bound_attributes.dup
- @indexes.each_with_index { |offset,i| bas[offset] = bas[offset].with_cast_value(values[i]) }
+ @indexes.each_with_index { |offset, i| bas[offset] = bas[offset].with_cast_value(values[i]) }
bas
end
end
- attr_reader :bind_map, :query_builder
-
- def self.create(connection, block = Proc.new)
- relation = block.call Params.new
- bind_map = BindMap.new relation.bound_attributes
- query_builder = connection.cacheable_query relation.arel
- new query_builder, bind_map
+ def self.create(connection, callable = nil, &block)
+ relation = (callable || block).call Params.new
+ query_builder, binds = connection.cacheable_query(self, relation.arel)
+ bind_map = BindMap.new(binds)
+ new(query_builder, bind_map, relation.klass)
end
- def initialize(query_builder, bind_map)
+ def initialize(query_builder, bind_map, klass)
@query_builder = query_builder
- @bind_map = bind_map
+ @bind_map = bind_map
+ @klass = klass
end
- def execute(params, klass, connection)
+ def execute(params, connection, &block)
bind_values = bind_map.bind params
sql = query_builder.sql_for bind_values, connection
- klass.find_by_sql(sql, bind_values, preparable: true)
+ klass.find_by_sql(sql, bind_values, preparable: true, &block)
+ rescue ::RangeError
+ nil
end
- alias :call :execute
+
+ def self.unsupported_value?(value)
+ case value
+ when NilClass, Array, Range, Hash, Relation, Base then true
+ end
+ end
+
+ private
+ attr_reader :query_builder, :bind_map, :klass
end
end
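
Putting the updated API together, following the usage described in the comments above (the old `klass` argument to `execute` is gone because the cache now remembers `relation.klass`):

    cache = ActiveRecord::StatementCache.create(Book.connection) do |params|
      Book.where(name: params.bind)
    end

    cache.execute(["my book"], Book.connection)
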
diff --git a/activerecord/lib/active_record/store.rb b/activerecord/lib/active_record/store.rb
index 1b407f7702..6fecb06897 100644
--- a/activerecord/lib/active_record/store.rb
+++ b/activerecord/lib/active_record/store.rb
@@ -1,4 +1,6 @@
-require 'active_support/core_ext/hash/indifferent_access'
+# frozen_string_literal: true
+
+require "active_support/core_ext/hash/indifferent_access"
module ActiveRecord
# Store gives you a thin wrapper around serialize for the purpose of storing hashes in a single column.
@@ -9,14 +11,20 @@ module ActiveRecord
# of the model. This is very helpful for easily exposing store keys to a form or elsewhere that's
# already built around just accessing attributes on the model.
#
+ # Every accessor comes with dirty tracking methods (+key_changed?+, +key_was+ and +key_change+) and
+ # methods to access the changes made during the last save (+saved_change_to_key?+, +saved_change_to_key+ and
+ # +key_before_last_save+).
+ #
+  # NOTE: There is no +key_will_change!+ method for accessors; use +store_will_change!+ instead.
+ #
# Make sure that you declare the database column used for the serialized store as a text, so there's
# plenty of room.
#
# You can set custom coder to encode/decode your serialized attributes to/from different formats.
# JSON, YAML, Marshal are supported out of the box. Generally it can be any wrapper that provides +load+ and +dump+.
#
- # NOTE: If you are using PostgreSQL specific columns like +hstore+ or +json+ there is no need for
- # the serialization provided by {.store}[rdoc-ref:rdoc-ref:ClassMethods#store].
+  # NOTE: If you are using structured database data types (e.g. PostgreSQL +hstore+/+json+, or MySQL 5.7+
+  #   +json+) there is no need for the serialization provided by {.store}[rdoc-ref:ClassMethods#store].
# Simply use {.store_accessor}[rdoc-ref:ClassMethods#store_accessor] instead to generate
# the accessor methods. Be aware that these columns use a string keyed hash and do not allow access
# using a symbol.
@@ -29,24 +37,40 @@ module ActiveRecord
#
# class User < ActiveRecord::Base
# store :settings, accessors: [ :color, :homepage ], coder: JSON
+ # store :parent, accessors: [ :name ], coder: JSON, prefix: true
+ # store :spouse, accessors: [ :name ], coder: JSON, prefix: :partner
+ # store :settings, accessors: [ :two_factor_auth ], suffix: true
+ # store :settings, accessors: [ :login_retry ], suffix: :config
# end
#
- # u = User.new(color: 'black', homepage: '37signals.com')
+ # u = User.new(color: 'black', homepage: '37signals.com', parent_name: 'Mary', partner_name: 'Lily')
# u.color # Accessor stored attribute
+ # u.parent_name # Accessor stored attribute with prefix
+ # u.partner_name # Accessor stored attribute with custom prefix
+ # u.two_factor_auth_settings # Accessor stored attribute with suffix
+ # u.login_retry_config # Accessor stored attribute with custom suffix
# u.settings[:country] = 'Denmark' # Any attribute, even if not specified with an accessor
#
# # There is no difference between strings and symbols for accessing custom attributes
# u.settings[:country] # => 'Denmark'
# u.settings['country'] # => 'Denmark'
#
+ # # Dirty tracking
+ # u.color = 'green'
+ # u.color_changed? # => true
+ # u.color_was # => 'black'
+  #   u.color_change # => ['black', 'green']
+ #
# # Add additional accessors to an existing store through store_accessor
# class SuperUser < User
# store_accessor :settings, :privileges, :servants
+ # store_accessor :parent, :birthday, prefix: true
+ # store_accessor :settings, :secret_question, suffix: :config
# end
#
  # The stored attribute names can be retrieved using {.stored_attributes}[rdoc-ref:ClassMethods#stored_attributes].
#
- # User.stored_attributes[:settings] # [:color, :homepage]
+ # User.stored_attributes[:settings] # [:color, :homepage, :two_factor_auth, :login_retry]
#
# == Overwriting default accessors
#
@@ -78,22 +102,79 @@ module ActiveRecord
module ClassMethods
def store(store_attribute, options = {})
- serialize store_attribute, IndifferentCoder.new(options[:coder])
- store_accessor(store_attribute, options[:accessors]) if options.has_key? :accessors
+ serialize store_attribute, IndifferentCoder.new(store_attribute, options[:coder])
+ store_accessor(store_attribute, options[:accessors], options.slice(:prefix, :suffix)) if options.has_key? :accessors
end
- def store_accessor(store_attribute, *keys)
+ def store_accessor(store_attribute, *keys, prefix: nil, suffix: nil)
keys = keys.flatten
+ accessor_prefix =
+ case prefix
+ when String, Symbol
+ "#{prefix}_"
+ when TrueClass
+ "#{store_attribute}_"
+ else
+ ""
+ end
+ accessor_suffix =
+ case suffix
+ when String, Symbol
+ "_#{suffix}"
+ when TrueClass
+ "_#{store_attribute}"
+ else
+ ""
+ end
+
_store_accessors_module.module_eval do
keys.each do |key|
- define_method("#{key}=") do |value|
+ accessor_key = "#{accessor_prefix}#{key}#{accessor_suffix}"
+
+ define_method("#{accessor_key}=") do |value|
write_store_attribute(store_attribute, key, value)
end
- define_method(key) do
+ define_method(accessor_key) do
read_store_attribute(store_attribute, key)
end
+
+ define_method("#{accessor_key}_changed?") do
+ return false unless attribute_changed?(store_attribute)
+ prev_store, new_store = changes[store_attribute]
+ prev_store&.dig(key) != new_store&.dig(key)
+ end
+
+ define_method("#{accessor_key}_change") do
+ return unless attribute_changed?(store_attribute)
+ prev_store, new_store = changes[store_attribute]
+ [prev_store&.dig(key), new_store&.dig(key)]
+ end
+
+ define_method("#{accessor_key}_was") do
+ return unless attribute_changed?(store_attribute)
+ prev_store, _new_store = changes[store_attribute]
+ prev_store&.dig(key)
+ end
+
+ define_method("saved_change_to_#{accessor_key}?") do
+ return false unless saved_change_to_attribute?(store_attribute)
+ prev_store, new_store = saved_change_to_attribute(store_attribute)
+ prev_store&.dig(key) != new_store&.dig(key)
+ end
+
+ define_method("saved_change_to_#{accessor_key}") do
+ return unless saved_change_to_attribute?(store_attribute)
+ prev_store, new_store = saved_change_to_attribute(store_attribute)
+ [prev_store&.dig(key), new_store&.dig(key)]
+ end
+
+ define_method("#{accessor_key}_before_last_save") do
+ return unless saved_change_to_attribute?(store_attribute)
+ prev_store, _new_store = saved_change_to_attribute(store_attribute)
+ prev_store&.dig(key)
+ end
end
end
@@ -114,27 +195,26 @@ module ActiveRecord
def stored_attributes
parent = superclass.respond_to?(:stored_attributes) ? superclass.stored_attributes : {}
- if self.local_stored_attributes
- parent.merge!(self.local_stored_attributes) { |k, a, b| a | b }
+ if local_stored_attributes
+ parent.merge!(local_stored_attributes) { |k, a, b| a | b }
end
parent
end
end
- protected
- def read_store_attribute(store_attribute, key)
+ private
+ def read_store_attribute(store_attribute, key) # :doc:
accessor = store_accessor_for(store_attribute)
accessor.read(self, store_attribute, key)
end
- def write_store_attribute(store_attribute, key, value)
+ def write_store_attribute(store_attribute, key, value) # :doc:
accessor = store_accessor_for(store_attribute)
accessor.write(self, store_attribute, key, value)
end
- private
def store_accessor_for(store_attribute)
- type_for_attribute(store_attribute.to_s).accessor
+ type_for_attribute(store_attribute).accessor
end
class HashAccessor # :nodoc:
@@ -177,34 +257,34 @@ module ActiveRecord
end
end
- class IndifferentCoder # :nodoc:
- def initialize(coder_or_class_name)
- @coder =
- if coder_or_class_name.respond_to?(:load) && coder_or_class_name.respond_to?(:dump)
- coder_or_class_name
- else
- ActiveRecord::Coders::YAMLColumn.new(coder_or_class_name || Object)
- end
- end
+ class IndifferentCoder # :nodoc:
+ def initialize(attr_name, coder_or_class_name)
+ @coder =
+ if coder_or_class_name.respond_to?(:load) && coder_or_class_name.respond_to?(:dump)
+ coder_or_class_name
+ else
+ ActiveRecord::Coders::YAMLColumn.new(attr_name, coder_or_class_name || Object)
+ end
+ end
- def dump(obj)
- @coder.dump self.class.as_indifferent_hash(obj)
- end
+ def dump(obj)
+ @coder.dump self.class.as_indifferent_hash(obj)
+ end
- def load(yaml)
- self.class.as_indifferent_hash(@coder.load(yaml || ''))
- end
+ def load(yaml)
+ self.class.as_indifferent_hash(@coder.load(yaml || ""))
+ end
- def self.as_indifferent_hash(obj)
- case obj
- when ActiveSupport::HashWithIndifferentAccess
- obj
- when Hash
- obj.with_indifferent_access
- else
- ActiveSupport::HashWithIndifferentAccess.new
+ def self.as_indifferent_hash(obj)
+ case obj
+ when ActiveSupport::HashWithIndifferentAccess
+ obj
+ when Hash
+ obj.with_indifferent_access
+ else
+ ActiveSupport::HashWithIndifferentAccess.new
+ end
end
end
- end
end
end
diff --git a/activerecord/lib/active_record/suppressor.rb b/activerecord/lib/active_record/suppressor.rb
index 8ec4b48d31..8cdb8e0765 100644
--- a/activerecord/lib/active_record/suppressor.rb
+++ b/activerecord/lib/active_record/suppressor.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
module ActiveRecord
# ActiveRecord::Suppressor prevents the receiver from being saved during
# a given block.
@@ -30,10 +32,11 @@ module ActiveRecord
module ClassMethods
def suppress(&block)
+ previous_state = SuppressorRegistry.suppressed[name]
SuppressorRegistry.suppressed[name] = true
yield
ensure
- SuppressorRegistry.suppressed[name] = false
+ SuppressorRegistry.suppressed[name] = previous_state
end
end
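
The point of restoring `previous_state` is nested suppression: previously the inner block's `ensure` reset the flag to false, turning suppression back off for the rest of the outer block. The model name here is illustrative:

    Notification.suppress do
      Notification.suppress { }           # nested block
      Notification.create!(body: "hi")    # still suppressed after this change
    end
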
diff --git a/activerecord/lib/active_record/table_metadata.rb b/activerecord/lib/active_record/table_metadata.rb
index 0faad48ce3..b67479fb6a 100644
--- a/activerecord/lib/active_record/table_metadata.rb
+++ b/activerecord/lib/active_record/table_metadata.rb
@@ -1,7 +1,8 @@
+# frozen_string_literal: true
+
module ActiveRecord
class TableMetadata # :nodoc:
- delegate :foreign_type, :foreign_key, to: :association, prefix: true
- delegate :association_primary_key, to: :association
+ delegate :foreign_type, :foreign_key, :join_primary_key, :join_foreign_key, to: :association, prefix: true
def initialize(klass, arel_table, association = nil)
@klass = klass
@@ -10,9 +11,7 @@ module ActiveRecord
end
def resolve_column_aliases(hash)
- # This method is a hot spot, so for now, use Hash[] to dup the hash.
- # https://bugs.ruby-lang.org/issues/7166
- new_hash = Hash[hash]
+ new_hash = hash.dup
hash.each do |key, _|
if (key.is_a?(Symbol)) && klass.attribute_alias?(key)
new_hash[klass.attribute_alias(key)] = new_hash.delete(key)
@@ -31,21 +30,26 @@ module ActiveRecord
def type(column_name)
if klass
- klass.type_for_attribute(column_name.to_s)
+ klass.type_for_attribute(column_name)
else
- Type::Value.new
+ Type.default_value
end
end
+ def has_column?(column_name)
+ klass && klass.columns_hash.key?(column_name.to_s)
+ end
+
def associated_with?(association_name)
klass && klass._reflect_on_association(association_name)
end
def associated_table(table_name)
- return self if table_name == arel_table.name
+ association = klass._reflect_on_association(table_name) || klass._reflect_on_association(table_name.to_s.singularize)
- association = klass._reflect_on_association(table_name)
- if association && !association.polymorphic?
+ if !association && table_name == arel_table.name
+ return self
+ elsif association && !association.polymorphic?
association_klass = association.klass
arel_table = association_klass.arel_table.alias(table_name)
else
@@ -61,8 +65,15 @@ module ActiveRecord
association && association.polymorphic?
end
- protected
+ def aggregated_with?(aggregation_name)
+ klass && reflect_on_aggregation(aggregation_name)
+ end
+
+ def reflect_on_aggregation(aggregation_name)
+ klass.reflect_on_aggregation(aggregation_name)
+ end
- attr_reader :klass, :arel_table, :association
+ private
+ attr_reader :klass, :arel_table, :association
end
end
diff --git a/activerecord/lib/active_record/tasks/database_tasks.rb b/activerecord/lib/active_record/tasks/database_tasks.rb
index 8881986f1b..155d2b0b98 100644
--- a/activerecord/lib/active_record/tasks/database_tasks.rb
+++ b/activerecord/lib/active_record/tasks/database_tasks.rb
@@ -1,4 +1,6 @@
-require 'active_support/core_ext/string/filters'
+# frozen_string_literal: true
+
+require "active_record/database_configurations"
module ActiveRecord
module Tasks # :nodoc:
@@ -8,7 +10,7 @@ module ActiveRecord
# ActiveRecord::Tasks::DatabaseTasks is a utility class, which encapsulates
# logic behind common tasks used to manage database and migrations.
#
- # The tasks defined here are used with Rake tasks provided by Active Record.
+ # The tasks defined here are used with Rails commands provided by Active Record.
#
# In order to use DatabaseTasks, a few config values need to be set. All the needed
# config values are set by Rails already, so it's necessary to do it only if you
@@ -35,19 +37,29 @@ module ActiveRecord
#
# DatabaseTasks.create_current('production')
module DatabaseTasks
+ ##
+ # :singleton-method:
+      # Extra flags passed to the database CLI tool (mysqldump/pg_dump) when calling db:structure:dump
+ mattr_accessor :structure_dump_flags, instance_accessor: false
+
+ ##
+ # :singleton-method:
+      # Extra flags passed to the database CLI tool when calling db:structure:load
+ mattr_accessor :structure_load_flags, instance_accessor: false
+
extend self
attr_writer :current_config, :db_dir, :migrations_paths, :fixtures_path, :root, :env, :seed_loader
attr_accessor :database_configuration
- LOCAL_HOSTS = ['127.0.0.1', 'localhost']
+ LOCAL_HOSTS = ["127.0.0.1", "localhost"]
def check_protected_environments!
- unless ENV['DISABLE_DATABASE_ENVIRONMENT_CHECK']
- current = ActiveRecord::Migrator.current_environment
- stored = ActiveRecord::Migrator.last_stored_environment
+ unless ENV["DISABLE_DATABASE_ENVIRONMENT_CHECK"]
+ current = ActiveRecord::Base.connection.migration_context.current_environment
+ stored = ActiveRecord::Base.connection.migration_context.last_stored_environment
- if ActiveRecord::Migrator.protected_environment?
+ if ActiveRecord::Base.connection.migration_context.protected_environment?
raise ActiveRecord::ProtectedEnvironmentError.new(stored)
end
@@ -63,24 +75,24 @@ module ActiveRecord
@tasks[pattern] = task
end
- register_task(/mysql/, ActiveRecord::Tasks::MySQLDatabaseTasks)
- register_task(/postgresql/, ActiveRecord::Tasks::PostgreSQLDatabaseTasks)
- register_task(/sqlite/, ActiveRecord::Tasks::SQLiteDatabaseTasks)
+ register_task(/mysql/, "ActiveRecord::Tasks::MySQLDatabaseTasks")
+ register_task(/postgresql/, "ActiveRecord::Tasks::PostgreSQLDatabaseTasks")
+ register_task(/sqlite/, "ActiveRecord::Tasks::SQLiteDatabaseTasks")
def db_dir
@db_dir ||= Rails.application.config.paths["db"].first
end
def migrations_paths
- @migrations_paths ||= Rails.application.paths['db/migrate'].to_a
+ @migrations_paths ||= Rails.application.paths["db/migrate"].to_a
end
def fixtures_path
- @fixtures_path ||= if ENV['FIXTURES_PATH']
- File.join(root, ENV['FIXTURES_PATH'])
- else
- File.join(root, 'test', 'fixtures')
- end
+ @fixtures_path ||= if ENV["FIXTURES_PATH"]
+ File.join(root, ENV["FIXTURES_PATH"])
+ else
+ File.join(root, "test", "fixtures")
+ end
end
def root
@@ -91,35 +103,53 @@ module ActiveRecord
@env ||= Rails.env
end
+ def spec
+ @spec ||= "primary"
+ end
+
def seed_loader
@seed_loader ||= Rails.application
end
def current_config(options = {})
- options.reverse_merge! :env => env
+ options.reverse_merge! env: env
+ options[:spec] ||= "primary"
if options.has_key?(:config)
@current_config = options[:config]
else
- @current_config ||= ActiveRecord::Base.configurations[options[:env]]
+ @current_config ||= ActiveRecord::Base.configurations.configs_for(env_name: options[:env], spec_name: options[:spec]).config
end
end
def create(*arguments)
configuration = arguments.first
- class_for_adapter(configuration['adapter']).new(*arguments).create
+ class_for_adapter(configuration["adapter"]).new(*arguments).create
+ $stdout.puts "Created database '#{configuration['database']}'" if verbose?
rescue DatabaseAlreadyExists
- $stderr.puts "#{configuration['database']} already exists"
+ $stderr.puts "Database '#{configuration['database']}' already exists" if verbose?
rescue Exception => error
$stderr.puts error
- $stderr.puts "Couldn't create database for #{configuration.inspect}"
+ $stderr.puts "Couldn't create '#{configuration['database']}' database. Please check your configuration."
raise
end
def create_all
- old_pool = ActiveRecord::Base.connection_handler.retrieve_connection_pool(ActiveRecord::Base)
+ old_pool = ActiveRecord::Base.connection_handler.retrieve_connection_pool(ActiveRecord::Base.connection_specification_name)
each_local_configuration { |configuration| create configuration }
if old_pool
- ActiveRecord::Base.connection_handler.establish_connection(ActiveRecord::Base, old_pool.spec)
+ ActiveRecord::Base.connection_handler.establish_connection(old_pool.spec.to_hash)
+ end
+ end
+
+ def for_each
+ databases = Rails.application.config.load_database_yaml
+ database_configs = ActiveRecord::DatabaseConfigurations.new(databases).configs_for(env_name: Rails.env)
+
+ # if this is a single database application we don't want tasks for each primary database
+ return if database_configs.count == 1
+
+ database_configs.each do |db_config|
+ yield db_config.spec_name
end
end
@@ -132,12 +162,13 @@ module ActiveRecord
def drop(*arguments)
configuration = arguments.first
- class_for_adapter(configuration['adapter']).new(*arguments).drop
+ class_for_adapter(configuration["adapter"]).new(*arguments).drop
+ $stdout.puts "Dropped database '#{configuration['database']}'" if verbose?
rescue ActiveRecord::NoDatabaseError
$stderr.puts "Database '#{configuration['database']}' does not exist"
rescue Exception => error
$stderr.puts error
- $stderr.puts "Couldn't drop #{configuration['database']}"
+ $stderr.puts "Couldn't drop database '#{configuration['database']}'"
raise
end
@@ -151,39 +182,85 @@ module ActiveRecord
}
end
+ def truncate_tables(configuration)
+ ActiveRecord::Base.connected_to(database: { truncation: configuration }) do
+ table_names = ActiveRecord::Base.connection.tables
+ table_names -= [
+ ActiveRecord::Base.schema_migrations_table_name,
+ ActiveRecord::Base.internal_metadata_table_name
+ ]
+
+ ActiveRecord::Base.connection.truncate_tables(*table_names)
+ end
+ end
+ private :truncate_tables
+
+ def truncate_all(environment = env)
+ ActiveRecord::Base.configurations.configs_for(env_name: environment).each do |db_config|
+ truncate_tables db_config.config
+ end
+ end
+
def migrate
- verbose = ENV["VERBOSE"] ? ENV["VERBOSE"] == "true" : true
- version = ENV["VERSION"] ? ENV["VERSION"].to_i : nil
- scope = ENV['SCOPE']
- verbose_was, Migration.verbose = Migration.verbose, verbose
- Migrator.migrate(migrations_paths, version) do |migration|
+ check_target_version
+
+ scope = ENV["SCOPE"]
+ verbose_was, Migration.verbose = Migration.verbose, verbose?
+
+ Base.connection.migration_context.migrate(target_version) do |migration|
scope.blank? || scope == migration.scope
end
+
ActiveRecord::Base.clear_cache!
ensure
Migration.verbose = verbose_was
end
- def charset_current(environment = env)
- charset ActiveRecord::Base.configurations[environment]
+ def migrate_status
+ unless ActiveRecord::SchemaMigration.table_exists?
+ Kernel.abort "Schema migrations table does not exist yet."
+ end
+
+ # output
+ puts "\ndatabase: #{ActiveRecord::Base.connection_config[:database]}\n\n"
+ puts "#{'Status'.center(8)} #{'Migration ID'.ljust(14)} Migration Name"
+ puts "-" * 50
+ ActiveRecord::Base.connection.migration_context.migrations_status.each do |status, version, name|
+ puts "#{status.center(8)} #{version.ljust(14)} #{name}"
+ end
+ puts
+ end
+
+ def check_target_version
+ if target_version && !(Migration::MigrationFilenameRegexp.match?(ENV["VERSION"]) || /\A\d+\z/.match?(ENV["VERSION"]))
+ raise "Invalid format of target version: `VERSION=#{ENV['VERSION']}`"
+ end
+ end
+
+ def target_version
+ ENV["VERSION"].to_i if ENV["VERSION"] && !ENV["VERSION"].empty?
+ end
+
+ def charset_current(environment = env, specification_name = spec)
+ charset ActiveRecord::Base.configurations.configs_for(env_name: environment, spec_name: specification_name).config
end
def charset(*arguments)
configuration = arguments.first
- class_for_adapter(configuration['adapter']).new(*arguments).charset
+ class_for_adapter(configuration["adapter"]).new(*arguments).charset
end
- def collation_current(environment = env)
- collation ActiveRecord::Base.configurations[environment]
+ def collation_current(environment = env, specification_name = spec)
+ collation ActiveRecord::Base.configurations.configs_for(env_name: environment, spec_name: specification_name).config
end
def collation(*arguments)
configuration = arguments.first
- class_for_adapter(configuration['adapter']).new(*arguments).collation
+ class_for_adapter(configuration["adapter"]).new(*arguments).collation
end
def purge(configuration)
- class_for_adapter(configuration['adapter']).new(configuration).purge
+ class_for_adapter(configuration["adapter"]).new(configuration).purge
end
def purge_all
@@ -202,61 +279,80 @@ module ActiveRecord
def structure_dump(*arguments)
configuration = arguments.first
filename = arguments.delete_at 1
- class_for_adapter(configuration['adapter']).new(*arguments).structure_dump(filename)
+ class_for_adapter(configuration["adapter"]).new(*arguments).structure_dump(filename, structure_dump_flags)
end
def structure_load(*arguments)
configuration = arguments.first
filename = arguments.delete_at 1
- class_for_adapter(configuration['adapter']).new(*arguments).structure_load(filename)
+ class_for_adapter(configuration["adapter"]).new(*arguments).structure_load(filename, structure_load_flags)
end
- def load_schema(configuration, format = ActiveRecord::Base.schema_format, file = nil) # :nodoc:
- file ||= schema_file(format)
+ def load_schema(configuration, format = ActiveRecord::Base.schema_format, file = nil, environment = env, spec_name = "primary") # :nodoc:
+ file ||= dump_filename(spec_name, format)
+
+ verbose_was, Migration.verbose = Migration.verbose, verbose? && ENV["VERBOSE"]
+ check_schema_file(file)
+ ActiveRecord::Base.establish_connection(configuration)
case format
when :ruby
- check_schema_file(file)
- ActiveRecord::Base.establish_connection(configuration)
load(file)
when :sql
- check_schema_file(file)
structure_load(configuration, file)
else
raise ArgumentError, "unknown format #{format.inspect}"
end
ActiveRecord::InternalMetadata.create_table
- ActiveRecord::InternalMetadata[:environment] = ActiveRecord::Migrator.current_environment
+ ActiveRecord::InternalMetadata[:environment] = environment
+ ensure
+ Migration.verbose = verbose_was
end
- def load_schema_for(*args)
- ActiveSupport::Deprecation.warn(<<-MSG.squish)
- This method was renamed to `#load_schema` and will be removed in the future.
- Use `#load_schema` instead.
- MSG
- load_schema(*args)
+ def schema_file(format = ActiveRecord::Base.schema_format)
+ File.join(db_dir, schema_file_type(format))
end
- def schema_file(format = ActiveRecord::Base.schema_format)
+ def schema_file_type(format = ActiveRecord::Base.schema_format)
case format
when :ruby
- File.join(db_dir, "schema.rb")
+ "schema.rb"
when :sql
- File.join(db_dir, "structure.sql")
+ "structure.sql"
end
end
+ def dump_filename(namespace, format = ActiveRecord::Base.schema_format)
+ filename = if namespace == "primary"
+ schema_file_type(format)
+ else
+ "#{namespace}_#{schema_file_type(format)}"
+ end
+
+ ENV["SCHEMA"] || File.join(ActiveRecord::Tasks::DatabaseTasks.db_dir, filename)
+ end
+
+ def cache_dump_filename(namespace)
+ filename = if namespace == "primary"
+ "schema_cache.yml"
+ else
+ "#{namespace}_schema_cache.yml"
+ end
+
+ ENV["SCHEMA_CACHE"] || File.join(ActiveRecord::Tasks::DatabaseTasks.db_dir, filename)
+ end
+
def load_schema_current(format = ActiveRecord::Base.schema_format, file = nil, environment = env)
- each_current_configuration(environment) { |configuration|
- load_schema configuration, format, file
+ each_current_configuration(environment) { |configuration, spec_name, env|
+ load_schema(configuration, format, file, env, spec_name)
}
ActiveRecord::Base.establish_connection(environment.to_sym)
end
def check_schema_file(filename)
unless File.exist?(filename)
- message = %{#{filename} doesn't exist yet. Run `rails db:migrate` to create it, then try again.}
- message << %{ If you do not intend to use a database, you should instead alter #{Rails.root}/config/application.rb to limit the frameworks that will be loaded.} if defined?(::Rails)
+ message = +%{#{filename} doesn't exist yet. Run `rails db:migrate` to create it, then try again.}
+ message << %{ If you do not intend to use a database, you should instead alter #{Rails.root}/config/application.rb to limit the frameworks that will be loaded.} if defined?(::Rails.root)
Kernel.abort message
end
end
@@ -265,47 +361,62 @@ module ActiveRecord
if seed_loader
seed_loader.load_seed
else
- raise "You tried to load seed data, but no seed loader is specified. Please specify seed " +
- "loader with ActiveRecord::Tasks::DatabaseTasks.seed_loader = your_seed_loader\n" +
+ raise "You tried to load seed data, but no seed loader is specified. Please specify seed " \
+ "loader with ActiveRecord::Tasks::DatabaseTasks.seed_loader = your_seed_loader\n" \
"Seed loader should respond to load_seed method"
end
end
+ # Dumps the schema cache in YAML format for the connection into the file
+ #
+ # ==== Examples:
+ # ActiveRecord::Tasks::DatabaseTasks.dump_schema_cache(ActiveRecord::Base.connection, "tmp/schema_dump.yaml")
+ def dump_schema_cache(conn, filename)
+ conn.schema_cache.clear!
+ conn.data_sources.each { |table| conn.schema_cache.add(table) }
+ open(filename, "wb") { |f| f.write(YAML.dump(conn.schema_cache)) }
+ end
+
private
+ def verbose?
+ ENV["VERBOSE"] ? ENV["VERBOSE"] != "false" : true
+ end
- def class_for_adapter(adapter)
- key = @tasks.keys.detect { |pattern| adapter[pattern] }
- unless key
- raise DatabaseNotSupported, "Rake tasks not supported by '#{adapter}' adapter"
+ def class_for_adapter(adapter)
+ _key, task = @tasks.each_pair.detect { |pattern, _task| adapter[pattern] }
+ unless task
+ raise DatabaseNotSupported, "Rake tasks not supported by '#{adapter}' adapter"
+ end
+ task.is_a?(String) ? task.constantize : task
end
- @tasks[key]
- end
- def each_current_configuration(environment)
- environments = [environment]
- environments << 'test' if environment == 'development'
+ def each_current_configuration(environment)
+ environments = [environment]
+ environments << "test" if environment == "development"
- configurations = ActiveRecord::Base.configurations.values_at(*environments)
- configurations.compact.each do |configuration|
- yield configuration unless configuration['database'].blank?
+ environments.each do |env|
+ ActiveRecord::Base.configurations.configs_for(env_name: env).each do |db_config|
+ yield db_config.config, db_config.spec_name, env
+ end
+ end
end
- end
- def each_local_configuration
- ActiveRecord::Base.configurations.each_value do |configuration|
- next unless configuration['database']
+ def each_local_configuration
+ ActiveRecord::Base.configurations.configs_for.each do |db_config|
+ configuration = db_config.config
+ next unless configuration["database"]
- if local_database?(configuration)
- yield configuration
- else
- $stderr.puts "This task only modifies local databases. #{configuration['database']} is on a remote host."
+ if local_database?(configuration)
+ yield configuration
+ else
+ $stderr.puts "This task only modifies local databases. #{configuration['database']} is on a remote host."
+ end
end
end
- end
- def local_database?(configuration)
- configuration['host'].blank? || LOCAL_HOSTS.include?(configuration['host'])
- end
+ def local_database?(configuration)
+ configuration["host"].blank? || LOCAL_HOSTS.include?(configuration["host"])
+ end
end
end
end
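# Illustrative sketch, not part of the patch: how the per-spec filename helpers
# introduced above resolve, assuming db_dir is "db", the schema format is :ruby,
# and neither SCHEMA nor SCHEMA_CACHE is set in the environment.
ActiveRecord::Tasks::DatabaseTasks.dump_filename("primary")          # => "db/schema.rb"
ActiveRecord::Tasks::DatabaseTasks.dump_filename("animals")          # => "db/animals_schema.rb"
ActiveRecord::Tasks::DatabaseTasks.dump_filename("animals", :sql)    # => "db/animals_structure.sql"
ActiveRecord::Tasks::DatabaseTasks.cache_dump_filename("animals")    # => "db/animals_schema_cache.yml"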
diff --git a/activerecord/lib/active_record/tasks/mysql_database_tasks.rb b/activerecord/lib/active_record/tasks/mysql_database_tasks.rb
index af0c935342..1c1b29b5e1 100644
--- a/activerecord/lib/active_record/tasks/mysql_database_tasks.rb
+++ b/activerecord/lib/active_record/tasks/mysql_database_tasks.rb
@@ -1,8 +1,8 @@
+# frozen_string_literal: true
+
module ActiveRecord
module Tasks # :nodoc:
class MySQLDatabaseTasks # :nodoc:
- ACCESS_DENIED_ERROR = 1045
-
delegate :connection, :establish_connection, to: ActiveRecord::Base
def initialize(configuration)
@@ -11,38 +11,24 @@ module ActiveRecord
def create
establish_connection configuration_without_database
- connection.create_database configuration['database'], creation_options
+ connection.create_database configuration["database"], creation_options
establish_connection configuration
rescue ActiveRecord::StatementInvalid => error
- if /database exists/ === error.message
+ if error.message.include?("database exists")
raise DatabaseAlreadyExists
else
raise
end
- rescue error_class => error
- if error.respond_to?(:errno) && error.errno == ACCESS_DENIED_ERROR
- $stdout.print error.message
- establish_connection root_configuration_without_database
- connection.create_database configuration['database'], creation_options
- if configuration['username'] != 'root'
- connection.execute grant_statement.gsub(/\s+/, ' ').strip
- end
- establish_connection configuration
- else
- $stderr.puts error.inspect
- $stderr.puts "Couldn't create database for #{configuration.inspect}, #{creation_options.inspect}"
- $stderr.puts "(If you set the charset manually, make sure you have a matching collation)" if configuration['encoding']
- end
end
def drop
establish_connection configuration
- connection.drop_database configuration['database']
+ connection.drop_database configuration["database"]
end
def purge
establish_connection configuration
- connection.recreate_database configuration['database'], creation_options
+ connection.recreate_database configuration["database"], creation_options
end
def charset
@@ -53,99 +39,75 @@ module ActiveRecord
connection.collation
end
- def structure_dump(filename)
+ def structure_dump(filename, extra_flags)
args = prepare_command_options
args.concat(["--result-file", "#{filename}"])
args.concat(["--no-data"])
args.concat(["--routines"])
+ args.concat(["--skip-comments"])
+
+ ignore_tables = ActiveRecord::SchemaDumper.ignore_tables
+ if ignore_tables.any?
+ args += ignore_tables.map { |table| "--ignore-table=#{configuration['database']}.#{table}" }
+ end
+
args.concat(["#{configuration['database']}"])
+ args.unshift(*extra_flags) if extra_flags
- run_cmd('mysqldump', args, 'dumping')
+ run_cmd("mysqldump", args, "dumping")
end
- def structure_load(filename)
+ def structure_load(filename, extra_flags)
args = prepare_command_options
- args.concat(['--execute', %{SET FOREIGN_KEY_CHECKS = 0; SOURCE #{filename}; SET FOREIGN_KEY_CHECKS = 1}])
+ args.concat(["--execute", %{SET FOREIGN_KEY_CHECKS = 0; SOURCE #{filename}; SET FOREIGN_KEY_CHECKS = 1}])
args.concat(["--database", "#{configuration['database']}"])
+ args.unshift(*extra_flags) if extra_flags
- run_cmd('mysql', args, 'loading')
+ run_cmd("mysql", args, "loading")
end
private
- def configuration
- @configuration
- end
-
- def configuration_without_database
- configuration.merge('database' => nil)
- end
+ attr_reader :configuration
- def creation_options
- Hash.new.tap do |options|
- options[:charset] = configuration['encoding'] if configuration.include? 'encoding'
- options[:collation] = configuration['collation'] if configuration.include? 'collation'
+ def configuration_without_database
+ configuration.merge("database" => nil)
end
- end
- def error_class
- if configuration['adapter'] =~ /jdbc/
- require 'active_record/railties/jdbcmysql_error'
- ArJdbcMySQL::Error
- elsif defined?(Mysql2)
- Mysql2::Error
- else
- StandardError
+ def creation_options
+ Hash.new.tap do |options|
+ options[:charset] = configuration["encoding"] if configuration.include? "encoding"
+ options[:collation] = configuration["collation"] if configuration.include? "collation"
+ end
end
- end
-
- def grant_statement
- <<-SQL
-GRANT ALL PRIVILEGES ON #{configuration['database']}.*
- TO '#{configuration['username']}'@'localhost'
-IDENTIFIED BY '#{configuration['password']}' WITH GRANT OPTION;
- SQL
- end
-
- def root_configuration_without_database
- configuration_without_database.merge(
- 'username' => 'root',
- 'password' => root_password
- )
- end
-
- def root_password
- $stdout.print "Please provide the root password for your MySQL installation\n>"
- $stdin.gets.strip
- end
- def prepare_command_options
- args = {
- 'host' => '--host',
- 'port' => '--port',
- 'socket' => '--socket',
- 'username' => '--user',
- 'password' => '--password',
- 'encoding' => '--default-character-set',
- 'sslca' => '--ssl-ca',
- 'sslcert' => '--ssl-cert',
- 'sslcapath' => '--ssl-capath',
- 'sslcipher' => '--ssl-cipher',
- 'sslkey' => '--ssl-key'
- }.map { |opt, arg| "#{arg}=#{configuration[opt]}" if configuration[opt] }.compact
-
- args
- end
+ def prepare_command_options
+ args = {
+ "host" => "--host",
+ "port" => "--port",
+ "socket" => "--socket",
+ "username" => "--user",
+ "password" => "--password",
+ "encoding" => "--default-character-set",
+ "sslca" => "--ssl-ca",
+ "sslcert" => "--ssl-cert",
+ "sslcapath" => "--ssl-capath",
+ "sslcipher" => "--ssl-cipher",
+ "sslkey" => "--ssl-key"
+ }.map { |opt, arg| "#{arg}=#{configuration[opt]}" if configuration[opt] }.compact
+
+ args
+ end
- def run_cmd(cmd, args, action)
- fail run_cmd_error(cmd, args, action) unless Kernel.system(cmd, *args)
- end
+ def run_cmd(cmd, args, action)
+ fail run_cmd_error(cmd, args, action) unless Kernel.system(cmd, *args)
+ end
- def run_cmd_error(cmd, args, action)
- msg = "failed to execute: `#{cmd}`\n"
- msg << "Please check the output above for any errors and make sure that `#{cmd}` is installed in your PATH and has proper permissions.\n\n"
- msg
- end
+ def run_cmd_error(cmd, args, action)
+ msg = +"failed to execute: `#{cmd}`\n"
+ msg << "Please check the output above for any errors and make sure that `#{cmd}` is installed in your PATH and has proper permissions.\n\n"
+ msg
+ end
end
end
end
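# Illustrative sketch, not part of the patch: structure_dump with the new
# extra_flags argument and SchemaDumper.ignore_tables support. The configuration
# hash, database name, and ignored table are hypothetical, and a mysqldump binary
# is assumed to be on PATH.
ActiveRecord::SchemaDumper.ignore_tables = ["legacy_audit_rows"]
tasks = ActiveRecord::Tasks::MySQLDatabaseTasks.new("adapter" => "mysql2", "database" => "blog_dev")
tasks.structure_dump("db/structure.sql", ["--no-tablespaces"])
# roughly equivalent to running:
#   mysqldump --no-tablespaces --result-file db/structure.sql --no-data --routines \
#     --skip-comments --ignore-table=blog_dev.legacy_audit_rows blog_dev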
diff --git a/activerecord/lib/active_record/tasks/postgresql_database_tasks.rb b/activerecord/lib/active_record/tasks/postgresql_database_tasks.rb
index 8b4874044c..8acb11f75f 100644
--- a/activerecord/lib/active_record/tasks/postgresql_database_tasks.rb
+++ b/activerecord/lib/active_record/tasks/postgresql_database_tasks.rb
@@ -1,7 +1,13 @@
+# frozen_string_literal: true
+
+require "tempfile"
+
module ActiveRecord
module Tasks # :nodoc:
class PostgreSQLDatabaseTasks # :nodoc:
- DEFAULT_ENCODING = ENV['CHARSET'] || 'utf8'
+ DEFAULT_ENCODING = ENV["CHARSET"] || "utf8"
+ ON_ERROR_STOP_1 = "ON_ERROR_STOP=1"
+ SQL_COMMENT_BEGIN = "--"
delegate :connection, :establish_connection, :clear_active_connections!,
to: ActiveRecord::Base
@@ -12,11 +18,11 @@ module ActiveRecord
def create(master_established = false)
establish_master_connection unless master_established
- connection.create_database configuration['database'],
- configuration.merge('encoding' => encoding)
+ connection.create_database configuration["database"],
+ configuration.merge("encoding" => encoding)
establish_connection configuration
rescue ActiveRecord::StatementInvalid => error
- if /database .* already exists/ === error.message
+ if error.cause.is_a?(PG::DuplicateDatabase)
raise DatabaseAlreadyExists
else
raise
@@ -25,7 +31,7 @@ module ActiveRecord
def drop
establish_master_connection
- connection.drop_database configuration['database']
+ connection.drop_database configuration["database"]
end
def charset
@@ -42,69 +48,94 @@ module ActiveRecord
create true
end
- def structure_dump(filename)
+ def structure_dump(filename, extra_flags)
set_psql_env
- search_path = case ActiveRecord::Base.dump_schemas
- when :schema_search_path
- configuration['schema_search_path']
- when :all
- nil
- when String
- ActiveRecord::Base.dump_schemas
- end
+ search_path = \
+ case ActiveRecord::Base.dump_schemas
+ when :schema_search_path
+ configuration["schema_search_path"]
+ when :all
+ nil
+ when String
+ ActiveRecord::Base.dump_schemas
+ end
- args = ['-s', '-x', '-O', '-f', filename]
+ args = ["-s", "-x", "-O", "-f", filename]
+ args.concat(Array(extra_flags)) if extra_flags
unless search_path.blank?
- args += search_path.split(',').map do |part|
+ args += search_path.split(",").map do |part|
"--schema=#{part.strip}"
end
end
- args << configuration['database']
- run_cmd('pg_dump', args, 'dumping')
+
+ ignore_tables = ActiveRecord::SchemaDumper.ignore_tables
+ if ignore_tables.any?
+ args += ignore_tables.flat_map { |table| ["-T", table] }
+ end
+
+ args << configuration["database"]
+ run_cmd("pg_dump", args, "dumping")
+ remove_sql_header_comments(filename)
File.open(filename, "a") { |f| f << "SET search_path TO #{connection.schema_search_path};\n\n" }
end
- def structure_load(filename)
+ def structure_load(filename, extra_flags)
set_psql_env
- args = [ '-q', '-f', filename, configuration['database'] ]
- run_cmd('psql', args, 'loading' )
+ args = ["-v", ON_ERROR_STOP_1, "-q", "-X", "-f", filename]
+ args.concat(Array(extra_flags)) if extra_flags
+ args << configuration["database"]
+ run_cmd("psql", args, "loading")
end
private
- def configuration
- @configuration
- end
+ attr_reader :configuration
- def encoding
- configuration['encoding'] || DEFAULT_ENCODING
- end
+ def encoding
+ configuration["encoding"] || DEFAULT_ENCODING
+ end
- def establish_master_connection
- establish_connection configuration.merge(
- 'database' => 'postgres',
- 'schema_search_path' => 'public'
- )
- end
+ def establish_master_connection
+ establish_connection configuration.merge(
+ "database" => "postgres",
+ "schema_search_path" => "public"
+ )
+ end
- def set_psql_env
- ENV['PGHOST'] = configuration['host'] if configuration['host']
- ENV['PGPORT'] = configuration['port'].to_s if configuration['port']
- ENV['PGPASSWORD'] = configuration['password'].to_s if configuration['password']
- ENV['PGUSER'] = configuration['username'].to_s if configuration['username']
- end
+ def set_psql_env
+ ENV["PGHOST"] = configuration["host"] if configuration["host"]
+ ENV["PGPORT"] = configuration["port"].to_s if configuration["port"]
+ ENV["PGPASSWORD"] = configuration["password"].to_s if configuration["password"]
+ ENV["PGUSER"] = configuration["username"].to_s if configuration["username"]
+ end
- def run_cmd(cmd, args, action)
- fail run_cmd_error(cmd, args, action) unless Kernel.system(cmd, *args)
- end
+ def run_cmd(cmd, args, action)
+ fail run_cmd_error(cmd, args, action) unless Kernel.system(cmd, *args)
+ end
- def run_cmd_error(cmd, args, action)
- msg = "failed to execute:\n"
- msg << "#{cmd} #{args.join(' ')}\n\n"
- msg << "Please check the output above for any errors and make sure that `#{cmd}` is installed in your PATH and has proper permissions.\n\n"
- msg
- end
+ def run_cmd_error(cmd, args, action)
+ msg = +"failed to execute:\n"
+ msg << "#{cmd} #{args.join(' ')}\n\n"
+ msg << "Please check the output above for any errors and make sure that `#{cmd}` is installed in your PATH and has proper permissions.\n\n"
+ msg
+ end
+
+ def remove_sql_header_comments(filename)
+ removing_comments = true
+ tempfile = Tempfile.open("uncommented_structure.sql")
+ begin
+ File.foreach(filename) do |line|
+ unless removing_comments && (line.start_with?(SQL_COMMENT_BEGIN) || line.blank?)
+ tempfile << line
+ removing_comments = false
+ end
+ end
+ ensure
+ tempfile.close
+ end
+ FileUtils.cp(tempfile.path, filename)
+ end
end
end
end
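# Illustrative sketch, not part of the patch: structure_load now passes
# ON_ERROR_STOP=1 and -X so a broken structure.sql fails loudly instead of being
# half-applied. The configuration values and extra flag here are hypothetical.
tasks = ActiveRecord::Tasks::PostgreSQLDatabaseTasks.new("adapter" => "postgresql", "database" => "blog_dev")
tasks.structure_load("db/structure.sql", ["--single-transaction"])
# roughly equivalent to running:
#   psql -v ON_ERROR_STOP=1 -q -X -f db/structure.sql --single-transaction blog_dev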
diff --git a/activerecord/lib/active_record/tasks/sqlite_database_tasks.rb b/activerecord/lib/active_record/tasks/sqlite_database_tasks.rb
index 9ec3c8a94a..a82cea80ca 100644
--- a/activerecord/lib/active_record/tasks/sqlite_database_tasks.rb
+++ b/activerecord/lib/active_record/tasks/sqlite_database_tasks.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
module ActiveRecord
module Tasks # :nodoc:
class SQLiteDatabaseTasks # :nodoc:
@@ -8,20 +10,20 @@ module ActiveRecord
end
def create
- raise DatabaseAlreadyExists if File.exist?(configuration['database'])
+ raise DatabaseAlreadyExists if File.exist?(configuration["database"])
establish_connection configuration
connection
end
def drop
- require 'pathname'
- path = Pathname.new configuration['database']
+ require "pathname"
+ path = Pathname.new configuration["database"]
file = path.absolute? ? path.to_s : File.join(root, path)
FileUtils.rm(file)
rescue Errno::ENOENT => error
- raise NoDatabaseError.new(error.message, error)
+ raise NoDatabaseError.new(error.message)
end
def purge
@@ -35,25 +37,41 @@ module ActiveRecord
connection.encoding
end
- def structure_dump(filename)
- dbfile = configuration['database']
- `sqlite3 #{dbfile} .schema > #{filename}`
+ def structure_dump(filename, extra_flags)
+ args = []
+ args.concat(Array(extra_flags)) if extra_flags
+ args << configuration["database"]
+
+ ignore_tables = ActiveRecord::SchemaDumper.ignore_tables
+ if ignore_tables.any?
+ condition = ignore_tables.map { |table| connection.quote(table) }.join(", ")
+ args << "SELECT sql FROM sqlite_master WHERE tbl_name NOT IN (#{condition}) ORDER BY tbl_name, type DESC, name"
+ else
+ args << ".schema"
+ end
+ run_cmd("sqlite3", args, filename)
end
- def structure_load(filename)
- dbfile = configuration['database']
- `sqlite3 #{dbfile} < "#{filename}"`
+ def structure_load(filename, extra_flags)
+ dbfile = configuration["database"]
+ flags = extra_flags.join(" ") if extra_flags
+ `sqlite3 #{flags} #{dbfile} < "#{filename}"`
end
private
- def configuration
- @configuration
- end
+ attr_reader :configuration, :root
- def root
- @root
- end
+ def run_cmd(cmd, args, out)
+ fail run_cmd_error(cmd, args) unless Kernel.system(cmd, *args, out: out)
+ end
+
+ def run_cmd_error(cmd, args)
+ msg = +"failed to execute:\n"
+ msg << "#{cmd} #{args.join(' ')}\n\n"
+ msg << "Please check the output above for any errors and make sure that `#{cmd}` is installed in your PATH and has proper permissions.\n\n"
+ msg
+ end
end
end
end
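# Illustrative sketch, not part of the patch: with ignored tables the SQLite dump
# switches from ".schema" to a filtered query against sqlite_master. The paths and
# table name are hypothetical, and an established connection is assumed for quoting.
ActiveRecord::SchemaDumper.ignore_tables = ["plugin_data"]
tasks = ActiveRecord::Tasks::SQLiteDatabaseTasks.new("adapter" => "sqlite3", "database" => "db/development.sqlite3")
tasks.structure_dump("db/structure.sql", nil)
# roughly equivalent to running:
#   sqlite3 db/development.sqlite3 "SELECT sql FROM sqlite_master WHERE tbl_name NOT IN ('plugin_data') ORDER BY tbl_name, type DESC, name" > db/structure.sql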
diff --git a/activerecord/lib/active_record/test_databases.rb b/activerecord/lib/active_record/test_databases.rb
new file mode 100644
index 0000000000..999830ba79
--- /dev/null
+++ b/activerecord/lib/active_record/test_databases.rb
@@ -0,0 +1,38 @@
+# frozen_string_literal: true
+
+require "active_support/testing/parallelization"
+
+module ActiveRecord
+ module TestDatabases # :nodoc:
+ ActiveSupport::Testing::Parallelization.after_fork_hook do |i|
+ create_and_load_schema(i, env_name: Rails.env)
+ end
+
+ ActiveSupport::Testing::Parallelization.run_cleanup_hook do
+ drop(env_name: Rails.env)
+ end
+
+ def self.create_and_load_schema(i, env_name:)
+ old, ENV["VERBOSE"] = ENV["VERBOSE"], "false"
+
+ ActiveRecord::Base.configurations.configs_for(env_name: env_name).each do |db_config|
+ db_config.config["database"] += "-#{i}"
+ ActiveRecord::Tasks::DatabaseTasks.create(db_config.config)
+ ActiveRecord::Tasks::DatabaseTasks.load_schema(db_config.config, ActiveRecord::Base.schema_format, nil, env_name, db_config.spec_name)
+ end
+ ensure
+ ActiveRecord::Base.establish_connection(Rails.env.to_sym)
+ ENV["VERBOSE"] = old
+ end
+
+ def self.drop(env_name:)
+ old, ENV["VERBOSE"] = ENV["VERBOSE"], "false"
+
+ ActiveRecord::Base.configurations.configs_for(env_name: env_name).each do |db_config|
+ ActiveRecord::Tasks::DatabaseTasks.drop(db_config.config)
+ end
+ ensure
+ ENV["VERBOSE"] = old
+ end
+ end
+end
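# Illustrative sketch, not part of the patch: each parallel test worker gets its own
# copy of every configured database, suffixed with the worker index. Assuming
# config/database.yml defines a test database named "blog_test":
ActiveRecord::TestDatabases.create_and_load_schema(0, env_name: "test")  # creates and loads "blog_test-0"
ActiveRecord::TestDatabases.create_and_load_schema(1, env_name: "test")  # creates and loads "blog_test-1"
ActiveRecord::TestDatabases.drop(env_name: "test")                       # the cleanup hook drops them again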
diff --git a/activerecord/lib/active_record/test_fixtures.rb b/activerecord/lib/active_record/test_fixtures.rb
new file mode 100644
index 0000000000..8c60d71669
--- /dev/null
+++ b/activerecord/lib/active_record/test_fixtures.rb
@@ -0,0 +1,224 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ module TestFixtures
+ extend ActiveSupport::Concern
+
+ def before_setup # :nodoc:
+ setup_fixtures
+ super
+ end
+
+ def after_teardown # :nodoc:
+ super
+ teardown_fixtures
+ end
+
+ included do
+ class_attribute :fixture_path, instance_writer: false
+ class_attribute :fixture_table_names, default: []
+ class_attribute :fixture_class_names, default: {}
+ class_attribute :use_transactional_tests, default: true
+ class_attribute :use_instantiated_fixtures, default: false # true, false, or :no_instances
+ class_attribute :pre_loaded_fixtures, default: false
+ class_attribute :config, default: ActiveRecord::Base
+ class_attribute :lock_threads, default: true
+ end
+
+ module ClassMethods
+ # Sets the model class for a fixture when the class name cannot be inferred from the fixture name.
+ #
+ # Examples:
+ #
+ # set_fixture_class some_fixture: SomeModel,
+ # 'namespaced/fixture' => Another::Model
+ #
+ # The keys must be the fixture names, that coincide with the short paths to the fixture files.
+ def set_fixture_class(class_names = {})
+ self.fixture_class_names = fixture_class_names.merge(class_names.stringify_keys)
+ end
+
+ def fixtures(*fixture_set_names)
+ if fixture_set_names.first == :all
+ raise StandardError, "No fixture path found. Please set `#{self}.fixture_path`." if fixture_path.blank?
+ fixture_set_names = Dir["#{fixture_path}/{**,*}/*.{yml}"].uniq
+ fixture_set_names.map! { |f| f[(fixture_path.to_s.size + 1)..-5] }
+ else
+ fixture_set_names = fixture_set_names.flatten.map(&:to_s)
+ end
+
+ self.fixture_table_names |= fixture_set_names
+ setup_fixture_accessors(fixture_set_names)
+ end
+
+ def setup_fixture_accessors(fixture_set_names = nil)
+ fixture_set_names = Array(fixture_set_names || fixture_table_names)
+ methods = Module.new do
+ fixture_set_names.each do |fs_name|
+ fs_name = fs_name.to_s
+ accessor_name = fs_name.tr("/", "_").to_sym
+
+ define_method(accessor_name) do |*fixture_names|
+ force_reload = fixture_names.pop if fixture_names.last == true || fixture_names.last == :reload
+ return_single_record = fixture_names.size == 1
+ fixture_names = @loaded_fixtures[fs_name].fixtures.keys if fixture_names.empty?
+
+ @fixture_cache[fs_name] ||= {}
+
+ instances = fixture_names.map do |f_name|
+ f_name = f_name.to_s if f_name.is_a?(Symbol)
+ @fixture_cache[fs_name].delete(f_name) if force_reload
+
+ if @loaded_fixtures[fs_name][f_name]
+ @fixture_cache[fs_name][f_name] ||= @loaded_fixtures[fs_name][f_name].find
+ else
+ raise StandardError, "No fixture named '#{f_name}' found for fixture set '#{fs_name}'"
+ end
+ end
+
+ return_single_record ? instances.first : instances
+ end
+ private accessor_name
+ end
+ end
+ include methods
+ end
+
+ def uses_transaction(*methods)
+ @uses_transaction = [] unless defined?(@uses_transaction)
+ @uses_transaction.concat methods.map(&:to_s)
+ end
+
+ def uses_transaction?(method)
+ @uses_transaction = [] unless defined?(@uses_transaction)
+ @uses_transaction.include?(method.to_s)
+ end
+ end
+
+ def run_in_transaction?
+ use_transactional_tests &&
+ !self.class.uses_transaction?(method_name)
+ end
+
+ def setup_fixtures(config = ActiveRecord::Base)
+ if pre_loaded_fixtures && !use_transactional_tests
+ raise RuntimeError, "pre_loaded_fixtures requires use_transactional_tests"
+ end
+
+ @fixture_cache = {}
+ @fixture_connections = []
+ @@already_loaded_fixtures ||= {}
+ @connection_subscriber = nil
+
+ # Load fixtures once and begin transaction.
+ if run_in_transaction?
+ if @@already_loaded_fixtures[self.class]
+ @loaded_fixtures = @@already_loaded_fixtures[self.class]
+ else
+ @loaded_fixtures = load_fixtures(config)
+ @@already_loaded_fixtures[self.class] = @loaded_fixtures
+ end
+
+ # Begin transactions for connections already established
+ @fixture_connections = enlist_fixture_connections
+ @fixture_connections.each do |connection|
+ connection.begin_transaction joinable: false, _lazy: false
+ connection.pool.lock_thread = true if lock_threads
+ end
+
+ # When connections are established in the future, begin a transaction too
+ @connection_subscriber = ActiveSupport::Notifications.subscribe("!connection.active_record") do |_, _, _, _, payload|
+ spec_name = payload[:spec_name] if payload.key?(:spec_name)
+
+ if spec_name
+ begin
+ connection = ActiveRecord::Base.connection_handler.retrieve_connection(spec_name)
+ rescue ConnectionNotEstablished
+ connection = nil
+ end
+
+ if connection && !@fixture_connections.include?(connection)
+ connection.begin_transaction joinable: false, _lazy: false
+ connection.pool.lock_thread = true if lock_threads
+ @fixture_connections << connection
+ end
+ end
+ end
+
+ # Load fixtures for every test.
+ else
+ ActiveRecord::FixtureSet.reset_cache
+ @@already_loaded_fixtures[self.class] = nil
+ @loaded_fixtures = load_fixtures(config)
+ end
+
+ # Instantiate fixtures for every test if requested.
+ instantiate_fixtures if use_instantiated_fixtures
+ end
+
+ def teardown_fixtures
+ # Rollback changes if a transaction is active.
+ if run_in_transaction?
+ ActiveSupport::Notifications.unsubscribe(@connection_subscriber) if @connection_subscriber
+ @fixture_connections.each do |connection|
+ connection.rollback_transaction if connection.transaction_open?
+ connection.pool.lock_thread = false
+ end
+ @fixture_connections.clear
+ else
+ ActiveRecord::FixtureSet.reset_cache
+ end
+
+ ActiveRecord::Base.clear_active_connections!
+ end
+
+ def enlist_fixture_connections
+ setup_shared_connection_pool
+
+ ActiveRecord::Base.connection_handler.connection_pool_list.map(&:connection)
+ end
+
+ private
+
+ # Shares the writing connection pool with connections on
+ # other handlers.
+ #
+ # In an application with a primary and replica the test fixtures
+ # need to share a connection pool so that the reading connection
+ # can see data in the open transaction on the writing connection.
+ def setup_shared_connection_pool
+ writing_handler = ActiveRecord::Base.connection_handler
+
+ ActiveRecord::Base.connection_handlers.values.each do |handler|
+ if handler != writing_handler
+ handler.connection_pool_list.each do |pool|
+ name = pool.spec.name
+ writing_connection = writing_handler.retrieve_connection_pool(name)
+ handler.send(:owner_to_pool)[name] = writing_connection
+ end
+ end
+ end
+ end
+
+ def load_fixtures(config)
+ fixtures = ActiveRecord::FixtureSet.create_fixtures(fixture_path, fixture_table_names, fixture_class_names, config)
+ Hash[fixtures.map { |f| [f.name, f] }]
+ end
+
+ def instantiate_fixtures
+ if pre_loaded_fixtures
+ raise RuntimeError, "Load fixtures before instantiating them." if ActiveRecord::FixtureSet.all_loaded_fixtures.empty?
+ ActiveRecord::FixtureSet.instantiate_all_loaded_fixtures(self, load_instances?)
+ else
+ raise RuntimeError, "Load fixtures before instantiating them." if @loaded_fixtures.nil?
+ @loaded_fixtures.each_value do |fixture_set|
+ ActiveRecord::FixtureSet.instantiate_fixtures(self, fixture_set, load_instances?)
+ end
+ end
+ end
+
+ def load_instances?
+ use_instantiated_fixtures != :no_instances
+ end
+ end
+end
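# Illustrative sketch, not part of the patch: a test case using the fixture hooks
# defined above. The model, fixture names, and path are hypothetical; in a Rails app
# ActiveSupport::TestCase already includes ActiveRecord::TestFixtures.
class PostTest < ActiveSupport::TestCase
  self.fixture_path = "test/fixtures"
  set_fixture_class funny_posts: Post      # fixture name that cannot be inferred
  fixtures :posts, :funny_posts            # defines posts(...) and funny_posts(...) accessors

  test "reads a fixture row" do
    assert_equal "Hello world", posts(:welcome).title
  end
end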
diff --git a/activerecord/lib/active_record/timestamp.rb b/activerecord/lib/active_record/timestamp.rb
index d9c18a5e38..04a1c03474 100644
--- a/activerecord/lib/active_record/timestamp.rb
+++ b/activerecord/lib/active_record/timestamp.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
module ActiveRecord
# = Active Record \Timestamp
#
@@ -42,8 +44,7 @@ module ActiveRecord
extend ActiveSupport::Concern
included do
- class_attribute :record_timestamps
- self.record_timestamps = true
+ class_attribute :record_timestamps, default: true
end
def initialize_dup(other) # :nodoc:
@@ -51,16 +52,48 @@ module ActiveRecord
clear_timestamp_attributes
end
+ module ClassMethods # :nodoc:
+ def touch_attributes_with_time(*names, time: nil)
+ attribute_names = timestamp_attributes_for_update_in_model
+ attribute_names |= names.map(&:to_s)
+ attribute_names.index_with(time || current_time_from_proper_timezone)
+ end
+
+ private
+ def timestamp_attributes_for_create_in_model
+ timestamp_attributes_for_create.select { |c| column_names.include?(c) }
+ end
+
+ def timestamp_attributes_for_update_in_model
+ timestamp_attributes_for_update.select { |c| column_names.include?(c) }
+ end
+
+ def all_timestamp_attributes_in_model
+ timestamp_attributes_for_create_in_model + timestamp_attributes_for_update_in_model
+ end
+
+ def timestamp_attributes_for_create
+ ["created_at", "created_on"]
+ end
+
+ def timestamp_attributes_for_update
+ ["updated_at", "updated_on"]
+ end
+
+ def current_time_from_proper_timezone
+ default_timezone == :utc ? Time.now.utc : Time.now
+ end
+ end
+
private
def _create_record
- if self.record_timestamps
+ if record_timestamps
current_time = current_time_from_proper_timezone
- all_timestamp_attributes.each do |column|
- column = column.to_s
- if has_attribute?(column) && !attribute_present?(column)
- write_attribute(column, current_time)
+ all_timestamp_attributes_in_model.each do |column|
+ if !attribute_present?(column)
+ _write_attribute(column, current_time)
end
end
end
@@ -68,59 +101,51 @@ module ActiveRecord
super
end
- def _update_record(*args, touch: true, **options)
- if touch && should_record_timestamps?
+ def _update_record
+ if @_touch_record && should_record_timestamps?
current_time = current_time_from_proper_timezone
timestamp_attributes_for_update_in_model.each do |column|
- column = column.to_s
- next if attribute_changed?(column)
- write_attribute(column, current_time)
+ next if will_save_change_to_attribute?(column)
+ _write_attribute(column, current_time)
end
end
- super(*args)
+
+ super
+ end
+
+ def create_or_update(touch: true, **)
+ @_touch_record = touch
+ super
end
def should_record_timestamps?
- self.record_timestamps && (!partial_writes? || changed?)
+ record_timestamps && (!partial_writes? || has_changes_to_save?)
end
def timestamp_attributes_for_create_in_model
- timestamp_attributes_for_create.select { |c| self.class.column_names.include?(c.to_s) }
+ self.class.send(:timestamp_attributes_for_create_in_model)
end
def timestamp_attributes_for_update_in_model
- timestamp_attributes_for_update.select { |c| self.class.column_names.include?(c.to_s) }
+ self.class.send(:timestamp_attributes_for_update_in_model)
end
def all_timestamp_attributes_in_model
- timestamp_attributes_for_create_in_model + timestamp_attributes_for_update_in_model
- end
-
- def timestamp_attributes_for_update
- [:updated_at, :updated_on]
- end
-
- def timestamp_attributes_for_create
- [:created_at, :created_on]
+ self.class.send(:all_timestamp_attributes_in_model)
end
- def all_timestamp_attributes
- timestamp_attributes_for_create + timestamp_attributes_for_update
+ def current_time_from_proper_timezone
+ self.class.send(:current_time_from_proper_timezone)
end
- def max_updated_column_timestamp(timestamp_names = timestamp_attributes_for_update)
- timestamp_names
- .map { |attr| self[attr] }
+ def max_updated_column_timestamp
+ timestamp_attributes_for_update_in_model
+ .map { |attr| self[attr]&.to_time }
.compact
- .map(&:to_time)
.max
end
- def current_time_from_proper_timezone
- self.class.default_timezone == :utc ? Time.now.utc : Time.now
- end
-
# Clear attributes and changed_attributes
def clear_timestamp_attributes
all_timestamp_attributes_in_model.each do |attribute_name|
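# Illustrative sketch, not part of the patch: the timestamp helpers moved to the
# class level expose touch_attributes_with_time, which returns the hash of columns
# to write. The model and extra column name are hypothetical.
Post.touch_attributes_with_time
# => { "updated_at" => 2018-01-01 00:00:00 UTC }
Post.touch_attributes_with_time(:last_commented_at, time: 1.hour.ago)
# => { "updated_at" => ..., "last_commented_at" => ... }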
diff --git a/activerecord/lib/active_record/touch_later.rb b/activerecord/lib/active_record/touch_later.rb
index 9a80a63e28..f70b7c50a2 100644
--- a/activerecord/lib/active_record/touch_later.rb
+++ b/activerecord/lib/active_record/touch_later.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
module ActiveRecord
# = Active Record Touch Later
module TouchLater
@@ -8,7 +10,12 @@ module ActiveRecord
end
def touch_later(*names) # :nodoc:
- raise ActiveRecordError, "cannot touch on a new record object" unless persisted?
+ unless persisted?
+ raise ActiveRecordError, <<-MSG.squish
+ cannot touch on a new or destroyed record object. Consider using
+ persisted?, new_record?, or destroyed? before touching
+ MSG
+ end
@_defer_touch_attrs ||= timestamp_attributes_for_update_in_model
@_defer_touch_attrs |= names
@@ -20,7 +27,7 @@ module ActiveRecord
# touch the parents as we are not calling the after_save callbacks
self.class.reflect_on_all_associations(:belongs_to).each do |r|
if touch = r.options[:touch]
- ActiveRecord::Associations::Builder::BelongsTo.touch_record(self, r.foreign_key, r.name, touch, :touch_later)
+ ActiveRecord::Associations::Builder::BelongsTo.touch_record(self, changes_to_save, r.foreign_key, r.name, touch, :touch_later)
end
end
end
@@ -53,6 +60,5 @@ module ActiveRecord
def belongs_to_touch_method
:touch_later
end
-
end
end
diff --git a/activerecord/lib/active_record/transactions.rb b/activerecord/lib/active_record/transactions.rb
index 77c2845d88..fe3842b905 100644
--- a/activerecord/lib/active_record/transactions.rb
+++ b/activerecord/lib/active_record/transactions.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
module ActiveRecord
# See ActiveRecord::Transactions::ClassMethods for documentation.
module Transactions
@@ -11,7 +13,6 @@ module ActiveRecord
:before_commit_without_transaction_enrollment,
:commit_without_transaction_enrollment,
:rollback_without_transaction_enrollment,
- terminator: deprecated_false_terminator,
scope: [:kind, :name]
end
@@ -124,7 +125,7 @@ module ActiveRecord
# # statement will cause a PostgreSQL error, even though the unique
# # constraint is no longer violated:
# Number.create(i: 1)
- # # => "PGError: ERROR: current transaction is aborted, commands
+ # # => "PG::Error: ERROR: current transaction is aborted, commands
# # ignored until end of transaction block"
# end
#
@@ -169,7 +170,7 @@ module ActiveRecord
# writing, the only database that we're aware of that supports true nested
# transactions, is MS-SQL. Because of this, Active Record emulates nested
# transactions by using savepoints on MySQL and PostgreSQL. See
- # http://dev.mysql.com/doc/refman/5.7/en/savepoint.html
+ # https://dev.mysql.com/doc/refman/5.7/en/savepoint.html
# for more information about savepoints.
#
# === \Callbacks
@@ -189,8 +190,8 @@ module ActiveRecord
#
# === Caveats
#
- # If you're on MySQL, then do not use DDL operations in nested transactions
- # blocks that are emulated with savepoints. That is, do not execute statements
+ # If you're on MySQL, then do not use Data Definition Language (DDL) operations in nested
+ # transactions blocks that are emulated with savepoints. That is, do not execute statements
# like 'CREATE TABLE' inside such blocks. This is because MySQL automatically
# releases all savepoints upon executing a DDL operation. When +transaction+
# is finished and tries to release the savepoint it created earlier, a
@@ -274,35 +275,25 @@ module ActiveRecord
set_callback(:rollback_without_transaction_enrollment, :after, *args, &block)
end
- def raise_in_transactional_callbacks
- ActiveSupport::Deprecation.warn('ActiveRecord::Base.raise_in_transactional_callbacks is deprecated and will be removed without replacement.')
- true
- end
-
- def raise_in_transactional_callbacks=(value)
- ActiveSupport::Deprecation.warn('ActiveRecord::Base.raise_in_transactional_callbacks= is deprecated, has no effect and will be removed without replacement.')
- value
- end
-
private
- def set_options_for_callbacks!(args, enforced_options = {})
- options = args.extract_options!.merge!(enforced_options)
- args << options
+ def set_options_for_callbacks!(args, enforced_options = {})
+ options = args.extract_options!.merge!(enforced_options)
+ args << options
- if options[:on]
- fire_on = Array(options[:on])
- assert_valid_transaction_action(fire_on)
- options[:if] = Array(options[:if])
- options[:if] << "transaction_include_any_action?(#{fire_on})"
+ if options[:on]
+ fire_on = Array(options[:on])
+ assert_valid_transaction_action(fire_on)
+ options[:if] = Array(options[:if])
+ options[:if].unshift(-> { transaction_include_any_action?(fire_on) })
+ end
end
- end
- def assert_valid_transaction_action(actions)
- if (actions - ACTIONS).any?
- raise ArgumentError, ":on conditions for after_commit and after_rollback callbacks have to be one of #{ACTIONS}"
+ def assert_valid_transaction_action(actions)
+ if (actions - ACTIONS).any?
+ raise ArgumentError, ":on conditions for after_commit and after_rollback callbacks have to be one of #{ACTIONS}"
+ end
end
- end
end
# See ActiveRecord::Transactions::ClassMethods for detailed documentation.
@@ -315,9 +306,7 @@ module ActiveRecord
end
def save(*) #:nodoc:
- rollback_active_record_state! do
- with_transaction_returning_status { super }
- end
+ with_transaction_returning_status { super }
end
def save!(*) #:nodoc:
@@ -328,17 +317,6 @@ module ActiveRecord
with_transaction_returning_status { super }
end
- # Reset id and @new_record if the transaction rolls back.
- def rollback_active_record_state!
- remember_transaction_record_state
- yield
- rescue Exception
- restore_transaction_record_state
- raise
- ensure
- clear_transaction_record_state
- end
-
def before_committed! # :nodoc:
_run_before_commit_without_transaction_enrollment_callbacks
_run_before_commit_callbacks
@@ -349,11 +327,13 @@ module ActiveRecord
# Ensure that it is not called if the object was never persisted (failed create),
# but call it after the commit of a destroyed object.
def committed!(should_run_callbacks: true) #:nodoc:
- if should_run_callbacks && destroyed? || persisted?
+ if should_run_callbacks && (destroyed? || persisted?)
+ @_committed_already_called = true
_run_commit_without_transaction_enrollment_callbacks
_run_commit_callbacks
end
ensure
+ @_committed_already_called = false
force_clear_transaction_record_state
end
@@ -391,119 +371,113 @@ module ActiveRecord
status = nil
self.class.transaction do
add_to_transaction
- begin
- status = yield
- rescue ActiveRecord::Rollback
- clear_transaction_record_state
- status = nil
- end
-
+ status = yield
raise ActiveRecord::Rollback unless status
end
status
- ensure
- if @transaction_state && @transaction_state.committed?
- clear_transaction_record_state
- end
end
- protected
-
- # Save the new record state and id of a record so it can be restored later if a transaction fails.
- def remember_transaction_record_state #:nodoc:
- @_start_transaction_state[:id] = id
- @_start_transaction_state.reverse_merge!(
- new_record: @new_record,
- destroyed: @destroyed,
- frozen?: frozen?,
- )
- @_start_transaction_state[:level] = (@_start_transaction_state[:level] || 0) + 1
- end
+ private
+ attr_reader :_committed_already_called, :_trigger_update_callback, :_trigger_destroy_callback
+
+ # Save the new record state and id of a record so it can be restored later if a transaction fails.
+ def remember_transaction_record_state
+ @_start_transaction_state.reverse_merge!(
+ id: id,
+ new_record: @new_record,
+ destroyed: @destroyed,
+ frozen?: frozen?,
+ )
+ @_start_transaction_state[:level] = (@_start_transaction_state[:level] || 0) + 1
+ remember_new_record_before_last_commit
+ end
- # Clear the new record state and id of a record.
- def clear_transaction_record_state #:nodoc:
- @_start_transaction_state[:level] = (@_start_transaction_state[:level] || 0) - 1
- force_clear_transaction_record_state if @_start_transaction_state[:level] < 1
- end
+ def remember_new_record_before_last_commit
+ if _committed_already_called
+ @_new_record_before_last_commit = false
+ else
+ @_new_record_before_last_commit = @_start_transaction_state[:new_record]
+ end
+ end
- # Force to clear the transaction record state.
- def force_clear_transaction_record_state #:nodoc:
- @_start_transaction_state.clear
- end
+ # Clear the new record state and id of a record.
+ def clear_transaction_record_state
+ @_start_transaction_state[:level] = (@_start_transaction_state[:level] || 0) - 1
+ force_clear_transaction_record_state if @_start_transaction_state[:level] < 1
+ end
+
+ # Force to clear the transaction record state.
+ def force_clear_transaction_record_state
+ @_start_transaction_state.clear
+ end
- # Restore the new record state and id of a record that was previously saved by a call to save_record_state.
- def restore_transaction_record_state(force = false) #:nodoc:
- unless @_start_transaction_state.empty?
- transaction_level = (@_start_transaction_state[:level] || 0) - 1
- if transaction_level < 1 || force
- restore_state = @_start_transaction_state
- thaw
- @new_record = restore_state[:new_record]
- @destroyed = restore_state[:destroyed]
- pk = self.class.primary_key
- if pk && read_attribute(pk) != restore_state[:id]
- write_attribute(pk, restore_state[:id])
+ # Restore the new record state and id of a record that was previously saved by a call to save_record_state.
+ def restore_transaction_record_state(force = false)
+ unless @_start_transaction_state.empty?
+ transaction_level = (@_start_transaction_state[:level] || 0) - 1
+ if transaction_level < 1 || force
+ restore_state = @_start_transaction_state
+ thaw
+ @new_record = restore_state[:new_record]
+ @destroyed = restore_state[:destroyed]
+ pk = self.class.primary_key
+ if pk && _read_attribute(pk) != restore_state[:id]
+ _write_attribute(pk, restore_state[:id])
+ end
+ freeze if restore_state[:frozen?]
end
- freeze if restore_state[:frozen?]
end
end
- end
-
- # Determine if a record was created or destroyed in a transaction. State should be one of :new_record or :destroyed.
- def transaction_record_state(state) #:nodoc:
- @_start_transaction_state[state]
- end
- # Determine if a transaction included an action for :create, :update, or :destroy. Used in filtering callbacks.
- def transaction_include_any_action?(actions) #:nodoc:
- actions.any? do |action|
- case action
- when :create
- transaction_record_state(:new_record)
- when :destroy
- destroyed?
- when :update
- !(transaction_record_state(:new_record) || destroyed?)
+ # Determine if a transaction included an action for :create, :update, or :destroy. Used in filtering callbacks.
+ def transaction_include_any_action?(actions)
+ actions.any? do |action|
+ case action
+ when :create
+ persisted? && @_new_record_before_last_commit
+ when :update
+ !(@_new_record_before_last_commit || destroyed?) && _trigger_update_callback
+ when :destroy
+ _trigger_destroy_callback
+ end
end
end
- end
-
- private
- def set_transaction_state(state) # :nodoc:
- @transaction_state = state
- end
+ def set_transaction_state(state)
+ @transaction_state = state
+ end
- def has_transactional_callbacks? # :nodoc:
- !_rollback_callbacks.empty? || !_commit_callbacks.empty? || !_before_commit_callbacks.empty?
- end
+ def has_transactional_callbacks?
+ !_rollback_callbacks.empty? || !_commit_callbacks.empty? || !_before_commit_callbacks.empty?
+ end
- # Updates the attributes on this particular Active Record object so that
- # if it's associated with a transaction, then the state of the Active Record
- # object will be updated to reflect the current state of the transaction
- #
- # The +@transaction_state+ variable stores the states of the associated
- # transaction. This relies on the fact that a transaction can only be in
- # one rollback or commit (otherwise a list of states would be required)
- # Each Active Record object inside of a transaction carries that transaction's
- # TransactionState.
- #
- # This method checks to see if the ActiveRecord object's state reflects
- # the TransactionState, and rolls back or commits the Active Record object
- # as appropriate.
- #
- # Since Active Record objects can be inside multiple transactions, this
- # method recursively goes through the parent of the TransactionState and
- # checks if the Active Record object reflects the state of the object.
- def sync_with_transaction_state
- update_attributes_from_transaction_state(@transaction_state)
- end
+ # Updates the attributes on this particular Active Record object so that
+ # if it's associated with a transaction, then the state of the Active Record
+ # object will be updated to reflect the current state of the transaction.
+ #
+ # The <tt>@transaction_state</tt> variable stores the states of the associated
+ # transaction. This relies on the fact that a transaction can only be in
+ # one rollback or commit (otherwise a list of states would be required).
+ # Each Active Record object inside of a transaction carries that transaction's
+ # TransactionState.
+ #
+ # This method checks to see if the ActiveRecord object's state reflects
+ # the TransactionState, and rolls back or commits the Active Record object
+ # as appropriate.
+ #
+ # Since Active Record objects can be inside multiple transactions, this
+ # method recursively goes through the parent of the TransactionState and
+ # checks if the Active Record object reflects the state of the object.
+ def sync_with_transaction_state
+ update_attributes_from_transaction_state(@transaction_state)
+ end
- def update_attributes_from_transaction_state(transaction_state)
- if transaction_state && transaction_state.finalized?
- restore_transaction_record_state if transaction_state.rolledback?
- clear_transaction_record_state
+ def update_attributes_from_transaction_state(transaction_state)
+ if transaction_state && transaction_state.finalized?
+ restore_transaction_record_state(transaction_state.fully_rolledback?) if transaction_state.rolledback?
+ force_clear_transaction_record_state if transaction_state.fully_committed?
+ clear_transaction_record_state if transaction_state.fully_completed?
+ end
end
- end
end
end
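# Illustrative sketch, not part of the patch: the :on filter for transactional
# callbacks is declared as before; only the internal :if guard changed from an
# eval'd string to a lambda. The model and callback names are hypothetical.
class Order < ApplicationRecord
  after_commit :notify_warehouse, on: :create              # new records only
  after_commit :sync_search_index, on: [:create, :update]  # skipped on destroy

  private
    def notify_warehouse; end
    def sync_search_index; end
end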
diff --git a/activerecord/lib/active_record/translation.rb b/activerecord/lib/active_record/translation.rb
index ddcb5f2a7a..82661a328a 100644
--- a/activerecord/lib/active_record/translation.rb
+++ b/activerecord/lib/active_record/translation.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
module ActiveRecord
module Translation
include ActiveModel::Translation
@@ -8,7 +10,7 @@ module ActiveRecord
classes = [klass]
return classes if klass == ActiveRecord::Base
- while klass != klass.base_class
+ while !klass.base_class?
classes << klass = klass.superclass
end
classes
diff --git a/activerecord/lib/active_record/type.rb b/activerecord/lib/active_record/type.rb
index 4911d93dd9..03d00006b7 100644
--- a/activerecord/lib/active_record/type.rb
+++ b/activerecord/lib/active_record/type.rb
@@ -1,17 +1,22 @@
-require 'active_model/type'
+# frozen_string_literal: true
-require 'active_record/type/internal/abstract_json'
-require 'active_record/type/internal/timezone'
+require "active_model/type"
-require 'active_record/type/date'
-require 'active_record/type/date_time'
-require 'active_record/type/time'
+require "active_record/type/internal/timezone"
-require 'active_record/type/serialized'
-require 'active_record/type/adapter_specific_registry'
+require "active_record/type/date"
+require "active_record/type/date_time"
+require "active_record/type/decimal_without_scale"
+require "active_record/type/json"
+require "active_record/type/time"
+require "active_record/type/text"
+require "active_record/type/unsigned_integer"
-require 'active_record/type/type_map'
-require 'active_record/type/hash_lookup_type_map'
+require "active_record/type/serialized"
+require "active_record/type/adapter_specific_registry"
+
+require "active_record/type/type_map"
+require "active_record/type/hash_lookup_type_map"
module ActiveRecord
module Type
@@ -37,24 +42,24 @@ module ActiveRecord
registry.lookup(*args, adapter: adapter, **kwargs)
end
+ def default_value # :nodoc:
+ @default_value ||= Value.new
+ end
+
private
- def current_adapter_name
- ActiveRecord::Base.connection.adapter_name.downcase.to_sym
- end
+ def current_adapter_name
+ ActiveRecord::Base.connection.adapter_name.downcase.to_sym
+ end
end
- Helpers = ActiveModel::Type::Helpers
BigInteger = ActiveModel::Type::BigInteger
Binary = ActiveModel::Type::Binary
Boolean = ActiveModel::Type::Boolean
Decimal = ActiveModel::Type::Decimal
- DecimalWithoutScale = ActiveModel::Type::DecimalWithoutScale
Float = ActiveModel::Type::Float
Integer = ActiveModel::Type::Integer
String = ActiveModel::Type::String
- Text = ActiveModel::Type::Text
- UnsignedInteger = ActiveModel::Type::UnsignedInteger
Value = ActiveModel::Type::Value
register(:big_integer, Type::BigInteger, override: false)
@@ -65,6 +70,7 @@ module ActiveRecord
register(:decimal, Type::Decimal, override: false)
register(:float, Type::Float, override: false)
register(:integer, Type::Integer, override: false)
+ register(:json, Type::Json, override: false)
register(:string, Type::String, override: false)
register(:text, Type::Text, override: false)
register(:time, Type::Time, override: false)
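# Illustrative sketch, not part of the patch: looking types up through the
# adapter-aware registry after the reshuffle above. Assumes a database connection
# is already established, since the default adapter name is read from it.
ActiveRecord::Type.lookup(:json)   # => an ActiveRecord::Type::Json instance
ActiveRecord::Type.lookup(:text)   # => an ActiveRecord::Type::Text instance (now AR-local)
ActiveRecord::Type.default_value   # => the shared ActiveModel::Type::Value fallback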
diff --git a/activerecord/lib/active_record/type/adapter_specific_registry.rb b/activerecord/lib/active_record/type/adapter_specific_registry.rb
index d440eac619..b300fdfa05 100644
--- a/activerecord/lib/active_record/type/adapter_specific_registry.rb
+++ b/activerecord/lib/active_record/type/adapter_specific_registry.rb
@@ -1,4 +1,6 @@
-require 'active_model/type/registry'
+# frozen_string_literal: true
+
+require "active_model/type/registry"
module ActiveRecord
# :stopdoc:
@@ -10,15 +12,15 @@ module ActiveRecord
private
- def registration_klass
- Registration
- end
+ def registration_klass
+ Registration
+ end
- def find_registration(symbol, *args)
- registrations
- .select { |registration| registration.matches?(symbol, *args) }
- .max
- end
+ def find_registration(symbol, *args)
+ registrations
+ .select { |registration| registration.matches?(symbol, *args) }
+ .max
+ end
end
class Registration
@@ -52,42 +54,42 @@ module ActiveRecord
protected
- attr_reader :name, :block, :adapter, :override
-
- def priority
- result = 0
- if adapter
- result |= 1
- end
- if override
- result |= 2
+ attr_reader :name, :block, :adapter, :override
+
+ def priority
+ result = 0
+ if adapter
+ result |= 1
+ end
+ if override
+ result |= 2
+ end
+ result
end
- result
- end
- def priority_except_adapter
- priority & 0b111111100
- end
+ def priority_except_adapter
+ priority & 0b111111100
+ end
private
- def matches_adapter?(adapter: nil, **)
- (self.adapter.nil? || adapter == self.adapter)
- end
+ def matches_adapter?(adapter: nil, **)
+ (self.adapter.nil? || adapter == self.adapter)
+ end
- def conflicts_with?(other)
- same_priority_except_adapter?(other) &&
- has_adapter_conflict?(other)
- end
+ def conflicts_with?(other)
+ same_priority_except_adapter?(other) &&
+ has_adapter_conflict?(other)
+ end
- def same_priority_except_adapter?(other)
- priority_except_adapter == other.priority_except_adapter
- end
+ def same_priority_except_adapter?(other)
+ priority_except_adapter == other.priority_except_adapter
+ end
- def has_adapter_conflict?(other)
- (override.nil? && other.adapter) ||
- (adapter && other.override.nil?)
- end
+ def has_adapter_conflict?(other)
+ (override.nil? && other.adapter) ||
+ (adapter && other.override.nil?)
+ end
end
class DecorationRegistration < Registration
@@ -110,17 +112,14 @@ module ActiveRecord
super | 4
end
- protected
-
- attr_reader :options, :klass
-
private
+ attr_reader :options, :klass
- def matches_options?(**kwargs)
- options.all? do |key, value|
- kwargs[key] == value
+ def matches_options?(**kwargs)
+ options.all? do |key, value|
+ kwargs[key] == value
+ end
end
- end
end
end
diff --git a/activerecord/lib/active_record/type/date.rb b/activerecord/lib/active_record/type/date.rb
index ccafed054e..8177074a20 100644
--- a/activerecord/lib/active_record/type/date.rb
+++ b/activerecord/lib/active_record/type/date.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
module ActiveRecord
module Type
class Date < ActiveModel::Type::Date
diff --git a/activerecord/lib/active_record/type/date_time.rb b/activerecord/lib/active_record/type/date_time.rb
index 1fb9380ecd..4acde6b9f8 100644
--- a/activerecord/lib/active_record/type/date_time.rb
+++ b/activerecord/lib/active_record/type/date_time.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
module ActiveRecord
module Type
class DateTime < ActiveModel::Type::DateTime
diff --git a/activerecord/lib/active_record/type/decimal_without_scale.rb b/activerecord/lib/active_record/type/decimal_without_scale.rb
new file mode 100644
index 0000000000..a207940dc7
--- /dev/null
+++ b/activerecord/lib/active_record/type/decimal_without_scale.rb
@@ -0,0 +1,15 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ module Type
+ class DecimalWithoutScale < ActiveModel::Type::BigInteger # :nodoc:
+ def type
+ :decimal
+ end
+
+ def type_cast_for_schema(value)
+ value.to_s.inspect
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/type/hash_lookup_type_map.rb b/activerecord/lib/active_record/type/hash_lookup_type_map.rb
index 3b01e3f8ca..db9853fbcc 100644
--- a/activerecord/lib/active_record/type/hash_lookup_type_map.rb
+++ b/activerecord/lib/active_record/type/hash_lookup_type_map.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
module ActiveRecord
module Type
class HashLookupTypeMap < TypeMap # :nodoc:
@@ -15,9 +17,9 @@ module ActiveRecord
private
- def perform_fetch(type, *args, &block)
- @mapping.fetch(type, block).call(type, *args)
- end
+ def perform_fetch(type, *args, &block)
+ @mapping.fetch(type, block).call(type, *args)
+ end
end
end
end
diff --git a/activerecord/lib/active_record/type/internal/abstract_json.rb b/activerecord/lib/active_record/type/internal/abstract_json.rb
deleted file mode 100644
index 097d1bd363..0000000000
--- a/activerecord/lib/active_record/type/internal/abstract_json.rb
+++ /dev/null
@@ -1,33 +0,0 @@
-module ActiveRecord
- module Type
- module Internal # :nodoc:
- class AbstractJson < ActiveModel::Type::Value # :nodoc:
- include ActiveModel::Type::Helpers::Mutable
-
- def type
- :json
- end
-
- def deserialize(value)
- if value.is_a?(::String)
- ::ActiveSupport::JSON.decode(value) rescue nil
- else
- value
- end
- end
-
- def serialize(value)
- if value.is_a?(::Array) || value.is_a?(::Hash)
- ::ActiveSupport::JSON.encode(value)
- else
- value
- end
- end
-
- def accessor
- ActiveRecord::Store::StringKeyedHashAccessor
- end
- end
- end
- end
-end
diff --git a/activerecord/lib/active_record/type/internal/timezone.rb b/activerecord/lib/active_record/type/internal/timezone.rb
index 947e06158a..3059755752 100644
--- a/activerecord/lib/active_record/type/internal/timezone.rb
+++ b/activerecord/lib/active_record/type/internal/timezone.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
module ActiveRecord
module Type
module Internal
diff --git a/activerecord/lib/active_record/type/json.rb b/activerecord/lib/active_record/type/json.rb
new file mode 100644
index 0000000000..3f9ff22796
--- /dev/null
+++ b/activerecord/lib/active_record/type/json.rb
@@ -0,0 +1,30 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ module Type
+ class Json < ActiveModel::Type::Value
+ include ActiveModel::Type::Helpers::Mutable
+
+ def type
+ :json
+ end
+
+ def deserialize(value)
+ return value unless value.is_a?(::String)
+ ActiveSupport::JSON.decode(value) rescue nil
+ end
+
+ def serialize(value)
+ ActiveSupport::JSON.encode(value) unless value.nil?
+ end
+
+ def changed_in_place?(raw_old_value, new_value)
+ deserialize(raw_old_value) != new_value
+ end
+
+ def accessor
+ ActiveRecord::Store::StringKeyedHashAccessor
+ end
+ end
+ end
+end
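# Illustrative sketch, not part of the patch: casting behaviour of the new
# top-level JSON type shown above.
type = ActiveRecord::Type::Json.new
type.serialize("tags" => ["ruby", "rails"])  # => "{\"tags\":[\"ruby\",\"rails\"]}"
type.deserialize('{"tags":["ruby"]}')        # => { "tags" => ["ruby"] }
type.deserialize("not json")                 # => nil (decode errors are swallowed)
type.serialize(nil)                          # => nil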
diff --git a/activerecord/lib/active_record/type/serialized.rb b/activerecord/lib/active_record/type/serialized.rb
index 4ff0740cfb..0a2f6cb9fb 100644
--- a/activerecord/lib/active_record/type/serialized.rb
+++ b/activerecord/lib/active_record/type/serialized.rb
@@ -1,6 +1,10 @@
+# frozen_string_literal: true
+
module ActiveRecord
module Type
class Serialized < DelegateClass(ActiveModel::Type::Value) # :nodoc:
+ undef to_yaml if method_defined?(:to_yaml)
+
include ActiveModel::Type::Helpers::Mutable
attr_reader :subtype, :coder
@@ -32,7 +36,7 @@ module ActiveRecord
def changed_in_place?(raw_old_value, value)
return false if value.nil?
- raw_new_value = serialize(value)
+ raw_new_value = encoded(value)
raw_old_value.nil? != raw_new_value.nil? ||
subtype.changed_in_place?(raw_old_value, raw_new_value)
end
@@ -43,15 +47,25 @@ module ActiveRecord
def assert_valid_value(value)
if coder.respond_to?(:assert_valid_value)
- coder.assert_valid_value(value)
+ coder.assert_valid_value(value, action: "serialize")
end
end
+ def force_equality?(value)
+ coder.respond_to?(:object_class) && value.is_a?(coder.object_class)
+ end
+
private
- def default_value?(value)
- value == coder.load(nil)
- end
+ def default_value?(value)
+ value == coder.load(nil)
+ end
+
+ def encoded(value)
+ unless default_value?(value)
+ coder.dump(value)
+ end
+ end
end
end
end
diff --git a/activerecord/lib/active_record/type/text.rb b/activerecord/lib/active_record/type/text.rb
new file mode 100644
index 0000000000..6d19696671
--- /dev/null
+++ b/activerecord/lib/active_record/type/text.rb
@@ -0,0 +1,11 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ module Type
+ class Text < ActiveModel::Type::String # :nodoc:
+ def type
+ :text
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/type/time.rb b/activerecord/lib/active_record/type/time.rb
index 70988d84ff..f4da1ecf2c 100644
--- a/activerecord/lib/active_record/type/time.rb
+++ b/activerecord/lib/active_record/type/time.rb
@@ -1,8 +1,21 @@
+# frozen_string_literal: true
+
module ActiveRecord
module Type
class Time < ActiveModel::Type::Time
include Internal::Timezone
+
+ class Value < DelegateClass(::Time) # :nodoc:
+ end
+
+ def serialize(value)
+ case value = super
+ when ::Time
+ Value.new(value)
+ else
+ value
+ end
+ end
end
end
end
-
diff --git a/activerecord/lib/active_record/type/type_map.rb b/activerecord/lib/active_record/type/type_map.rb
index 850a7a4e09..fc40b460f0 100644
--- a/activerecord/lib/active_record/type/type_map.rb
+++ b/activerecord/lib/active_record/type/type_map.rb
@@ -1,4 +1,6 @@
-require 'concurrent/map'
+# frozen_string_literal: true
+
+require "concurrent/map"
module ActiveRecord
module Type
@@ -11,7 +13,7 @@ module ActiveRecord
end
def lookup(lookup_key, *args)
- fetch(lookup_key, *args) { default_value }
+ fetch(lookup_key, *args) { Type.default_value }
end
def fetch(lookup_key, *args, &block)
@@ -44,21 +46,17 @@ module ActiveRecord
private
- def perform_fetch(lookup_key, *args)
- matching_pair = @mapping.reverse_each.detect do |key, _|
- key === lookup_key
- end
+ def perform_fetch(lookup_key, *args)
+ matching_pair = @mapping.reverse_each.detect do |key, _|
+ key === lookup_key
+ end
- if matching_pair
- matching_pair.last.call(lookup_key, *args)
- else
- yield lookup_key, *args
+ if matching_pair
+ matching_pair.last.call(lookup_key, *args)
+ else
+ yield lookup_key, *args
+ end
end
- end
-
- def default_value
- @default_value ||= ActiveModel::Type::Value.new
- end
end
end
end
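A simplified standalone sketch of the lookup behaviour above (TinyTypeMap is hypothetical, and the real class stores blocks rather than plain values): keys match via ===, the most recently registered key wins, and a miss falls back to a default instead of raising:

    class TinyTypeMap
      def initialize
        @mapping = {}
      end

      def register_type(key, value)
        @mapping[key] = value
      end

      def lookup(lookup_key)
        pair = @mapping.reverse_each.detect { |key, _| key === lookup_key }
        pair ? pair.last : :default
      end
    end

    map = TinyTypeMap.new
    map.register_type(/^varchar/i, :string)
    map.register_type("tinyint(1)", :boolean)

    map.lookup("varchar(255)")   # => :string, the regexp key matches via ===
    map.lookup("tinyint(1)")     # => :boolean
    map.lookup("json")           # => :default, nothing matched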
diff --git a/activerecord/lib/active_record/type/unsigned_integer.rb b/activerecord/lib/active_record/type/unsigned_integer.rb
new file mode 100644
index 0000000000..4619528f81
--- /dev/null
+++ b/activerecord/lib/active_record/type/unsigned_integer.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ module Type
+ class UnsignedInteger < ActiveModel::Type::Integer # :nodoc:
+ private
+
+ def max_value
+ super * 2
+ end
+
+ def min_value
+ 0
+ end
+ end
+ end
+end
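The arithmetic behind max_value and min_value, shown for a default 4-byte integer column (a sketch; the parent type treats the upper bound as exclusive):

    limit = 4                              # bytes
    signed_max   = 1 << (limit * 8 - 1)    # 2_147_483_648
    unsigned_max = signed_max * 2          # 4_294_967_296

    # signed   column range: -2_147_483_648 .. 2_147_483_647
    # unsigned column range:  0 .. 4_294_967_295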
diff --git a/activerecord/lib/active_record/type_caster.rb b/activerecord/lib/active_record/type_caster.rb
index accc339d00..2e5f45fa3d 100644
--- a/activerecord/lib/active_record/type_caster.rb
+++ b/activerecord/lib/active_record/type_caster.rb
@@ -1,5 +1,7 @@
-require 'active_record/type_caster/map'
-require 'active_record/type_caster/connection'
+# frozen_string_literal: true
+
+require "active_record/type_caster/map"
+require "active_record/type_caster/connection"
module ActiveRecord
module TypeCaster # :nodoc:
diff --git a/activerecord/lib/active_record/type_caster/connection.rb b/activerecord/lib/active_record/type_caster/connection.rb
index 7ed8dcc313..7cf8181d8e 100644
--- a/activerecord/lib/active_record/type_caster/connection.rb
+++ b/activerecord/lib/active_record/type_caster/connection.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
module ActiveRecord
module TypeCaster
class Connection # :nodoc:
@@ -12,18 +14,15 @@ module ActiveRecord
connection.type_cast_from_column(column, value)
end
- protected
-
- attr_reader :table_name
- delegate :connection, to: :@klass
-
private
+ attr_reader :table_name
+ delegate :connection, to: :@klass
- def column_for(attribute_name)
- if connection.schema_cache.data_source_exists?(table_name)
- connection.schema_cache.columns_hash(table_name)[attribute_name.to_s]
+ def column_for(attribute_name)
+ if connection.schema_cache.data_source_exists?(table_name)
+ connection.schema_cache.columns_hash(table_name)[attribute_name.to_s]
+ end
end
- end
end
end
end
diff --git a/activerecord/lib/active_record/type_caster/map.rb b/activerecord/lib/active_record/type_caster/map.rb
index 3a367b3999..663cdadb03 100644
--- a/activerecord/lib/active_record/type_caster/map.rb
+++ b/activerecord/lib/active_record/type_caster/map.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
module ActiveRecord
module TypeCaster
class Map # :nodoc:
@@ -7,13 +9,12 @@ module ActiveRecord
def type_cast_for_database(attr_name, value)
return value if value.is_a?(Arel::Nodes::BindParam)
- type = types.type_for_attribute(attr_name.to_s)
+ type = types.type_for_attribute(attr_name)
type.serialize(value)
end
- protected
-
- attr_reader :types
+ private
+ attr_reader :types
end
end
end
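A usage sketch with a hypothetical Post model; the dropped to_s suggests type_for_attribute now accepts symbols as well as strings, so either spelling reaches the same attribute type:

    Post.type_for_attribute("id").serialize("42")                               # => 42
    ActiveRecord::TypeCaster::Map.new(Post).type_cast_for_database(:id, "42")   # => 42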
diff --git a/activerecord/lib/active_record/validations.rb b/activerecord/lib/active_record/validations.rb
index ecaf04e39e..ca27a3f0ab 100644
--- a/activerecord/lib/active_record/validations.rb
+++ b/activerecord/lib/active_record/validations.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
module ActiveRecord
# = Active Record \RecordInvalid
#
@@ -40,13 +42,13 @@ module ActiveRecord
# The validation process on save can be skipped by passing <tt>validate: false</tt>.
# The regular {ActiveRecord::Base#save}[rdoc-ref:Persistence#save] method is replaced
# with this when the validations module is mixed in, which it is by default.
- def save(options={})
+ def save(options = {})
perform_validations(options) ? super : false
end
# Attempts to save the record just like {ActiveRecord::Base#save}[rdoc-ref:Base#save] but
# will raise an ActiveRecord::RecordInvalid exception instead of returning +false+ if the record is not valid.
- def save!(options={})
+ def save!(options = {})
perform_validations(options) ? super : raise_validation_error
end
@@ -68,7 +70,7 @@ module ActiveRecord
alias_method :validate, :valid?
- protected
+ private
def default_validation_context
new_record? ? :create : :update
@@ -78,7 +80,7 @@ module ActiveRecord
raise(RecordInvalid.new(self))
end
- def perform_validations(options={}) # :nodoc:
+ def perform_validations(options = {})
options[:validate] == false || valid?(options[:context])
end
end
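A usage sketch for the options above, assuming a hypothetical Person model that validates presence of name; new records validate in the :create context and persisted ones in :update, per default_validation_context:

    person = Person.new(name: nil)

    person.save                    # => false, validations ran and failed
    person.save(validate: false)   # => true, validations skipped entirely
    person.save!                   # raises ActiveRecord::RecordInvalid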
diff --git a/activerecord/lib/active_record/validations/absence.rb b/activerecord/lib/active_record/validations/absence.rb
index 641d041f3d..6afb9eabd2 100644
--- a/activerecord/lib/active_record/validations/absence.rb
+++ b/activerecord/lib/active_record/validations/absence.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
module ActiveRecord
module Validations
class AbsenceValidator < ActiveModel::Validations::AbsenceValidator # :nodoc:
diff --git a/activerecord/lib/active_record/validations/associated.rb b/activerecord/lib/active_record/validations/associated.rb
index b14db85167..3538aeec22 100644
--- a/activerecord/lib/active_record/validations/associated.rb
+++ b/activerecord/lib/active_record/validations/associated.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
module ActiveRecord
module Validations
class AssociatedValidator < ActiveModel::EachValidator #:nodoc:
@@ -37,7 +39,7 @@ module ActiveRecord
#
# * <tt>:message</tt> - A custom error message (default is: "is invalid").
# * <tt>:on</tt> - Specifies the contexts where this validation is active.
- # Runs in all validation contexts by default (nil). You can pass a symbol
+ # Runs in all validation contexts by default +nil+. You can pass a symbol
# or an array of symbols. (e.g. <tt>on: :create</tt> or
# <tt>on: :custom_validation_context</tt> or
# <tt>on: [:create, :custom_validation_context]</tt>)
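A usage sketch for the :on option described above (Library and Book are hypothetical models):

    class Library < ActiveRecord::Base
      has_many :books
      validates_associated :books, on: :create   # only runs in the :create context
    end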
diff --git a/activerecord/lib/active_record/validations/length.rb b/activerecord/lib/active_record/validations/length.rb
index 0e0cebce4a..f47b14ae3a 100644
--- a/activerecord/lib/active_record/validations/length.rb
+++ b/activerecord/lib/active_record/validations/length.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
module ActiveRecord
module Validations
class LengthValidator < ActiveModel::Validations::LengthValidator # :nodoc:
diff --git a/activerecord/lib/active_record/validations/presence.rb b/activerecord/lib/active_record/validations/presence.rb
index ad82ea66c4..75e97e1997 100644
--- a/activerecord/lib/active_record/validations/presence.rb
+++ b/activerecord/lib/active_record/validations/presence.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
module ActiveRecord
module Validations
class PresenceValidator < ActiveModel::Validations::PresenceValidator # :nodoc:
@@ -44,7 +46,7 @@ module ActiveRecord
# Configuration options:
# * <tt>:message</tt> - A custom error message (default is: "can't be blank").
# * <tt>:on</tt> - Specifies the contexts where this validation is active.
- # Runs in all validation contexts by default (nil). You can pass a symbol
+ # Runs in all validation contexts by default +nil+. You can pass a symbol
# or an array of symbols. (e.g. <tt>on: :create</tt> or
# <tt>on: :custom_validation_context</tt> or
# <tt>on: [:create, :custom_validation_context]</tt>)
@@ -57,7 +59,7 @@ module ActiveRecord
# or <tt>unless: Proc.new { |user| user.signup_step <= 2 }</tt>). The method,
# proc or string should return or evaluate to a +true+ or +false+ value.
# * <tt>:strict</tt> - Specifies whether validation should be strict.
- # See ActiveModel::Validation#validates! for more information.
+ # See ActiveModel::Validations#validates! for more information.
def validates_presence_of(*attr_names)
validates_with PresenceValidator, _merge_attributes(attr_names)
end
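A usage sketch combining the options documented above (Person, phone_number and featured? are hypothetical):

    class Person < ActiveRecord::Base
      validates_presence_of :name
      validates_presence_of :phone_number, on: :create
      validates_presence_of :bio, if: -> { featured? }, message: "is required for featured people"
    end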
diff --git a/activerecord/lib/active_record/validations/uniqueness.rb b/activerecord/lib/active_record/validations/uniqueness.rb
index 4a80cda0b8..2c3a2fb797 100644
--- a/activerecord/lib/active_record/validations/uniqueness.rb
+++ b/activerecord/lib/active_record/validations/uniqueness.rb
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
module ActiveRecord
module Validations
class UniquenessValidator < ActiveModel::EachValidator # :nodoc:
@@ -6,24 +8,27 @@ module ActiveRecord
raise ArgumentError, "#{options[:conditions]} was passed as :conditions but is not callable. " \
"Pass a callable instead: `conditions: -> { where(approved: true) }`"
end
- super({ case_sensitive: true }.merge!(options))
+ unless Array(options[:scope]).all? { |scope| scope.respond_to?(:to_sym) }
+ raise ArgumentError, "#{options[:scope]} is not supported format for :scope option. " \
+ "Pass a symbol or an array of symbols instead: `scope: :user_id`"
+ end
+ super
@klass = options[:class]
end
def validate_each(record, attribute, value)
finder_class = find_finder_class_for(record)
- table = finder_class.arel_table
value = map_enum_attribute(finder_class, attribute, value)
- relation = build_relation(finder_class, table, attribute, value)
+ relation = build_relation(finder_class, attribute, value)
if record.persisted?
if finder_class.primary_key
- relation = relation.where.not(finder_class.primary_key => record.id_was || record.id)
+ relation = relation.where.not(finder_class.primary_key => record.id_in_database)
else
- raise UnknownPrimaryKey.new(finder_class, "Can not validate uniqueness for persisted record without primary key.")
+ raise UnknownPrimaryKey.new(finder_class, "Cannot validate uniqueness for persisted record without primary key.")
end
end
- relation = scope_relation(record, table, relation)
+ relation = scope_relation(record, relation)
relation = relation.merge(options[:conditions]) if options[:conditions]
if relation.exists?
@@ -34,13 +39,13 @@ module ActiveRecord
end
end
- protected
+ private
# The check for an existing value should be run from a class that
# isn't abstract. This means working down from the current class
# (self), to the first non-abstract class. Since classes don't know
# their subclasses, we have to build the hierarchy between self and
# the record's class.
- def find_finder_class_for(record) #:nodoc:
+ def find_finder_class_for(record)
class_hierarchy = [record.class]
while class_hierarchy.first != @klass
@@ -50,50 +55,30 @@ module ActiveRecord
class_hierarchy.detect { |klass| !klass.abstract_class? }
end
- def build_relation(klass, table, attribute, value) #:nodoc:
- if reflection = klass._reflect_on_association(attribute)
- attribute = reflection.foreign_key
- value = value.attributes[reflection.klass.primary_key] unless value.nil?
- end
-
- # the attribute may be an aliased attribute
- if klass.attribute_alias?(attribute)
- attribute = klass.attribute_alias(attribute)
- end
-
- attribute_name = attribute.to_s
+ def build_relation(klass, attribute, value)
+ relation = klass.unscoped
+ comparison = relation.bind_attribute(attribute, value) do |attr, bind|
+ return relation.none! if bind.unboundable?
- column = klass.columns_hash[attribute_name]
- cast_type = klass.type_for_attribute(attribute_name)
- value = cast_type.serialize(value)
- value = klass.connection.type_cast(value)
- if value.is_a?(String) && column.limit
- value = value.to_s[0, column.limit]
+ if !options.key?(:case_sensitive) || bind.nil?
+ klass.connection.default_uniqueness_comparison(attr, bind, klass)
+ elsif options[:case_sensitive]
+ klass.connection.case_sensitive_comparison(attr, bind)
+ else
+ # will use SQL LOWER function before comparison, unless it detects a case insensitive collation
+ klass.connection.case_insensitive_comparison(attr, bind)
+ end
end
- comparison = if !options[:case_sensitive] && !value.nil?
- # will use SQL LOWER function before comparison, unless it detects a case insensitive collation
- klass.connection.case_insensitive_comparison(table, attribute, column, value)
- else
- klass.connection.case_sensitive_comparison(table, attribute, column, value)
- end
- if value.nil?
- klass.unscoped.where(comparison)
- else
- bind = Relation::QueryAttribute.new(attribute_name, value, Type::Value.new)
- klass.unscoped.where(comparison, bind)
- end
- rescue RangeError
- klass.none
+ relation.where!(comparison)
end
- def scope_relation(record, table, relation)
+ def scope_relation(record, relation)
Array(options[:scope]).each do |scope_item|
- if reflection = record.class._reflect_on_association(scope_item)
- scope_value = record.send(reflection.foreign_key)
- scope_item = reflection.foreign_key
+ scope_value = if record.class._reflect_on_association(scope_item)
+ record.association(scope_item).reader
else
- scope_value = record._read_attribute(scope_item)
+ record._read_attribute(scope_item)
end
relation = relation.where(scope_item => scope_value)
end
@@ -208,9 +193,7 @@ module ActiveRecord
# | # Boom! We now have a duplicate
# | # title!
#
- # This could even happen if you use transactions with the 'serializable'
- # isolation level. The best way to work around this problem is to add a unique
- # index to the database table using
+ # The best way to work around this problem is to add a unique index to the database table using
# {connection.add_index}[rdoc-ref:ConnectionAdapters::SchemaStatements#add_index].
# In the rare case that a race condition occurs, the database will guarantee
# the field's uniqueness.
@@ -222,7 +205,7 @@ module ActiveRecord
# can catch it and restart the transaction (e.g. by telling the user
# that the title already exists, and asking them to re-enter the title).
# This technique is also known as
- # {optimistic concurrency control}[http://en.wikipedia.org/wiki/Optimistic_concurrency_control].
+ # {optimistic concurrency control}[https://en.wikipedia.org/wiki/Optimistic_concurrency_control].
#
# The bundled ActiveRecord::ConnectionAdapters distinguish unique index
# constraint errors from other types of database errors by throwing an
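A usage sketch tying the pieces above together (Article, author_id and the migration class are hypothetical; the migration version assumes a Rails 6-era schema):

    class Article < ActiveRecord::Base
      # one title per author; :scope takes a symbol or an array of symbols
      validates_uniqueness_of :title, scope: :author_id, case_sensitive: false
    end

    # The validation alone cannot stop two concurrent requests from inserting
    # the same title, so back it with a unique index...
    class AddUniqueIndexToArticleTitles < ActiveRecord::Migration[6.0]
      def change
        add_index :articles, [:author_id, :title], unique: true
      end
    end

    # ...and treat the rare race as an expected error at the call site:
    begin
      article.save!
    rescue ActiveRecord::RecordNotUnique
      # retry, or report that the title is already taken
    end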
diff --git a/activerecord/lib/active_record/version.rb b/activerecord/lib/active_record/version.rb
index cf76a13b44..6b0d82d8fc 100644
--- a/activerecord/lib/active_record/version.rb
+++ b/activerecord/lib/active_record/version.rb
@@ -1,4 +1,6 @@
-require_relative 'gem_version'
+# frozen_string_literal: true
+
+require_relative "gem_version"
module ActiveRecord
# Returns the version of the currently loaded ActiveRecord as a <tt>Gem::Version</tt>