Diffstat (limited to 'activerecord/lib/active_record/relation/batches.rb')
-rw-r--r-- | activerecord/lib/active_record/relation/batches.rb | 67 |
1 file changed, 26 insertions, 41 deletions
diff --git a/activerecord/lib/active_record/relation/batches.rb b/activerecord/lib/active_record/relation/batches.rb
index 221bc73680..54587ae18e 100644
--- a/activerecord/lib/active_record/relation/batches.rb
+++ b/activerecord/lib/active_record/relation/batches.rb
@@ -29,15 +29,15 @@ module ActiveRecord
     #
     # ==== Options
     # * <tt>:batch_size</tt> - Specifies the size of the batch. Default to 1000.
-    # * <tt>:begin_at</tt> - Specifies the primary key value to start from, inclusive of the value.
-    # * <tt>:end_at</tt> - Specifies the primary key value to end at, inclusive of the value.
+    # * <tt>:start</tt> - Specifies the primary key value to start from, inclusive of the value.
+    # * <tt>:finish</tt> - Specifies the primary key value to end at, inclusive of the value.
     # This is especially useful if you want multiple workers dealing with
     # the same processing queue. You can make worker 1 handle all the records
     # between id 0 and 10,000 and worker 2 handle from 10,000 and beyond
-    # (by setting the +:begin_at+ and +:end_at+ option on each worker).
+    # (by setting the +:start+ and +:finish+ option on each worker).
     #
     #   # Let's process for a batch of 2000 records, skipping the first 2000 rows
-    #   Person.find_each(begin_at: 2000, batch_size: 2000) do |person|
+    #   Person.find_each(start: 2000, batch_size: 2000) do |person|
     #     person.party_all_night!
     #   end
     #
@@ -48,22 +48,15 @@ module ActiveRecord
     #
     # NOTE: You can't set the limit either, that's used to control
     # the batch sizes.
-    def find_each(begin_at: nil, end_at: nil, batch_size: 1000, start: nil)
-      if start
-        begin_at = start
-        ActiveSupport::Deprecation.warn(<<-MSG.squish)
-          Passing `start` value to find_each is deprecated, and will be removed in Rails 5.1.
-          Please pass `begin_at` instead.
-        MSG
-      end
+    def find_each(start: nil, finish: nil, batch_size: 1000)
       if block_given?
-        find_in_batches(begin_at: begin_at, end_at: end_at, batch_size: batch_size) do |records|
+        find_in_batches(start: start, finish: finish, batch_size: batch_size) do |records|
           records.each { |record| yield record }
         end
       else
-        enum_for(:find_each, begin_at: begin_at, end_at: end_at, batch_size: batch_size) do
+        enum_for(:find_each, start: start, finish: finish, batch_size: batch_size) do
           relation = self
-          apply_limits(relation, begin_at, end_at).size
+          apply_limits(relation, start, finish).size
         end
       end
     end
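A minimal usage sketch of find_each with the renamed options, assuming the Person model and party_all_night! method from the documentation above (the id boundaries are illustrative):

    # Worker 1: iterate records with ids 1 through 10_000, one at a time,
    # fetched from the database in batches of 1000.
    Person.find_each(start: 1, finish: 10_000, batch_size: 1000) do |person|
      person.party_all_night!
    end

    # Worker 2: records with id 10_001 and beyond.
    Person.find_each(start: 10_001) do |person|
      person.party_all_night!
    end

    # Without a block, find_each returns an Enumerator that can be chained.
    Person.find_each(start: 1, finish: 10_000).each(&:party_all_night!)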
@@ -88,15 +81,15 @@ module ActiveRecord
     #
     # ==== Options
     # * <tt>:batch_size</tt> - Specifies the size of the batch. Default to 1000.
-    # * <tt>:begin_at</tt> - Specifies the primary key value to start from, inclusive of the value.
-    # * <tt>:end_at</tt> - Specifies the primary key value to end at, inclusive of the value.
+    # * <tt>:start</tt> - Specifies the primary key value to start from, inclusive of the value.
+    # * <tt>:finish</tt> - Specifies the primary key value to end at, inclusive of the value.
     # This is especially useful if you want multiple workers dealing with
     # the same processing queue. You can make worker 1 handle all the records
     # between id 0 and 10,000 and worker 2 handle from 10,000 and beyond
-    # (by setting the +:begin_at+ and +:end_at+ option on each worker).
+    # (by setting the +:start+ and +:finish+ option on each worker).
     #
     #   # Let's process the next 2000 records
-    #   Person.find_in_batches(begin_at: 2000, batch_size: 2000) do |group|
+    #   Person.find_in_batches(start: 2000, batch_size: 2000) do |group|
     #     group.each { |person| person.party_all_night! }
     #   end
     #
@@ -107,24 +100,16 @@ module ActiveRecord
     #
     # NOTE: You can't set the limit either, that's used to control
     # the batch sizes.
-    def find_in_batches(begin_at: nil, end_at: nil, batch_size: 1000, start: nil)
-      if start
-        begin_at = start
-        ActiveSupport::Deprecation.warn(<<-MSG.squish)
-          Passing `start` value to find_in_batches is deprecated, and will be removed in Rails 5.1.
-          Please pass `begin_at` instead.
-        MSG
-      end
-
+    def find_in_batches(start: nil, finish: nil, batch_size: 1000)
       relation = self
       unless block_given?
-        return to_enum(:find_in_batches, begin_at: begin_at, end_at: end_at, batch_size: batch_size) do
-          total = apply_limits(relation, begin_at, end_at).size
+        return to_enum(:find_in_batches, start: start, finish: finish, batch_size: batch_size) do
+          total = apply_limits(relation, start, finish).size
           (total - 1).div(batch_size) + 1
         end
       end
 
-      in_batches(of: batch_size, begin_at: begin_at, end_at: end_at, load: true) do |batch|
+      in_batches(of: batch_size, start: start, finish: finish, load: true) do |batch|
        yield batch.to_a
      end
    end
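Similarly, a sketch of find_in_batches after the rename, again assuming the Person model from the documentation; each yielded group is an Array of up to batch_size records:

    # Process people in groups of 2000, starting from primary key value 2000.
    Person.find_in_batches(start: 2000, batch_size: 2000) do |group|
      group.each { |person| person.party_all_night! }
    end

    # Without a block an Enumerator is returned, so the batch index is available.
    Person.find_in_batches(finish: 10_000).with_index do |group, index|
      puts "Processing group ##{index}"
      group.each(&:party_all_night!)
    end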
@@ -153,18 +138,18 @@ module ActiveRecord
     # ==== Options
     # * <tt>:of</tt> - Specifies the size of the batch. Default to 1000.
     # * <tt>:load</tt> - Specifies if the relation should be loaded. Default to false.
-    # * <tt>:begin_at</tt> - Specifies the primary key value to start from, inclusive of the value.
-    # * <tt>:end_at</tt> - Specifies the primary key value to end at, inclusive of the value.
+    # * <tt>:start</tt> - Specifies the primary key value to start from, inclusive of the value.
+    # * <tt>:finish</tt> - Specifies the primary key value to end at, inclusive of the value.
     #
     # This is especially useful if you want to work with the
     # ActiveRecord::Relation object instead of the array of records, or if
     # you want multiple workers dealing with the same processing queue. You can
     # make worker 1 handle all the records between id 0 and 10,000 and worker 2
-    # handle from 10,000 and beyond (by setting the +:begin_at+ and +:end_at+
+    # handle from 10,000 and beyond (by setting the +:start+ and +:finish+
     # option on each worker).
     #
     #   # Let's process the next 2000 records
-    #   Person.in_batches(of: 2000, begin_at: 2000).update_all(awesome: true)
+    #   Person.in_batches(of: 2000, start: 2000).update_all(awesome: true)
     #
     # An example of calling where query method on the relation:
     #
@@ -186,10 +171,10 @@ module ActiveRecord
     #
     # NOTE: You can't set the limit either, that's used to control the batch
     # sizes.
-    def in_batches(of: 1000, begin_at: nil, end_at: nil, load: false)
+    def in_batches(of: 1000, start: nil, finish: nil, load: false)
       relation = self
       unless block_given?
-        return BatchEnumerator.new(of: of, begin_at: begin_at, end_at: end_at, relation: self)
+        return BatchEnumerator.new(of: of, start: start, finish: finish, relation: self)
       end
 
       if logger && (arel.orders.present? || arel.taken.present?)
@@ -197,7 +182,7 @@ module ActiveRecord
       end
 
       relation = relation.reorder(batch_order).limit(of)
-      relation = apply_limits(relation, begin_at, end_at)
+      relation = apply_limits(relation, start, finish)
       batch_relation = relation
 
       loop do
@@ -225,9 +210,9 @@ module ActiveRecord
 
     private
 
-      def apply_limits(relation, begin_at, end_at)
-        relation = relation.where(table[primary_key].gteq(begin_at)) if begin_at
-        relation = relation.where(table[primary_key].lteq(end_at)) if end_at
+      def apply_limits(relation, start, finish)
+        relation = relation.where(table[primary_key].gteq(start)) if start
+        relation = relation.where(table[primary_key].lteq(finish)) if finish
         relation
       end
 
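Finally, a sketch of in_batches with the renamed options; per the private apply_limits helper above, start and finish simply add primary_key >= / <= conditions to each batch query. The awesome and age columns are illustrative, taken from the documentation examples:

    # Update in batches of 2000, starting at primary key value 2000, without
    # instantiating the records (load defaults to false).
    Person.in_batches(of: 2000, start: 2000).update_all(awesome: true)

    # The block form yields an ActiveRecord::Relation per batch, so query
    # methods can still be called on it before anything executes.
    Person.in_batches(of: 1000, finish: 10_000) do |relation|
      relation.update_all('age = age + 1')
    end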