Diffstat (limited to 'activesupport/lib/active_support/cache.rb')
-rw-r--r--  activesupport/lib/active_support/cache.rb | 220
1 file changed, 138 insertions(+), 82 deletions(-)
diff --git a/activesupport/lib/active_support/cache.rb b/activesupport/lib/active_support/cache.rb
index 732ab4b7df..c5633d902f 100644
--- a/activesupport/lib/active_support/cache.rb
+++ b/activesupport/lib/active_support/cache.rb
@@ -3,7 +3,6 @@ require 'zlib'
 require 'active_support/core_ext/array/extract_options'
 require 'active_support/core_ext/array/wrap'
 require 'active_support/core_ext/benchmark'
-require 'active_support/core_ext/exception'
 require 'active_support/core_ext/class/attribute_accessors'
 require 'active_support/core_ext/numeric/bytes'
 require 'active_support/core_ext/numeric/time'
@@ -57,16 +56,7 @@ module ActiveSupport
       case store
       when Symbol
-        store_class_name = store.to_s.camelize
-        store_class =
-          begin
-            require "active_support/cache/#{store}"
-          rescue LoadError => e
-            raise "Could not find cache store adapter for #{store} (#{e})"
-          else
-            ActiveSupport::Cache.const_get(store_class_name)
-          end
-        store_class.new(*parameters)
+        retrieve_store_class(store).new(*parameters)
       when nil
         ActiveSupport::Cache::MemoryStore.new
       else
@@ -74,6 +64,18 @@ module ActiveSupport
       end
     end

+    # Expands out the +key+ argument into a key that can be used for the
+    # cache store. Optionally accepts a namespace, and all keys will be
+    # scoped within that namespace.
+    #
+    # If the +key+ argument provided is an array, or responds to +to_a+, then
+    # each of elements in the array will be turned into parameters/keys and
+    # concatenated into a single key. For example:
+    #
+    #   expand_cache_key([:foo, :bar])               # => "foo/bar"
+    #   expand_cache_key([:foo, :bar], "namespace")  # => "namespace/foo/bar"
+    #
+    # The +key+ argument can also respond to +cache_key+ or +to_param+.
     def expand_cache_key(key, namespace = nil)
       expanded_cache_key = namespace ? "#{namespace}/" : ""
@@ -95,6 +97,16 @@ module ActiveSupport
         else key.to_param
         end.to_s
       end
+
+      # Obtains the specified cache store class, given the name of the +store+.
+      # Raises an error when the store class cannot be found.
+      def retrieve_store_class(store)
+        require "active_support/cache/#{store}"
+      rescue LoadError => e
+        raise "Could not find cache store adapter for #{store} (#{e})"
+      else
+        ActiveSupport::Cache.const_get(store.to_s.camelize)
+      end
     end

   # An abstract cache store class. There are multiple cache store
@@ -182,10 +194,10 @@ module ActiveSupport
     # the cache with the given key, then that data is returned.
     #
     # If there is no such data in the cache (a cache miss), then +nil+ will be
-    # returned. However, if a block has been passed, that block will be run
-    # in the event of a cache miss. The return value of the block will be
-    # written to the cache under the given cache key, and that return value
-    # will be returned.
+    # returned. However, if a block has been passed, that block will be passed
+    # the key and executed in the event of a cache miss. The return value of the
+    # block will be written to the cache under the given cache key, and that
+    # return value will be returned.
     #
     #   cache.write('today', 'Monday')
     #   cache.fetch('today')  # => "Monday"
@@ -216,13 +228,13 @@ module ActiveSupport
     #
     # Setting <tt>:race_condition_ttl</tt> is very useful in situations where
     # a cache entry is used very frequently and is under heavy load. If a
-    # cache expires and due to heavy load seven different processes will try
+    # cache expires and due to heavy load several different processes will try
     # to read data natively and then they all will try to write to cache. To
     # avoid that case the first process to find an expired cache entry will
     # bump the cache expiration time by the value set in <tt>:race_condition_ttl</tt>.
     # Yes, this process is extending the time for a stale value by another few
     # seconds. Because of extended life of the previous cache, other processes
-    # will continue to use slightly stale data for a just a big longer. In the
+    # will continue to use slightly stale data for a just a bit longer. In the
     # meantime that first process will go ahead and will write into cache the
     # new value. After that all the processes will start getting new value.
     # The key is to keep <tt>:race_condition_ttl</tt> small.
@@ -276,34 +288,14 @@ module ActiveSupport
       if block_given?
         options = merged_options(options)
         key = namespaced_key(name, options)
-        unless options[:force]
-          entry = instrument(:read, name, options) do |payload|
-            payload[:super_operation] = :fetch if payload
-            read_entry(key, options)
-          end
-        end
-        if entry && entry.expired?
-          race_ttl = options[:race_condition_ttl].to_i
-          if race_ttl && (Time.now - entry.expires_at <= race_ttl)
-            # When an entry has :race_condition_ttl defined, put the stale entry back into the cache
-            # for a brief period while the entry is begin recalculated.
-            entry.expires_at = Time.now + race_ttl
-            write_entry(key, entry, :expires_in => race_ttl * 2)
-          else
-            delete_entry(key, options)
-          end
-          entry = nil
-        end
+
+        cached_entry = find_cached_entry(key, name, options) unless options[:force]
+        entry = handle_expired_entry(cached_entry, key, options)
+
         if entry
-          instrument(:fetch_hit, name, options) { |payload| }
-          entry.value
+          get_entry_value(entry, name, options)
         else
-          result = instrument(:generate, name, options) do |payload|
-            yield
-          end
-          write(name, result, options)
-          result
+          save_block_result_to_cache(name, options) { |_name| yield _name }
         end
       else
         read(name, options)
       end
@@ -360,12 +352,40 @@ module ActiveSupport
       results
     end

+    # Fetches data from the cache, using the given keys. If there is data in
+    # the cache with the given keys, then that data is returned. Otherwise,
+    # the supplied block is called for each key for which there was no data,
+    # and the result will be written to the cache and returned.
+    #
+    # Options are passed to the underlying cache implementation.
+    #
+    # Returns an array with the data for each of the names. For example:
+    #
+    #   cache.write("bim", "bam")
+    #   cache.fetch_multi("bim", "boom") {|key| key * 2 }
+    #   #=> ["bam", "boomboom"]
+    #
+    def fetch_multi(*names)
+      options = names.extract_options!
+      options = merged_options(options)
+
+      results = read_multi(*names, options)
+
+      names.map do |name|
+        results.fetch(name) do
+          value = yield name
+          write(name, value, options)
+          value
+        end
+      end
+    end
+
     # Writes the value to the cache, with the key.
     #
     # Options are passed to the underlying cache implementation.
     def write(name, value, options = nil)
       options = merged_options(options)
-      instrument(:write, name, options) do |payload|
+      instrument(:write, name, options) do
         entry = Entry.new(value, options)
         write_entry(namespaced_key(name, options), entry, options)
       end
@@ -376,7 +396,7 @@ module ActiveSupport
     # Options are passed to the underlying cache implementation.
     def delete(name, options = nil)
       options = merged_options(options)
-      instrument(:delete, name) do |payload|
+      instrument(:delete, name) do
         delete_entry(namespaced_key(name, options), options)
       end
     end
@@ -386,9 +406,9 @@ module ActiveSupport
     # Options are passed to the underlying cache implementation.
     def exist?(name, options = nil)
       options = merged_options(options)
-      instrument(:exist?, name) do |payload|
+      instrument(:exist?, name) do
         entry = read_entry(namespaced_key(name, options), options)
-        entry && !entry.expired?
+        (entry && !entry.expired?) || false
       end
     end
@@ -532,6 +552,42 @@ module ActiveSupport
        return unless logger && logger.debug? && !silence?
        logger.debug("Cache #{operation}: #{key}#{options.blank? ? "" : " (#{options.inspect})"}")
      end
+
+      def find_cached_entry(key, name, options)
+        instrument(:read, name, options) do |payload|
+          payload[:super_operation] = :fetch if payload
+          read_entry(key, options)
+        end
+      end
+
+      def handle_expired_entry(entry, key, options)
+        if entry && entry.expired?
+          race_ttl = options[:race_condition_ttl].to_i
+          if race_ttl && (Time.now.to_f - entry.expires_at <= race_ttl)
+            # When an entry has :race_condition_ttl defined, put the stale entry back into the cache
+            # for a brief period while the entry is begin recalculated.
+            entry.expires_at = Time.now + race_ttl
+            write_entry(key, entry, :expires_in => race_ttl * 2)
+          else
+            delete_entry(key, options)
+          end
+          entry = nil
+        end
+        entry
+      end
+
+      def get_entry_value(entry, name, options)
+        instrument(:fetch_hit, name, options) { |payload| }
+        entry.value
+      end
+
+      def save_block_result_to_cache(name, options)
+        result = instrument(:generate, name, options) do |payload|
+          yield(name)
+        end
+        write(name, result, options)
+        result
+      end
    end

  # This class is used to represent cache entries. Cache entries have a value and an optional
@@ -547,42 +603,42 @@ module ActiveSupport
    # +:compress+, +:compress_threshold+, and +:expires_in+.
    def initialize(value, options = {})
      if should_compress?(value, options)
-        @v = compress(value)
-        @c = true
+        @value = compress(value)
+        @compressed = true
      else
-        @v = value
-      end
-      if expires_in = options[:expires_in]
-        @x = (Time.now + expires_in).to_i
+        @value = value
      end
+      @created_at = Time.now.to_f
+      @expires_in = options[:expires_in]
+      @expires_in = @expires_in.to_f if @expires_in
    end

    def value
-      convert_version_3_entry! if defined?(@value)
-      compressed? ? uncompress(@v) : @v
+      convert_version_4beta1_entry! if defined?(@v)
+      compressed? ? uncompress(@value) : @value
    end

    # Check if the entry is expired. The +expires_in+ parameter can override
    # the value set when the entry was created.
    def expired?
-      convert_version_3_entry! if defined?(@value)
-      if defined?(@x)
-        @x && @x < Time.now.to_i
-      else
-        false
-      end
+      convert_version_4beta1_entry! if defined?(@value)
+      @expires_in && @created_at + @expires_in <= Time.now.to_f
    end

    def expires_at
-      Time.at(@x) if defined?(@x)
+      @expires_in ? @created_at + @expires_in : nil
    end

    def expires_at=(value)
-      @x = value.to_i
+      if value
+        @expires_in = value.to_f - @created_at
+      else
+        @expires_in = nil
+      end
    end

    # Returns the size of the cached value. This could be less than
-    # <tt>value.size</tt> if the data is compressed.
+    # <tt>value.size</tt> if the data is compressed.
    def size
      if defined?(@s)
        @s
@@ -591,9 +647,9 @@ module ActiveSupport
        when NilClass
          0
        when String
-          @v.bytesize
+          @value.bytesize
        else
-          @s = Marshal.dump(@v).bytesize
+          @s = Marshal.dump(@value).bytesize
        end
      end
    end
@@ -601,12 +657,12 @@ module ActiveSupport
    # Duplicate the value in a class. This is used by cache implementations that don't natively
    # serialize entries to protect against accidental cache modifications.
    def dup_value!
-      convert_version_3_entry! if defined?(@value)
-      if @v && !compressed? && !(@v.is_a?(Numeric) || @v == true || @v == false)
-        if @v.is_a?(String)
-          @v = @v.dup
+      convert_version_4beta1_entry! if defined?(@v)
+      if @value && !compressed? && !(@value.is_a?(Numeric) || @value == true || @value == false)
+        if @value.is_a?(String)
+          @value = @value.dup
        else
-          @v = Marshal.load(Marshal.dump(@v))
+          @value = Marshal.load(Marshal.dump(@value))
        end
      end
    end
@@ -622,7 +678,7 @@ module ActiveSupport
      end

      def compressed?
-        defined?(@c) ? @c : false
+        defined?(@compressed) ? @compressed : false
      end

      def compress(value)
@@ -635,19 +691,19 @@ module ActiveSupport
      # The internals of this method changed between Rails 3.x and 4.0. This method provides the glue
      # to ensure that cache entries created under the old version still work with the new class definition.
-      def convert_version_3_entry!
-        if defined?(@value)
-          @v = @value
-          remove_instance_variable(:@value)
+      def convert_version_4beta1_entry!
+        if defined?(@v)
+          @value = @v
+          remove_instance_variable(:@v)
        end
-        if defined?(@compressed)
-          @c = @compressed
-          remove_instance_variable(:@compressed)
+        if defined?(@c)
+          @compressed = @c
+          remove_instance_variable(:@c)
        end
-        if defined?(@expires_in) && defined?(@created_at) && @expires_in && @created_at
-          @x = (@created_at + @expires_in).to_i
-          remove_instance_variable(:@created_at)
-          remove_instance_variable(:@expires_in)
+        if defined?(@x) && @x
+          @created_at ||= Time.now.to_f
+          @expires_in = @x - @created_at
+          remove_instance_variable(:@x)
        end
      end
    end
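For context on the store lookup refactoring and the newly documented expand_cache_key helper, here is a minimal usage sketch; it is not part of the patch, and the :memory_store symbol and sample keys are illustrative only.

    require "active_support"
    require "active_support/cache"

    # lookup_store now delegates to the private retrieve_store_class helper;
    # an unknown store symbol still raises "Could not find cache store adapter for ...".
    store = ActiveSupport::Cache.lookup_store(:memory_store)

    # expand_cache_key flattens arrays and honours an optional namespace.
    ActiveSupport::Cache.expand_cache_key([:foo, :bar])              # => "foo/bar"
    ActiveSupport::Cache.expand_cache_key([:foo, :bar], "namespace") # => "namespace/foo/bar"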
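The :race_condition_ttl wording touched above describes dog-pile protection in fetch. A rough sketch of how a caller exercises it (the cache key and the recomputation body are placeholders, not taken from the patch):

    require "active_support"
    require "active_support/cache"

    cache = ActiveSupport::Cache::MemoryStore.new

    # When the entry expires under load, the first process extends the stale
    # entry by :race_condition_ttl seconds and recomputes, while the other
    # processes keep serving the slightly stale value until the new one lands.
    report = cache.fetch("slow_report", :expires_in => 10, :race_condition_ttl => 5) do
      # stand-in for an expensive computation
      Time.now.to_s
    end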
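The new fetch_multi carries its own example in the doc comment; spelled out as runnable code against a MemoryStore it looks roughly like this, returning an array of values as documented at this point in the API's history:

    require "active_support"
    require "active_support/cache"

    cache = ActiveSupport::Cache::MemoryStore.new
    cache.write("bim", "bam")

    # "bim" is served from the cache; "boom" misses, so the block runs for it
    # and the result is written back before being returned.
    cache.fetch_multi("bim", "boom") { |key| key * 2 }
    # => ["bam", "boomboom"]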
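The Entry rewrite replaces the packed @v/@c/@x ivars with @value/@compressed plus a relative @created_at/@expires_in pair, so expiry becomes float arithmetic rather than an absolute integer timestamp. A rough sketch of the resulting behaviour; Entry is an internal class, so this is illustrative only:

    require "active_support"
    require "active_support/cache"

    entry = ActiveSupport::Cache::Entry.new("payload", :expires_in => 60)
    entry.expired?    # => false
    entry.expires_at  # => creation time plus 60.0, a Float timestamp under the new layout

    # Bumping expires_at (as handle_expired_entry does for :race_condition_ttl)
    # is translated back into a relative @expires_in.
    entry.expires_at = Time.now + 5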