Diffstat (limited to 'activesupport')
-rw-r--r--  activesupport/CHANGELOG.md                                                  38
-rw-r--r--  activesupport/lib/active_support/cache.rb                                   45
-rw-r--r--  activesupport/lib/active_support/cache/redis_cache_store.rb                404
-rw-r--r--  activesupport/test/cache/behaviors/cache_increment_decrement_behavior.rb     8
-rw-r--r--  activesupport/test/cache/behaviors/local_cache_behavior.rb                   6
-rw-r--r--  activesupport/test/cache/stores/redis_cache_store_test.rb                  151
6 files changed, 637 insertions(+), 15 deletions(-)
diff --git a/activesupport/CHANGELOG.md b/activesupport/CHANGELOG.md
index 889919855c..d8059c860c 100644
--- a/activesupport/CHANGELOG.md
+++ b/activesupport/CHANGELOG.md
@@ -1,3 +1,41 @@
+* Redis cache store.
+
+ # Defaults to `redis://localhost:6379/0`. Only use for dev/test.
+ config.cache_store = :redis_cache_store
+
+ # Supports all common cache store options (:namespace, :compress,
+ # :compress_threshold, :expires_in, :race_condition_ttl) and all
+ # Redis options.
+ cache_password = Rails.application.secrets.redis_cache_password
+ config.cache_store = :redis_cache_store, driver: :hiredis,
+ namespace: 'myapp-cache', compress: true, timeout: 1,
+ url: "redis://:#{cache_password}@myapp-cache-1:6379/0"
+
+ # Supports Redis::Distributed with multiple hosts
+ config.cache_store = :redis_cache_store, driver: :hiredis,
+ namespace: 'myapp-cache', compress: true,
+ url: %w[
+ redis://myapp-cache-1:6379/0
+ redis://myapp-cache-1:6380/0
+ redis://myapp-cache-2:6379/0
+ redis://myapp-cache-2:6380/0
+ redis://myapp-cache-3:6379/0
+ redis://myapp-cache-3:6380/0
+ ]
+
+ # Or pass a builder block
+ config.cache_store = :redis_cache_store,
+ namespace: 'myapp-cache', compress: true,
+ redis: -> { Redis.new … }
+
+ Deployment note: Take care to use a *dedicated Redis cache* rather
+ than pointing this at your existing Redis server. It won't cope well
+ with mixed usage patterns and it won't expire cache entries by default.
+
+ Redis cache server setup guide: https://redis.io/topics/lru-cache
+
+ *Jeremy Daer*
+
* Allow `Range#include?` on TWZ ranges
In #11474 we prevented TWZ ranges being iterated over which matched
diff --git a/activesupport/lib/active_support/cache.rb b/activesupport/lib/active_support/cache.rb
index 9f91f9b576..9848e0c623 100644
--- a/activesupport/lib/active_support/cache.rb
+++ b/activesupport/lib/active_support/cache.rb
@@ -12,10 +12,11 @@ require "active_support/core_ext/string/inflections"
module ActiveSupport
# See ActiveSupport::Cache::Store for documentation.
module Cache
- autoload :FileStore, "active_support/cache/file_store"
- autoload :MemoryStore, "active_support/cache/memory_store"
- autoload :MemCacheStore, "active_support/cache/mem_cache_store"
- autoload :NullStore, "active_support/cache/null_store"
+ autoload :FileStore, "active_support/cache/file_store"
+ autoload :MemoryStore, "active_support/cache/memory_store"
+ autoload :MemCacheStore, "active_support/cache/mem_cache_store"
+ autoload :NullStore, "active_support/cache/null_store"
+ autoload :RedisCacheStore, "active_support/cache/redis_cache_store"
# These options mean something to all cache implementations. Individual cache
# implementations may support additional options.
@@ -567,14 +568,34 @@ module ActiveSupport
end
end
- # Prefixes a key with the namespace. Namespace and key will be delimited
- # with a colon.
- def normalize_key(key, options)
- key = Cache.expand_cache_key(key)
- namespace = options[:namespace] if options
- prefix = namespace.is_a?(Proc) ? namespace.call : namespace
- key = "#{prefix}:#{key}" if prefix
- key
+ # Expands and namespaces the cache key. May be overridden by
+ # cache stores to do additional normalization.
+ def normalize_key(key, options = nil)
+ namespace_key Cache.expand_cache_key(key), options
+ end
+
+ # Prefix the key with a namespace string:
+ #
+ # namespace_key 'foo', namespace: 'cache'
+ # # => 'cache:foo'
+ #
+ # With a namespace block:
+ #
+ # namespace_key 'foo', namespace: -> { 'cache' }
+ # # => 'cache:foo'
+ def namespace_key(key, options = nil)
+ options = merged_options(options)
+ namespace = options[:namespace]
+
+ if namespace.respond_to?(:call)
+ namespace = namespace.call
+ end
+
+ if namespace
+ "#{namespace}:#{key}"
+ else
+ key
+ end
end
def normalize_version(key, options = nil)
diff --git a/activesupport/lib/active_support/cache/redis_cache_store.rb b/activesupport/lib/active_support/cache/redis_cache_store.rb
new file mode 100644
index 0000000000..358de9203d
--- /dev/null
+++ b/activesupport/lib/active_support/cache/redis_cache_store.rb
@@ -0,0 +1,404 @@
+# frozen_string_literal: true
+
+begin
+ gem "redis", ">= 4.0.1"
+ require "redis"
+ require "redis/distributed"
+rescue LoadError
+ warn "The Redis cache store requires the redis gem, version 4.0.1 or later. Please add it to your Gemfile: `gem \"redis\", \"~> 4.0\"`"
+ raise
+end
+
+# Prefer the hiredis driver but don't require it.
+begin
+ require "redis/connection/hiredis"
+rescue LoadError
+end
+
+require "digest/sha2"
+require "active_support/core_ext/marshal"
+
+module ActiveSupport
+ module Cache
+ # Redis cache store.
+ #
+ # Deployment note: Take care to use a *dedicated Redis cache* rather
+ # than pointing this at your existing Redis server. It won't cope well
+ # with mixed usage patterns and it won't expire cache entries by default.
+ #
+ # Redis cache server setup guide: https://redis.io/topics/lru-cache
+ #
+ # * Supports vanilla Redis, hiredis, and Redis::Distributed.
+ # * Supports Memcached-like sharding across Redises with Redis::Distributed.
+ # * Fault tolerant. If the Redis server is unavailable, no exceptions are
+ # raised. Cache fetches are all misses and writes are dropped.
+ # * Local cache. Hot in-memory primary cache within block/middleware scope.
+ # * `read_/write_multi` support for Redis mget/mset. Use Redis::Distributed
+ # 4.0.1+ for distributed mget support.
+ # * `delete_matched` support for Redis KEYS globs.
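+ #
+ # An illustrative usage sketch (the URL, namespace, and key below are
+ # assumptions for the example):
+ #
+ # cache = ActiveSupport::Cache::RedisCacheStore.new(
+ # url: "redis://localhost:6379/0", namespace: "myapp-cache")
+ # cache.fetch("greeting", expires_in: 5.minutes) { "hello" } # => "hello"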
+ class RedisCacheStore < Store
+ # Keys are truncated with their own SHA2 digest if they exceed 1kB
+ MAX_KEY_BYTESIZE = 1024
+
+ DEFAULT_REDIS_OPTIONS = {
+ connect_timeout: 20,
+ read_timeout: 1,
+ write_timeout: 1,
+ reconnect_attempts: 0,
+ }
+
+ DEFAULT_ERROR_HANDLER = -> (method:, returning:, exception:) {
+ logger.error { "RedisCacheStore: #{method} failed, returned #{returning.inspect}: #{exception.class}: #{exception.message}" } if logger
+ }
+
+ DELETE_GLOB_LUA = "for i, name in ipairs(redis.call('KEYS', ARGV[1])) do redis.call('DEL', name); end"
+ private_constant :DELETE_GLOB_LUA
+
+ # Support raw values in the local cache strategy.
+ module LocalCacheWithRaw # :nodoc:
+ private
+ def read_entry(key, options)
+ entry = super
+ if options[:raw] && local_cache && entry
+ entry = deserialize_entry(entry.value)
+ end
+ entry
+ end
+
+ def write_entry(key, entry, options)
+ if options[:raw] && local_cache
+ raw_entry = Entry.new(entry.value.to_s)
+ raw_entry.expires_at = entry.expires_at
+ super(key, raw_entry, options)
+ else
+ super
+ end
+ end
+
+ def write_multi_entries(entries, options)
+ if options[:raw] && local_cache
+ raw_entries = entries.map do |key, entry|
+ raw_entry = Entry.new(entry.value.to_s)
+ raw_entry.expires_at = entry.expires_at
+ [ key, raw_entry ]
+ end.to_h
+
+ super(raw_entries, options)
+ else
+ super
+ end
+ end
+ end
+
+ prepend Strategy::LocalCache
+ prepend LocalCacheWithRaw
+
+ class << self
+ # Factory method to create a new Redis instance.
+ #
+ # Handles four options: :redis block, :redis instance, single :url
+ # string, and multiple :url strings.
+ #
+ # Option Class Result
+ # :redis Proc -> options[:redis].call
+ # :redis Object -> options[:redis]
+ # :url String -> Redis.new(url: …)
+ # :url Array -> Redis::Distributed.new([{ url: … }, { url: … }, …])
+ #
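+ # Illustrative calls (URLs are assumptions):
+ #
+ # build_redis url: "redis://localhost:6379/0"
+ # build_redis url: %w[ redis://localhost:6379/0 redis://localhost:6379/1 ]
+ # build_redis redis: -> { Redis.new(url: "redis://localhost:6379/0") }
+ #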
+ def build_redis(redis: nil, url: nil, **redis_options) #:nodoc:
+ urls = Array(url)
+
+ if redis.respond_to?(:call)
+ redis.call
+ elsif redis
+ redis
+ elsif urls.size > 1
+ build_redis_distributed_client urls: urls, **redis_options
+ else
+ build_redis_client url: urls.first, **redis_options
+ end
+ end
+
+ private
+ def build_redis_distributed_client(urls:, **redis_options)
+ ::Redis::Distributed.new([], DEFAULT_REDIS_OPTIONS.merge(redis_options)).tap do |dist|
+ urls.each { |u| dist.add_node url: u }
+ end
+ end
+
+ def build_redis_client(url:, **redis_options)
+ ::Redis.new DEFAULT_REDIS_OPTIONS.merge(redis_options.merge(url: url))
+ end
+ end
+
+ attr_reader :redis_options
+ attr_reader :max_key_bytesize
+
+ # Creates a new Redis cache store.
+ #
+ # Handles three options: block provided to instantiate, single URL
+ # provided, and multiple URLs provided.
+ #
+ # :redis Proc -> options[:redis].call
+ # :url String -> Redis.new(url: …)
+ # :url Array -> Redis::Distributed.new([{ url: … }, { url: … }, …])
+ #
+ # No namespace is set by default. Provide one if the Redis cache
+ # server is shared with other apps: `namespace: 'myapp-cache'`.
+ #
+ # Compression is enabled by default with a 1kB threshold, so cached
+ # values larger than 1kB are automatically compressed. Disable by
+ # passing `cache: false` or change the threshold by passing
+ # `compress_threshold: 4.kilobytes`.
+ #
+ # No expiry is set on cache entries by default. Redis is expected to
+ # be configured with an eviction policy that automatically deletes
+ # least-recently or -frequently used keys when it reaches max memory.
+ # See https://redis.io/topics/lru-cache for cache server setup.
+ #
+ # Race condition TTL is not set by default. This can be used to avoid
+ # "thundering herd" cache writes when hot cache entries are expired.
+ # See <tt>ActiveSupport::Cache::Store#fetch</tt> for more.
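+ #
+ # An illustrative instantiation (option values are assumptions):
+ #
+ # ActiveSupport::Cache::RedisCacheStore.new(
+ # namespace: "myapp-cache", compress_threshold: 4.kilobytes,
+ # expires_in: 90.minutes, url: "redis://localhost:6379/0")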
+ def initialize(namespace: nil, compress: true, compress_threshold: 1.kilobyte, expires_in: nil, race_condition_ttl: nil, error_handler: DEFAULT_ERROR_HANDLER, **redis_options)
+ @redis_options = redis_options
+
+ @max_key_bytesize = MAX_KEY_BYTESIZE
+ @error_handler = error_handler
+
+ super namespace: namespace,
+ compress: compress, compress_threshold: compress_threshold,
+ expires_in: expires_in, race_condition_ttl: race_condition_ttl
+ end
+
+ def redis
+ @redis ||= self.class.build_redis(**redis_options)
+ end
+
+ def inspect
+ instance = @redis || @redis_options
+ "<##{self.class} options=#{options.inspect} redis=#{instance.inspect}>"
+ end
+
+ # Cache Store API implementation.
+ #
+ # Read multiple values at once. Returns a hash of requested keys ->
+ # fetched values.
+ def read_multi(*names)
+ if mget_capable?
+ read_multi_mget(*names)
+ else
+ super
+ end
+ end
+
+ # Cache Store API implementation.
+ #
+ # Supports Redis KEYS glob patterns:
+ #
+ # h?llo matches hello, hallo and hxllo
+ # h*llo matches hllo and heeeello
+ # h[ae]llo matches hello and hallo, but not hillo
+ # h[^e]llo matches hallo, hbllo, ... but not hello
+ # h[a-b]llo matches hallo and hbllo
+ #
+ # Use \ to escape special characters if you want to match them verbatim.
+ #
+ # See https://redis.io/commands/KEYS for more.
+ #
+ # Failsafe: Raises errors.
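+ #
+ # For example (key names are illustrative):
+ #
+ # cache.write "posts/1", "foo"
+ # cache.delete_matched "posts/*" # removes posts/1 and any other posts/* keys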
+ def delete_matched(matcher, options = nil)
+ instrument :delete_matched, matcher do
+ case matcher
+ when String
+ redis.eval DELETE_GLOB_LUA, [], [namespace_key(matcher, options)]
+ else
+ raise ArgumentError, "Only Redis glob strings are supported: #{matcher.inspect}"
+ end
+ end
+ end
+
+ # Cache Store API implementation.
+ #
+ # Increment a cached value. This method uses the Redis incr atomic
+ # operator and can only be used on values written with the :raw option.
+ # Calling it on a value not stored with :raw will initialize that value
+ # to zero.
+ #
+ # Failsafe: Raises errors.
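+ #
+ # For example (the key name is illustrative):
+ #
+ # cache.write "counter", 0, raw: true
+ # cache.increment "counter" # => 1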
+ def increment(name, amount = 1, options = nil)
+ instrument :increment, name, amount: amount do
+ redis.incrby normalize_key(name, options), amount
+ end
+ end
+
+ # Cache Store API implementation.
+ #
+ # Decrement a cached value. This method uses the Redis decr atomic
+ # operator and can only be used on values written with the :raw option.
+ # Calling it on a value not stored with :raw will initialize that value
+ # to zero.
+ #
+ # Failsafe: Raises errors.
+ def decrement(name, amount = 1, options = nil)
+ instrument :decrement, name, amount: amount do
+ redis.decrby normalize_key(name, options), amount
+ end
+ end
+
+ # Cache Store API implementation.
+ #
+ # Removes expired entries. Handled natively by Redis least-recently-/
+ # least-frequently-used expiry, so manual cleanup is not supported.
+ def cleanup(options = nil)
+ super
+ end
+
+ # Clear the entire cache on all Redis servers. Safe to use on
+ # shared servers if the cache is namespaced.
+ #
+ # Failsafe: Raises errors.
+ def clear(options = nil)
+ failsafe :clear do
+ if namespace = merged_options(options)[:namespace]
+ delete_matched "*", namespace: namespace
+ else
+ redis.flushdb
+ end
+ end
+ end
+
+ def mget_capable? #:nodoc:
+ set_redis_capabilities unless defined? @mget_capable
+ @mget_capable
+ end
+
+ def mset_capable? #:nodoc:
+ set_redis_capabilities unless defined? @mset_capable
+ @mset_capable
+ end
+
+ private
+ def set_redis_capabilities
+ case redis
+ when Redis::Distributed
+ @mget_capable = true
+ @mset_capable = false
+ else
+ @mget_capable = true
+ @mset_capable = true
+ end
+ end
+
+ # Store provider interface:
+ # Read an entry from the cache.
+ def read_entry(key, options = nil)
+ failsafe :read_entry do
+ deserialize_entry redis.get(key)
+ end
+ end
+
+ def read_multi_mget(*names)
+ options = names.extract_options!
+ options = merged_options(options)
+
+ keys_to_names = names.map { |name| [ normalize_key(name, options), name ] }.to_h
+ values = redis.mget(*keys_to_names.keys)
+
+ keys_to_names.zip(values).each_with_object({}) do |((key, name), value), results|
+ if value
+ entry = deserialize_entry(value)
+ unless entry.nil? || entry.expired? || entry.mismatched?(normalize_version(name, options))
+ results[name] = entry.value
+ end
+ end
+ end
+ end
+
+ # Write an entry to the cache.
+ #
+ # Requires Redis 2.6.12+ for extended SET options.
+ def write_entry(key, entry, unless_exist: false, raw: false, expires_in: nil, race_condition_ttl: nil, **options)
+ value = raw ? entry.value.to_s : serialize_entry(entry)
+
+ # If race condition TTL is in use, ensure that cache entries
+ # stick around a bit longer after they would have expired
+ # so we can purposefully serve stale entries.
+ if race_condition_ttl && expires_in && expires_in > 0 && !raw
+ expires_in += 5.minutes
+ end
+
+ failsafe :write_entry do
+ if unless_exist || expires_in
+ modifiers = {}
+ modifiers[:nx] = unless_exist
+ modifiers[:px] = (1000 * expires_in.to_f).ceil if expires_in
+
+ redis.set key, value, modifiers
+ else
+ redis.set key, value
+ end
+ end
+ end
+
+ # Delete an entry from the cache.
+ def delete_entry(key, options)
+ failsafe :delete_entry, returning: false do
+ redis.del key
+ end
+ end
+
+ # Nonstandard store provider API to write multiple values at once.
+ def write_multi_entries(entries, expires_in: nil, **options)
+ if entries.any?
+ if mset_capable? && expires_in.nil?
+ failsafe :write_multi_entries do
+ redis.mapped_mset(entries)
+ end
+ else
+ super
+ end
+ end
+ end
+
+ # Truncate keys that exceed 1kB.
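+ # For instance, a 2 kB key keeps its first 954 bytes and gains a
+ # ":sha2:<64-character hex digest>" suffix (sizes assume MAX_KEY_BYTESIZE of 1024).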
+ def normalize_key(key, options)
+ truncate_key super
+ end
+
+ def truncate_key(key)
+ if key.bytesize > max_key_bytesize
+ suffix = ":sha2:#{Digest::SHA2.hexdigest(key)}"
+ truncate_at = max_key_bytesize - suffix.bytesize
+ "#{key.byteslice(0, truncate_at)}#{suffix}"
+ else
+ key
+ end
+ end
+
+ def deserialize_entry(raw_value)
+ if raw_value
+ entry = Marshal.load(raw_value) rescue raw_value
+ entry.is_a?(Entry) ? entry : Entry.new(entry)
+ end
+ end
+
+ def serialize_entry(entry)
+ Marshal.dump(entry)
+ end
+
+ def failsafe(method, returning: nil)
+ yield
+ rescue ::Redis::BaseConnectionError => e
+ handle_exception exception: e, method: method, returning: returning
+ returning
+ end
+
+ def handle_exception(exception:, method:, returning:)
+ if @error_handler
+ @error_handler.(method: method, exception: exception, returning: returning)
+ end
+ rescue => failsafe
+ warn "RedisCacheStore ignored exception in handle_exception: #{failsafe.class}: #{failsafe.message}\n #{failsafe.backtrace.join("\n ")}"
+ end
+ end
+ end
+end
diff --git a/activesupport/test/cache/behaviors/cache_increment_decrement_behavior.rb b/activesupport/test/cache/behaviors/cache_increment_decrement_behavior.rb
index 2fa2d7af88..16b7abc679 100644
--- a/activesupport/test/cache/behaviors/cache_increment_decrement_behavior.rb
+++ b/activesupport/test/cache/behaviors/cache_increment_decrement_behavior.rb
@@ -8,7 +8,9 @@ module CacheIncrementDecrementBehavior
assert_equal 2, @cache.read("foo").to_i
assert_equal 3, @cache.increment("foo")
assert_equal 3, @cache.read("foo").to_i
- assert_nil @cache.increment("bar")
+
+ missing = @cache.increment("bar")
+ assert(missing.nil? || missing == 1)
end
def test_decrement
@@ -18,6 +20,8 @@ module CacheIncrementDecrementBehavior
assert_equal 2, @cache.read("foo").to_i
assert_equal 1, @cache.decrement("foo")
assert_equal 1, @cache.read("foo").to_i
- assert_nil @cache.decrement("bar")
+
+ missing = @cache.decrement("bar")
+ assert(missing.nil? || missing == -1)
end
end
diff --git a/activesupport/test/cache/behaviors/local_cache_behavior.rb b/activesupport/test/cache/behaviors/local_cache_behavior.rb
index 8dec8090b1..f7302df4c8 100644
--- a/activesupport/test/cache/behaviors/local_cache_behavior.rb
+++ b/activesupport/test/cache/behaviors/local_cache_behavior.rb
@@ -20,7 +20,11 @@ module LocalCacheBehavior
end
def test_cleanup_clears_local_cache_but_not_remote_cache
- skip unless @cache.class.instance_methods(false).include?(:cleanup)
+ begin
+ @cache.cleanup
+ rescue NotImplementedError
+ skip
+ end
@cache.with_local_cache do
@cache.write("foo", "bar")
diff --git a/activesupport/test/cache/stores/redis_cache_store_test.rb b/activesupport/test/cache/stores/redis_cache_store_test.rb
new file mode 100644
index 0000000000..988de9207f
--- /dev/null
+++ b/activesupport/test/cache/stores/redis_cache_store_test.rb
@@ -0,0 +1,151 @@
+# frozen_string_literal: true
+
+require "abstract_unit"
+require "active_support/cache"
+require "active_support/cache/redis_cache_store"
+require_relative "../behaviors"
+
+module ActiveSupport::Cache::RedisCacheStoreTests
+ class LookupTest < ActiveSupport::TestCase
+ test "may be looked up as :redis_cache_store" do
+ assert_kind_of ActiveSupport::Cache::RedisCacheStore,
+ ActiveSupport::Cache.lookup_store(:redis_cache_store)
+ end
+ end
+
+ class InitializationTest < ActiveSupport::TestCase
+ test "omitted URL uses Redis client with default settings" do
+ assert_called_with Redis, :new, [
+ url: nil,
+ connect_timeout: 20, read_timeout: 1, write_timeout: 1,
+ reconnect_attempts: 0,
+ ] do
+ build
+ end
+ end
+
+ test "no URLs uses Redis client with default settings" do
+ assert_called_with Redis, :new, [
+ url: nil,
+ connect_timeout: 20, read_timeout: 1, write_timeout: 1,
+ reconnect_attempts: 0,
+ ] do
+ build url: []
+ end
+ end
+
+ test "singular URL uses Redis client" do
+ assert_called_with Redis, :new, [
+ url: "redis://localhost:6379/0",
+ connect_timeout: 20, read_timeout: 1, write_timeout: 1,
+ reconnect_attempts: 0,
+ ] do
+ build url: "redis://localhost:6379/0"
+ end
+ end
+
+ test "one URL uses Redis client" do
+ assert_called_with Redis, :new, [
+ url: "redis://localhost:6379/0",
+ connect_timeout: 20, read_timeout: 1, write_timeout: 1,
+ reconnect_attempts: 0,
+ ] do
+ build url: %w[ redis://localhost:6379/0 ]
+ end
+ end
+
+ test "multiple URLs uses Redis::Distributed client" do
+ assert_called_with Redis, :new, [
+ [ url: "redis://localhost:6379/0",
+ connect_timeout: 20, read_timeout: 1, write_timeout: 1,
+ reconnect_attempts: 0 ],
+ [ url: "redis://localhost:6379/1",
+ connect_timeout: 20, read_timeout: 1, write_timeout: 1,
+ reconnect_attempts: 0 ],
+ ], returns: Redis.new do
+ @cache = build url: %w[ redis://localhost:6379/0 redis://localhost:6379/1 ]
+ assert_kind_of ::Redis::Distributed, @cache.redis
+ end
+ end
+
+ test "block argument uses yielded client" do
+ block = -> { :custom_redis_client }
+ assert_called block, :call do
+ build redis: block
+ end
+ end
+
+ private
+ def build(**kwargs)
+ ActiveSupport::Cache::RedisCacheStore.new(**kwargs).tap do |cache|
+ cache.redis
+ end
+ end
+ end
+
+ class StoreTest < ActiveSupport::TestCase
+ setup do
+ @namespace = "namespace"
+
+ @cache = ActiveSupport::Cache::RedisCacheStore.new(timeout: 0.1, namespace: @namespace, expires_in: 60)
+ #@cache.logger = Logger.new($stdout) # For test debugging
+
+ # For LocalCacheBehavior tests
+ @peek = ActiveSupport::Cache::RedisCacheStore.new(timeout: 0.1, namespace: @namespace)
+ end
+
+ teardown do
+ @cache.clear
+ @cache.redis.disconnect!
+ end
+ end
+
+ class RedisCacheStoreCommonBehaviorTest < StoreTest
+ include CacheStoreBehavior
+ include CacheStoreVersionBehavior
+ include LocalCacheBehavior
+ include CacheIncrementDecrementBehavior
+ include AutoloadingCacheBehavior
+ end
+
+ # Separate test class so we can omit the namespace which causes expected,
+ # appropriate complaints about incompatible string encodings.
+ class KeyEncodingSafetyTest < StoreTest
+ include EncodedKeyCacheBehavior
+
+ setup do
+ @cache = ActiveSupport::Cache::RedisCacheStore.new(timeout: 0.1)
+ @cache.logger = nil
+ end
+ end
+
+ class StoreAPITest < StoreTest
+ end
+
+ class FailureSafetyTest < StoreTest
+ test "fetch read failure returns nil" do
+ end
+
+ test "fetch read failure does not attempt to write" do
+ end
+
+ test "write failure returns nil" do
+ end
+ end
+
+ class DeleteMatchedTest < StoreTest
+ test "deletes keys matching glob" do
+ @cache.write("foo", "bar")
+ @cache.write("fu", "baz")
+ @cache.delete_matched("foo*")
+ assert !@cache.exist?("foo")
+ assert @cache.exist?("fu")
+ end
+
+ test "fails with regexp matchers" do
+ assert_raise ArgumentError do
+ @cache.delete_matched(/OO/i)
+ end
+ end
+ end
+end