-rw-r--r--actionpack/CHANGELOG6
-rw-r--r--actionpack/README2
-rw-r--r--actionpack/lib/action_controller/caching.rb718
-rw-r--r--actionpack/lib/action_controller/caching/actions.rb148
-rw-r--r--actionpack/lib/action_controller/caching/fragments.rb153
-rw-r--r--actionpack/lib/action_controller/caching/pages.rb141
-rw-r--r--actionpack/lib/action_controller/caching/sql_cache.rb18
-rw-r--r--actionpack/lib/action_controller/caching/sweeping.rb90
-rw-r--r--actionpack/lib/action_view/helpers/cache_helper.rb16
-rw-r--r--actionpack/test/controller/caching_test.rb52
-rw-r--r--actionpack/test/controller/fragment_store_setting_test.rb47
-rw-r--r--activerecord/CHANGELOG2
-rwxr-xr-xactiverecord/lib/active_record/base.rb16
-rw-r--r--activesupport/CHANGELOG6
-rw-r--r--activesupport/lib/active_support.rb3
-rw-r--r--activesupport/lib/active_support/cache.rb121
-rw-r--r--activesupport/lib/active_support/cache/compressed_mem_cache_store.rb15
-rw-r--r--activesupport/lib/active_support/cache/drb_store.rb15
-rw-r--r--activesupport/lib/active_support/cache/file_store.rb65
-rw-r--r--activesupport/lib/active_support/cache/mem_cache_store.rb51
-rw-r--r--activesupport/lib/active_support/cache/memory_store.rb29
-rw-r--r--activesupport/lib/active_support/core_ext/date/conversions.rb1
-rw-r--r--activesupport/lib/active_support/core_ext/hash/conversions.rb2
-rw-r--r--activesupport/lib/active_support/core_ext/time/conversions.rb1
-rw-r--r--activesupport/lib/active_support/gzip.rb22
-rw-r--r--activesupport/lib/active_support/vendor.rb6
-rw-r--r--activesupport/lib/active_support/vendor/memcache-client-1.5.0/memcache.rb832
-rw-r--r--activesupport/test/caching_test.rb27
-rw-r--r--railties/CHANGELOG2
-rw-r--r--railties/lib/initializer.rb63
30 files changed, 1882 insertions, 788 deletions
diff --git a/actionpack/CHANGELOG b/actionpack/CHANGELOG
index 0f7df53f6b..7ce2fb5c5b 100644
--- a/actionpack/CHANGELOG
+++ b/actionpack/CHANGELOG
@@ -1,5 +1,11 @@
*SVN*
+* All fragment cache keys are now by default prefixed with the "views/" namespace [DHH]
+
+* Moved the caching stores from ActionController::Caching::Fragments::* to ActiveSupport::Cache::*. If you're explicitly referring to a store, like ActionController::Caching::Fragments::MemoryStore, you need to update that reference with ActiveSupport::Cache::MemoryStore [DHH]
+
+* Deprecated ActionController::Base.fragment_cache_store for ActionController::Base.cache_store [DHH]
+
* Made fragment caching in views work for rjs and builder as well #6642 [zsombor]
* Fixed rendering of partials with layout when done from site layout #9209 [antramm]
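In application terms, these CHANGELOG entries mean the cache store is now configured through cache_store= instead of fragment_cache_store=. A minimal sketch of the before/after configuration, assuming the conventional config/environment.rb location (illustrative):

    # before: fragment-specific store setting
    ActionController::Base.fragment_cache_store = :mem_cache_store, "localhost"

    # after: a single cache_store setting backed by ActiveSupport::Cache;
    # explicit store classes move to the ActiveSupport::Cache namespace
    ActionController::Base.cache_store = :mem_cache_store, "localhost"
    ActionController::Base.cache_store = ActiveSupport::Cache::MemoryStore.new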
diff --git a/actionpack/README b/actionpack/README
index 72f0f6241a..2746c3cc43 100644
--- a/actionpack/README
+++ b/actionpack/README
@@ -97,7 +97,7 @@ A short rundown of the major features:
class WeblogController < ActionController::Base
before_filter :authenticate, :cache, :audit
- after_filter { |c| c.response.body = GZip::compress(c.response.body) }
+ after_filter { |c| c.response.body = Gzip::compress(c.response.body) }
after_filter LocalizeFilter
def index
diff --git a/actionpack/lib/action_controller/caching.rb b/actionpack/lib/action_controller/caching.rb
index 024f633d7c..7929886781 100644
--- a/actionpack/lib/action_controller/caching.rb
+++ b/actionpack/lib/action_controller/caching.rb
@@ -2,6 +2,13 @@ require 'fileutils'
require 'uri'
require 'set'
+require 'action_controller/caching/pages'
+require 'action_controller/caching/actions'
+require 'action_controller/caching/sql_cache'
+require 'action_controller/caching/sweeping'
+require 'action_controller/caching/fragments'
+
+
module ActionController #:nodoc:
# Caching is a cheap way of speeding up slow applications by keeping the result of calculations, renderings, and database calls
# around for subsequent requests. Action Controller affords you three approaches in varying levels of granularity: Page, Action, Fragment.
@@ -9,701 +16,56 @@ module ActionController #:nodoc:
# You can read more about each approach and the sweeping assistance by clicking the modules below.
#
# Note: To turn off all caching and sweeping, set Base.perform_caching = false.
+ #
+ #
+ # == Caching stores
+ #
+ # All the caching stores from ActiveSupport::Cache are available as backends for Action Controller caching.
+ #
+ # Configuration examples (MemoryStore is the default):
+ #
+ # ActionController::Base.cache_store = :memory_store
+ # ActionController::Base.cache_store = :file_store, "/path/to/cache/directory"
+ # ActionController::Base.cache_store = :drb_store, "druby://localhost:9192"
+ # ActionController::Base.cache_store = :mem_cache_store, "localhost"
+ # ActionController::Base.cache_store = MyOwnStore.new("parameter")
module Caching
def self.included(base) #:nodoc:
base.class_eval do
- include Pages, Actions, Fragments
+ @@cache_store = nil
+ cattr_reader :cache_store
- if defined? ActiveRecord
- include Sweeping, SqlCache
+ # Defines the storage option for cached fragments
+ def self.cache_store=(store_option)
+ @@cache_store = ActiveSupport::Cache.lookup_store(store_option)
end
+ include Pages, Actions, Fragments
+ include Sweeping, SqlCache if defined?(ActiveRecord)
+
@@perform_caching = true
cattr_accessor :perform_caching
- end
- end
-
- # Page caching is an approach to caching where the entire action output of is stored as a HTML file that the web server
- # can serve without going through the Action Pack. This can be as much as 100 times faster than going through the process of dynamically
- # generating the content. Unfortunately, this incredible speed-up is only available to stateless pages where all visitors
- # are treated the same. Content management systems -- including weblogs and wikis -- have many pages that are a great fit
- # for this approach, but account-based systems where people log in and manipulate their own data are often less likely candidates.
- #
- # Specifying which actions to cache is done through the <tt>caches</tt> class method:
- #
- # class WeblogController < ActionController::Base
- # caches_page :show, :new
- # end
- #
- # This will generate cache files such as weblog/show/5 and weblog/new, which match the URLs used to trigger the dynamic
- # generation. This is how the web server is able pick up a cache file when it exists and otherwise let the request pass on to
- # the Action Pack to generate it.
- #
- # Expiration of the cache is handled by deleting the cached file, which results in a lazy regeneration approach where the cache
- # is not restored before another hit is made against it. The API for doing so mimics the options from url_for and friends:
- #
- # class WeblogController < ActionController::Base
- # def update
- # List.update(params[:list][:id], params[:list])
- # expire_page :action => "show", :id => params[:list][:id]
- # redirect_to :action => "show", :id => params[:list][:id]
- # end
- # end
- #
- # Additionally, you can expire caches using Sweepers that act on changes in the model to determine when a cache is supposed to be
- # expired.
- #
- # == Setting the cache directory
- #
- # The cache directory should be the document root for the web server and is set using Base.page_cache_directory = "/document/root".
- # For Rails, this directory has already been set to RAILS_ROOT + "/public".
- #
- # == Setting the cache extension
- #
- # By default, the cache extension is .html, which makes it easy for the cached files to be picked up by the web server. If you want
- # something else, like .php or .shtml, just set Base.page_cache_extension.
- module Pages
- def self.included(base) #:nodoc:
- base.extend(ClassMethods)
- base.class_eval do
- @@page_cache_directory = defined?(RAILS_ROOT) ? "#{RAILS_ROOT}/public" : ""
- cattr_accessor :page_cache_directory
-
- @@page_cache_extension = '.html'
- cattr_accessor :page_cache_extension
- end
- end
-
- module ClassMethods
- # Expires the page that was cached with the +path+ as a key. Example:
- # expire_page "/lists/show"
- def expire_page(path)
- return unless perform_caching
-
- benchmark "Expired page: #{page_cache_file(path)}" do
- File.delete(page_cache_path(path)) if File.exist?(page_cache_path(path))
- end
- end
-
- # Manually cache the +content+ in the key determined by +path+. Example:
- # cache_page "I'm the cached content", "/lists/show"
- def cache_page(content, path)
- return unless perform_caching
-
- benchmark "Cached page: #{page_cache_file(path)}" do
- FileUtils.makedirs(File.dirname(page_cache_path(path)))
- File.open(page_cache_path(path), "wb+") { |f| f.write(content) }
- end
- end
- # Caches the +actions+ using the page-caching approach that'll store the cache in a path within the page_cache_directory that
- # matches the triggering url.
- def caches_page(*actions)
- return unless perform_caching
- actions = actions.map(&:to_s)
- after_filter { |c| c.cache_page if actions.include?(c.action_name) }
+ def self.cache_configured?
+ perform_caching && cache_store
end
-
- private
- def page_cache_file(path)
- name = (path.empty? || path == "/") ? "/index" : URI.unescape(path.chomp('/'))
- name << page_cache_extension unless (name.split('/').last || name).include? '.'
- return name
- end
-
- def page_cache_path(path)
- page_cache_directory + page_cache_file(path)
- end
end
-
- # Expires the page that was cached with the +options+ as a key. Example:
- # expire_page :controller => "lists", :action => "show"
- def expire_page(options = {})
- return unless perform_caching
-
- if options.is_a?(Hash)
- if options[:action].is_a?(Array)
- options[:action].dup.each do |action|
- self.class.expire_page(url_for(options.merge(:only_path => true, :skip_relative_url_root => true, :action => action)))
- end
- else
- self.class.expire_page(url_for(options.merge(:only_path => true, :skip_relative_url_root => true)))
- end
- else
- self.class.expire_page(options)
- end
- end
-
- # Manually cache the +content+ in the key determined by +options+. If no content is provided, the contents of response.body is used
- # If no options are provided, the requested url is used. Example:
- # cache_page "I'm the cached content", :controller => "lists", :action => "show"
- def cache_page(content = nil, options = nil)
- return unless perform_caching && caching_allowed
-
- path = case options
- when Hash
- url_for(options.merge(:only_path => true, :skip_relative_url_root => true, :format => params[:format]))
- when String
- options
- else
- request.path
- end
-
- self.class.cache_page(content || response.body, path)
- end
-
- private
- def caching_allowed
- request.get? && response.headers['Status'].to_i == 200
- end
end
- # Action caching is similar to page caching by the fact that the entire output of the response is cached, but unlike page caching,
- # every request still goes through the Action Pack. The key benefit of this is that filters are run before the cache is served, which
- # allows for authentication and other restrictions on whether someone is allowed to see the cache. Example:
- #
- # class ListsController < ApplicationController
- # before_filter :authenticate, :except => :public
- # caches_page :public
- # caches_action :show, :feed
- # end
- #
- # In this example, the public action doesn't require authentication, so it's possible to use the faster page caching method. But both the
- # show and feed action are to be shielded behind the authenticate filter, so we need to implement those as action caches.
- #
- # Action caching internally uses the fragment caching and an around filter to do the job. The fragment cache is named according to both
- # the current host and the path. So a page that is accessed at http://david.somewhere.com/lists/show/1 will result in a fragment named
- # "david.somewhere.com/lists/show/1". This allows the cacher to differentiate between "david.somewhere.com/lists/" and
- # "jamis.somewhere.com/lists/" -- which is a helpful way of assisting the subdomain-as-account-key pattern.
- #
- # Different representations of the same resource, e.g. <tt>http://david.somewhere.com/lists</tt> and <tt>http://david.somewhere.com/lists.xml</tt>
- # are treated like separate requests and so are cached separately. Keep in mind when expiring an action cache that <tt>:action => 'lists'</tt> is not the same
- # as <tt>:action => 'list', :format => :xml</tt>.
- #
- # You can set modify the default action cache path by passing a :cache_path option. This will be passed directly to ActionCachePath.path_for. This is handy
- # for actions with multiple possible routes that should be cached differently. If a block is given, it is called with the current controller instance.
- #
- # class ListsController < ApplicationController
- # before_filter :authenticate, :except => :public
- # caches_page :public
- # caches_action :show, :cache_path => { :project => 1 }
- # caches_action :show, :cache_path => Proc.new { |controller|
- # controller.params[:user_id] ?
- # controller.send(:user_list_url, c.params[:user_id], c.params[:id]) :
- # controller.send(:list_url, c.params[:id]) }
- # end
- module Actions
- def self.included(base) #:nodoc:
- base.extend(ClassMethods)
- base.class_eval do
- attr_accessor :rendered_action_cache, :action_cache_path
- alias_method_chain :protected_instance_variables, :action_caching
- end
- end
-
- module ClassMethods
- # Declares that +actions+ should be cached.
- # See ActionController::Caching::Actions for details.
- def caches_action(*actions)
- return unless perform_caching
- around_filter(ActionCacheFilter.new(*actions))
- end
- end
-
- def protected_instance_variables_with_action_caching
- protected_instance_variables_without_action_caching + %w(@action_cache_path)
- end
-
- def expire_action(options = {})
- return unless perform_caching
- if options[:action].is_a?(Array)
- options[:action].dup.each do |action|
- expire_fragment(ActionCachePath.path_for(self, options.merge({ :action => action })))
- end
+ protected
+ # Convenience accessor
+ def cache(key, options = nil, &block)
+ if cache_configured?
+ cache_store.fetch(ActiveSupport::Cache.expand_cache_key(key, :controller), options, &block)
else
- expire_fragment(ActionCachePath.path_for(self, options))
+ yield
end
end
- class ActionCacheFilter #:nodoc:
- def initialize(*actions, &block)
- @options = actions.extract_options!
- @actions = Set.new actions
- end
-
- def before(controller)
- return unless @actions.include?(controller.action_name.intern)
- cache_path = ActionCachePath.new(controller, path_options_for(controller, @options))
- if cache = controller.read_fragment(cache_path.path)
- controller.rendered_action_cache = true
- set_content_type!(controller, cache_path.extension)
- controller.send!(:render_for_text, cache)
- false
- else
- controller.action_cache_path = cache_path
- end
- end
-
- def after(controller)
- return if !@actions.include?(controller.action_name.intern) || controller.rendered_action_cache || !caching_allowed(controller)
- controller.write_fragment(controller.action_cache_path.path, controller.response.body)
- end
- private
- def set_content_type!(controller, extension)
- controller.response.content_type = Mime::Type.lookup_by_extension(extension).to_s if extension
- end
-
- def path_options_for(controller, options)
- ((path_options = options[:cache_path]).respond_to?(:call) ? path_options.call(controller) : path_options) || {}
- end
-
- def caching_allowed(controller)
- controller.request.get? && controller.response.headers['Status'].to_i == 200
- end
- end
-
- class ActionCachePath
- attr_reader :path, :extension
-
- class << self
- def path_for(controller, options)
- new(controller, options).path
- end
- end
-
- def initialize(controller, options = {})
- @extension = extract_extension(controller.request.path)
- path = controller.url_for(options).split('://').last
- normalize!(path)
- add_extension!(path, @extension)
- @path = URI.unescape(path)
- end
-
- private
- def normalize!(path)
- path << 'index' if path[-1] == ?/
- end
-
- def add_extension!(path, extension)
- path << ".#{extension}" if extension
- end
-
- def extract_extension(file_path)
- # Don't want just what comes after the last '.' to accommodate multi part extensions
- # such as tar.gz.
- file_path[/^[^.]+\.(.+)$/, 1]
- end
+ private
+ def cache_configured?
+ self.class.cache_configured?
end
- end
-
- # Fragment caching is used for caching various blocks within templates without caching the entire action as a whole. This is useful when
- # certain elements of an action change frequently or depend on complicated state while other parts rarely change or can be shared amongst multiple
- # parties. The caching is doing using the cache helper available in the Action View. A template with caching might look something like:
- #
- # <b>Hello <%= @name %></b>
- # <% cache do %>
- # All the topics in the system:
- # <%= render :partial => "topic", :collection => Topic.find(:all) %>
- # <% end %>
- #
- # This cache will bind to the name of the action that called it, so if this code was part of the view for the topics/list action, you would
- # be able to invalidate it using <tt>expire_fragment(:controller => "topics", :action => "list")</tt>.
- #
- # This default behavior is of limited use if you need to cache multiple fragments per action or if the action itself is cached using
- # <tt>caches_action</tt>, so we also have the option to qualify the name of the cached fragment with something like:
- #
- # <% cache(:action => "list", :action_suffix => "all_topics") do %>
- #
- # That would result in a name such as "/topics/list/all_topics", avoiding conflicts with the action cache and with any fragments that use a
- # different suffix. Note that the URL doesn't have to really exist or be callable - the url_for system is just used to generate unique
- # cache names that we can refer to when we need to expire the cache.
- #
- # The expiration call for this example is:
- #
- # expire_fragment(:controller => "topics", :action => "list", :action_suffix => "all_topics")
- #
- # == Fragment stores
- #
- # By default, cached fragments are stored in memory. The available store options are:
- #
- # * FileStore: Keeps the fragments on disk in the +cache_path+, which works well for all types of environments and allows all
- # processes running from the same application directory to access the cached content.
- # * MemoryStore: Keeps the fragments in memory, which is fine for WEBrick and for FCGI (if you don't care that each FCGI process holds its
- # own fragment store). It's not suitable for CGI as the process is thrown away at the end of each request. It can potentially also take
- # up a lot of memory since each process keeps all the caches in memory.
- # * DRbStore: Keeps the fragments in the memory of a separate, shared DRb process. This works for all environments and only keeps one cache
- # around for all processes, but requires that you run and manage a separate DRb process.
- # * MemCacheStore: Works like DRbStore, but uses Danga's MemCache instead.
- # Requires the ruby-memcache library: gem install ruby-memcache.
- #
- # Configuration examples (MemoryStore is the default):
- #
- # ActionController::Base.fragment_cache_store = :memory_store
- # ActionController::Base.fragment_cache_store = :file_store, "/path/to/cache/directory"
- # ActionController::Base.fragment_cache_store = :drb_store, "druby://localhost:9192"
- # ActionController::Base.fragment_cache_store = :mem_cache_store, "localhost"
- # ActionController::Base.fragment_cache_store = MyOwnStore.new("parameter")
- module Fragments
- def self.included(base) #:nodoc:
- base.class_eval do
- @@fragment_cache_store = MemoryStore.new
- cattr_reader :fragment_cache_store
-
- # Defines the storage option for cached fragments
- def self.fragment_cache_store=(store_option)
- store, *parameters = *([ store_option ].flatten)
- @@fragment_cache_store = if store.is_a?(Symbol)
- store_class_name = (store == :drb_store ? "DRbStore" : store.to_s.camelize)
- store_class = ActionController::Caching::Fragments.const_get(store_class_name)
- store_class.new(*parameters)
- else
- store
- end
- end
- end
- end
-
- # Given a name (as described in <tt>expire_fragment</tt>), returns a key suitable for use in reading,
- # writing, or expiring a cached fragment. If the name is a hash, the generated name is the return
- # value of url_for on that hash (without the protocol).
- def fragment_cache_key(name)
- name.is_a?(Hash) ? url_for(name).split("://").last : name
- end
-
- def fragment_for(block, name = {}, options = nil) #:nodoc:
- unless perform_caching then block.call; return end
-
- buffer = yield
-
- if cache = read_fragment(name, options)
- buffer.concat(cache)
- else
- pos = buffer.length
- block.call
- write_fragment(name, buffer[pos..-1], options)
- end
- end
-
- # Called by CacheHelper#cache
- def cache_rxml_fragment(block, name = {}, options = nil) #:nodoc:
- fragment_for(block, name, options) do
- eval('xml.target!', block.binding)
- end
- end
-
- # Called by CacheHelper#cache
- def cache_rjs_fragment(block, name = {}, options = nil) #:nodoc:
- fragment_for(block, name, options) do
- begin
- debug_mode, ActionView::Base.debug_rjs = ActionView::Base.debug_rjs, false
- eval('page.to_s', block.binding)
- ensure
- ActionView::Base.debug_rjs = debug_mode
- end
- end
- end
-
- # Called by CacheHelper#cache
- def cache_erb_fragment(block, name = {}, options = nil) #:nodoc:
- fragment_for(block, name, options) do
- eval(ActionView::Base.erb_variable, block.binding)
- end
- end
-
-
- # Writes <tt>content</tt> to the location signified by <tt>name</tt> (see <tt>expire_fragment</tt> for acceptable formats)
- def write_fragment(name, content, options = nil)
- return unless perform_caching
-
- key = fragment_cache_key(name)
- self.class.benchmark "Cached fragment: #{key}" do
- fragment_cache_store.write(key, content, options)
- end
- content
- end
-
- # Reads a cached fragment from the location signified by <tt>name</tt> (see <tt>expire_fragment</tt> for acceptable formats)
- def read_fragment(name, options = nil)
- return unless perform_caching
-
- key = fragment_cache_key(name)
- self.class.benchmark "Fragment read: #{key}" do
- fragment_cache_store.read(key, options)
- end
- end
-
- # Name can take one of three forms:
- # * String: This would normally take the form of a path like "pages/45/notes"
- # * Hash: Is treated as an implicit call to url_for, like { :controller => "pages", :action => "notes", :id => 45 }
- # * Regexp: Will destroy all the matched fragments, example:
- # %r{pages/\d*/notes}
- # Ensure you do not specify start and finish in the regex (^$) because
- # the actual filename matched looks like ./cache/filename/path.cache
- # Regexp expiration is only supported on caches that can iterate over
- # all keys (unlike memcached).
- def expire_fragment(name, options = nil)
- return unless perform_caching
-
- key = fragment_cache_key(name)
-
- if key.is_a?(Regexp)
- self.class.benchmark "Expired fragments matching: #{key.source}" do
- fragment_cache_store.delete_matched(key, options)
- end
- else
- self.class.benchmark "Expired fragment: #{key}" do
- fragment_cache_store.delete(key, options)
- end
- end
- end
-
-
- class UnthreadedMemoryStore #:nodoc:
- def initialize #:nodoc:
- @data = {}
- end
-
- def read(name, options=nil) #:nodoc:
- @data[name]
- end
-
- def write(name, value, options=nil) #:nodoc:
- @data[name] = value
- end
-
- def delete(name, options=nil) #:nodoc:
- @data.delete(name)
- end
-
- def delete_matched(matcher, options=nil) #:nodoc:
- @data.delete_if { |k,v| k =~ matcher }
- end
- end
-
- module ThreadSafety #:nodoc:
- def read(name, options=nil) #:nodoc:
- @mutex.synchronize { super }
- end
-
- def write(name, value, options=nil) #:nodoc:
- @mutex.synchronize { super }
- end
-
- def delete(name, options=nil) #:nodoc:
- @mutex.synchronize { super }
- end
-
- def delete_matched(matcher, options=nil) #:nodoc:
- @mutex.synchronize { super }
- end
- end
-
- class MemoryStore < UnthreadedMemoryStore #:nodoc:
- def initialize #:nodoc:
- super
- if ActionController::Base.allow_concurrency
- @mutex = Mutex.new
- MemoryStore.module_eval { include ThreadSafety }
- end
- end
- end
-
- class DRbStore < MemoryStore #:nodoc:
- attr_reader :address
-
- def initialize(address = 'druby://localhost:9192')
- super()
- @address = address
- @data = DRbObject.new(nil, address)
- end
- end
-
- begin
- require_library_or_gem 'memcache'
- class MemCacheStore < MemoryStore #:nodoc:
- attr_reader :addresses
-
- def initialize(*addresses)
- super()
- addresses = addresses.flatten
- addresses = ["localhost"] if addresses.empty?
- @addresses = addresses
- @data = MemCache.new(*addresses)
- end
- end
- rescue LoadError
- # MemCache wasn't available so neither can the store be
- end
-
- class UnthreadedFileStore #:nodoc:
- attr_reader :cache_path
-
- def initialize(cache_path)
- @cache_path = cache_path
- end
-
- def write(name, value, options = nil) #:nodoc:
- ensure_cache_path(File.dirname(real_file_path(name)))
- File.open(real_file_path(name), "wb+") { |f| f.write(value) }
- rescue => e
- Base.logger.error "Couldn't create cache directory: #{name} (#{e.message})" if Base.logger
- end
-
- def read(name, options = nil) #:nodoc:
- File.open(real_file_path(name), 'rb') { |f| f.read } rescue nil
- end
-
- def delete(name, options) #:nodoc:
- File.delete(real_file_path(name))
- rescue SystemCallError => e
- # If there's no cache, then there's nothing to complain about
- end
-
- def delete_matched(matcher, options) #:nodoc:
- search_dir(@cache_path) do |f|
- if f =~ matcher
- begin
- File.delete(f)
- rescue SystemCallError => e
- # If there's no cache, then there's nothing to complain about
- end
- end
- end
- end
-
- private
- def real_file_path(name)
- '%s/%s.cache' % [@cache_path, name.gsub('?', '.').gsub(':', '.')]
- end
-
- def ensure_cache_path(path)
- FileUtils.makedirs(path) unless File.exist?(path)
- end
-
- def search_dir(dir, &callback)
- Dir.foreach(dir) do |d|
- next if d == "." || d == ".."
- name = File.join(dir, d)
- if File.directory?(name)
- search_dir(name, &callback)
- else
- callback.call name
- end
- end
- end
- end
-
- class FileStore < UnthreadedFileStore #:nodoc:
- def initialize(cache_path)
- super(cache_path)
- if ActionController::Base.allow_concurrency
- @mutex = Mutex.new
- FileStore.module_eval { include ThreadSafety }
- end
- end
- end
- end
-
- # Sweepers are the terminators of the caching world and responsible for expiring caches when model objects change.
- # They do this by being half-observers, half-filters and implementing callbacks for both roles. A Sweeper example:
- #
- # class ListSweeper < ActionController::Caching::Sweeper
- # observe List, Item
- #
- # def after_save(record)
- # list = record.is_a?(List) ? record : record.list
- # expire_page(:controller => "lists", :action => %w( show public feed ), :id => list.id)
- # expire_action(:controller => "lists", :action => "all")
- # list.shares.each { |share| expire_page(:controller => "lists", :action => "show", :id => share.url_key) }
- # end
- # end
- #
- # The sweeper is assigned in the controllers that wish to have its job performed using the <tt>cache_sweeper</tt> class method:
- #
- # class ListsController < ApplicationController
- # caches_action :index, :show, :public, :feed
- # cache_sweeper :list_sweeper, :only => [ :edit, :destroy, :share ]
- # end
- #
- # In the example above, four actions are cached and three actions are responsible for expiring those caches.
- module Sweeping
- def self.included(base) #:nodoc:
- base.extend(ClassMethods)
- end
-
- module ClassMethods #:nodoc:
- def cache_sweeper(*sweepers)
- return unless perform_caching
- configuration = sweepers.extract_options!
- sweepers.each do |sweeper|
- ActiveRecord::Base.observers << sweeper if defined?(ActiveRecord) and defined?(ActiveRecord::Base)
- sweeper_instance = Object.const_get(Inflector.classify(sweeper)).instance
-
- if sweeper_instance.is_a?(Sweeper)
- around_filter(sweeper_instance, :only => configuration[:only])
- else
- after_filter(sweeper_instance, :only => configuration[:only])
- end
- end
- end
- end
- end
-
- if defined?(ActiveRecord) and defined?(ActiveRecord::Observer)
- class Sweeper < ActiveRecord::Observer #:nodoc:
- attr_accessor :controller
-
- def before(controller)
- self.controller = controller
- callback(:before)
- end
-
- def after(controller)
- callback(:after)
- # Clean up, so that the controller can be collected after this request
- self.controller = nil
- end
-
- protected
- # gets the action cache path for the given options.
- def action_path_for(options)
- ActionController::Caching::Actions::ActionCachePath.path_for(controller, options)
- end
-
- # Retrieve instance variables set in the controller.
- def assigns(key)
- controller.instance_variable_get("@#{key}")
- end
-
- private
- def callback(timing)
- controller_callback_method_name = "#{timing}_#{controller.controller_name.underscore}"
- action_callback_method_name = "#{controller_callback_method_name}_#{controller.action_name}"
-
- send!(controller_callback_method_name) if respond_to?(controller_callback_method_name, true)
- send!(action_callback_method_name) if respond_to?(action_callback_method_name, true)
- end
-
- def method_missing(method, *arguments)
- return if @controller.nil?
- @controller.send!(method, *arguments)
- end
- end
- end
-
- module SqlCache
- def self.included(base) #:nodoc:
- if defined?(ActiveRecord) && ActiveRecord::Base.respond_to?(:cache)
- base.alias_method_chain :perform_action, :caching
- end
- end
-
- def perform_action_with_caching
- ActiveRecord::Base.cache do
- perform_action_without_caching
- end
- end
- end
end
-end
+end
\ No newline at end of file
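The rewritten module above resolves cache_store= through ActiveSupport::Cache.lookup_store and adds a protected cache helper that wraps cache_store.fetch under the :controller namespace. A minimal sketch of a controller using it, assuming an illustrative Product model:

    class ProductsController < ActionController::Base
      def index
        # returns the cached value when present; otherwise runs the block,
        # stores its result in the configured cache_store, and returns it
        @product_names = cache("all_product_names") do
          Product.find(:all).map(&:name)
        end
      end
    end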
diff --git a/actionpack/lib/action_controller/caching/actions.rb b/actionpack/lib/action_controller/caching/actions.rb
new file mode 100644
index 0000000000..4410e47eb3
--- /dev/null
+++ b/actionpack/lib/action_controller/caching/actions.rb
@@ -0,0 +1,148 @@
+require 'set'
+
+module ActionController #:nodoc:
+ module Caching
+ # Action caching is similar to page caching in that the entire output of the response is cached, but unlike page caching,
+ # every request still goes through the Action Pack. The key benefit of this is that filters are run before the cache is served, which
+ # allows for authentication and other restrictions on whether someone is allowed to see the cache. Example:
+ #
+ # class ListsController < ApplicationController
+ # before_filter :authenticate, :except => :public
+ # caches_page :public
+ # caches_action :show, :feed
+ # end
+ #
+ # In this example, the public action doesn't require authentication, so it's possible to use the faster page caching method. But both the
+ # show and feed actions are to be shielded behind the authenticate filter, so we need to implement those as action caches.
+ #
+ # Action caching internally uses fragment caching and an around filter to do the job. The fragment cache is named according to both
+ # the current host and the path. So a page that is accessed at http://david.somewhere.com/lists/show/1 will result in a fragment named
+ # "david.somewhere.com/lists/show/1". This allows the cacher to differentiate between "david.somewhere.com/lists/" and
+ # "jamis.somewhere.com/lists/" -- which is a helpful way of assisting the subdomain-as-account-key pattern.
+ #
+ # Different representations of the same resource, e.g. <tt>http://david.somewhere.com/lists</tt> and <tt>http://david.somewhere.com/lists.xml</tt>
+ # are treated like separate requests and so are cached separately. Keep in mind when expiring an action cache that <tt>:action => 'lists'</tt> is not the same
+ # as <tt>:action => 'list', :format => :xml</tt>.
+ #
+ # You can modify the default action cache path by passing a :cache_path option. This will be passed directly to ActionCachePath.path_for. This is handy
+ # for actions with multiple possible routes that should be cached differently. If a block is given, it is called with the current controller instance.
+ #
+ # class ListsController < ApplicationController
+ # before_filter :authenticate, :except => :public
+ # caches_page :public
+ # caches_action :show, :cache_path => { :project => 1 }
+ # caches_action :show, :cache_path => Proc.new { |controller|
+ # controller.params[:user_id] ?
+ # controller.send(:user_list_url, controller.params[:user_id], controller.params[:id]) :
+ # controller.send(:list_url, controller.params[:id]) }
+ # end
+ module Actions
+ def self.included(base) #:nodoc:
+ base.extend(ClassMethods)
+ base.class_eval do
+ attr_accessor :rendered_action_cache, :action_cache_path
+ alias_method_chain :protected_instance_variables, :action_caching
+ end
+ end
+
+ module ClassMethods
+ # Declares that +actions+ should be cached.
+ # See ActionController::Caching::Actions for details.
+ def caches_action(*actions)
+ return unless cache_configured?
+ around_filter(ActionCacheFilter.new(*actions))
+ end
+ end
+
+ protected
+ def protected_instance_variables_with_action_caching
+ protected_instance_variables_without_action_caching + %w(@action_cache_path)
+ end
+
+ def expire_action(options = {})
+ return unless cache_configured?
+
+ if options[:action].is_a?(Array)
+ options[:action].dup.each do |action|
+ expire_fragment(ActionCachePath.path_for(self, options.merge({ :action => action })))
+ end
+ else
+ expire_fragment(ActionCachePath.path_for(self, options))
+ end
+ end
+
+ class ActionCacheFilter #:nodoc:
+ def initialize(*actions, &block)
+ @options = actions.extract_options!
+ @actions = Set.new(actions)
+ end
+
+ def before(controller)
+ return unless @actions.include?(controller.action_name.intern)
+
+ cache_path = ActionCachePath.new(controller, path_options_for(controller, @options))
+
+ if cache = controller.read_fragment(cache_path.path)
+ controller.rendered_action_cache = true
+ set_content_type!(controller, cache_path.extension)
+ controller.send!(:render_for_text, cache)
+ false
+ else
+ controller.action_cache_path = cache_path
+ end
+ end
+
+ def after(controller)
+ return if !@actions.include?(controller.action_name.intern) || controller.rendered_action_cache || !caching_allowed(controller)
+ controller.write_fragment(controller.action_cache_path.path, controller.response.body)
+ end
+
+ private
+ def set_content_type!(controller, extension)
+ controller.response.content_type = Mime::Type.lookup_by_extension(extension).to_s if extension
+ end
+
+ def path_options_for(controller, options)
+ ((path_options = options[:cache_path]).respond_to?(:call) ? path_options.call(controller) : path_options) || {}
+ end
+
+ def caching_allowed(controller)
+ controller.request.get? && controller.response.headers['Status'].to_i == 200
+ end
+ end
+
+ class ActionCachePath
+ attr_reader :path, :extension
+
+ class << self
+ def path_for(controller, options)
+ new(controller, options).path
+ end
+ end
+
+ def initialize(controller, options = {})
+ @extension = extract_extension(controller.request.path)
+ path = controller.url_for(options).split('://').last
+ normalize!(path)
+ add_extension!(path, @extension)
+ @path = URI.unescape(path)
+ end
+
+ private
+ def normalize!(path)
+ path << 'index' if path[-1] == ?/
+ end
+
+ def add_extension!(path, extension)
+ path << ".#{extension}" if extension
+ end
+
+ def extract_extension(file_path)
+ # Don't want just what comes after the last '.' to accommodate multi part extensions
+ # such as tar.gz.
+ file_path[/^[^.]+\.(.+)$/, 1]
+ end
+ end
+ end
+ end
+end
\ No newline at end of file
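A sketch of the :cache_path option documented in this file, with the Proc's block parameter used consistently (user_list_url and list_url are assumed named routes, shown only for illustration):

    class ListsController < ApplicationController
      before_filter :authenticate, :except => :public
      caches_page   :public
      # cache the show action under a per-user path when a user_id is given
      caches_action :show, :cache_path => Proc.new { |controller|
        controller.params[:user_id] ?
          controller.send(:user_list_url, controller.params[:user_id], controller.params[:id]) :
          controller.send(:list_url, controller.params[:id])
      }

      def show
        @list = List.find(params[:id])
      end
    end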
diff --git a/actionpack/lib/action_controller/caching/fragments.rb b/actionpack/lib/action_controller/caching/fragments.rb
new file mode 100644
index 0000000000..868af19780
--- /dev/null
+++ b/actionpack/lib/action_controller/caching/fragments.rb
@@ -0,0 +1,153 @@
+module ActionController #:nodoc:
+ module Caching
+ # Fragment caching is used for caching various blocks within templates without caching the entire action as a whole. This is useful when
+ # certain elements of an action change frequently or depend on complicated state while other parts rarely change or can be shared amongst multiple
+ # parties. The caching is done using the cache helper available in Action View. A template with caching might look something like:
+ #
+ # <b>Hello <%= @name %></b>
+ # <% cache do %>
+ # All the topics in the system:
+ # <%= render :partial => "topic", :collection => Topic.find(:all) %>
+ # <% end %>
+ #
+ # This cache will bind to the name of the action that called it, so if this code was part of the view for the topics/list action, you would
+ # be able to invalidate it using <tt>expire_fragment(:controller => "topics", :action => "list")</tt>.
+ #
+ # This default behavior is of limited use if you need to cache multiple fragments per action or if the action itself is cached using
+ # <tt>caches_action</tt>, so we also have the option to qualify the name of the cached fragment with something like:
+ #
+ # <% cache(:action => "list", :action_suffix => "all_topics") do %>
+ #
+ # That would result in a name such as "/topics/list/all_topics", avoiding conflicts with the action cache and with any fragments that use a
+ # different suffix. Note that the URL doesn't have to really exist or be callable - the url_for system is just used to generate unique
+ # cache names that we can refer to when we need to expire the cache.
+ #
+ # The expiration call for this example is:
+ #
+ # expire_fragment(:controller => "topics", :action => "list", :action_suffix => "all_topics")
+ module Fragments
+ def self.included(base) #:nodoc:
+ base.class_eval do
+ class << self
+ def fragment_cache_store=(store_option) #:nodoc:
+ ActiveSupport::Deprecation.warn('fragment_cache_store= is deprecated; use cache_store= instead')
+ self.cache_store = store_option
+ end
+
+ def fragment_cache_store #:nodoc:
+ ActiveSupport::Deprecation.warn('fragment_cache_store is deprecated; use cache_store instead')
+ cache_store
+ end
+ end
+
+ def fragment_cache_store=(store_option) #:nodoc:
+ ActiveSupport::Deprecation.warn('fragment_cache_store= is deprecated; use cache_store= instead')
+ self.cache_store = store_option
+ end
+
+ def fragment_cache_store #:nodoc:
+ ActiveSupport::Deprecation.warn('fragment_cache_store is deprecated; use cache_store instead')
+ cache_store
+ end
+ end
+ end
+
+ # Given a key (as described in <tt>expire_fragment</tt>), returns a key suitable for use in reading,
+ # writing, or expiring a cached fragment. If the key is a hash, the generated key is the return
+ # value of url_for on that hash (without the protocol). All keys are prefixed with "views/" and expanded with
+ # ActiveSupport::Cache.expand_cache_key.
+ def fragment_cache_key(key)
+ ActiveSupport::Cache.expand_cache_key(key.is_a?(Hash) ? url_for(key).split("://").last : key, :views)
+ end
+
+ def fragment_for(block, name = {}, options = nil) #:nodoc:
+ unless perform_caching then block.call; return end
+
+ buffer = yield
+
+ if cache = read_fragment(name, options)
+ buffer.concat(cache)
+ else
+ pos = buffer.length
+ block.call
+ write_fragment(name, buffer[pos..-1], options)
+ end
+ end
+
+ # Called by CacheHelper#cache
+ def cache_rxml_fragment(block, name = {}, options = nil) #:nodoc:
+ fragment_for(block, name, options) do
+ eval('xml.target!', block.binding)
+ end
+ end
+
+ # Called by CacheHelper#cache
+ def cache_rjs_fragment(block, name = {}, options = nil) #:nodoc:
+ fragment_for(block, name, options) do
+ begin
+ debug_mode, ActionView::Base.debug_rjs = ActionView::Base.debug_rjs, false
+ eval('page.to_s', block.binding)
+ ensure
+ ActionView::Base.debug_rjs = debug_mode
+ end
+ end
+ end
+
+ # Called by CacheHelper#cache
+ def cache_erb_fragment(block, name = {}, options = nil) #:nodoc:
+ fragment_for(block, name, options) do
+ eval(ActionView::Base.erb_variable, block.binding)
+ end
+ end
+
+ # Writes <tt>content</tt> to the location signified by <tt>key</tt> (see <tt>expire_fragment</tt> for acceptable formats)
+ def write_fragment(key, content, options = nil)
+ return unless cache_configured?
+
+ key = fragment_cache_key(key)
+
+ self.class.benchmark "Cached fragment miss: #{key}" do
+ cache_store.write(key, content, options)
+ end
+
+ content
+ end
+
+ # Reads a cached fragment from the location signified by <tt>key</tt> (see <tt>expire_fragment</tt> for acceptable formats)
+ def read_fragment(key, options = nil)
+ return unless cache_configured?
+
+ key = fragment_cache_key(key)
+
+ self.class.benchmark "Cached fragment hit: #{key}" do
+ cache_store.read(key, options)
+ end
+ end
+
+ # Name can take one of three forms:
+ # * String: This would normally take the form of a path like "pages/45/notes"
+ # * Hash: Is treated as an implicit call to url_for, like { :controller => "pages", :action => "notes", :id => 45 }
+ # * Regexp: Will destroy all the matched fragments, example:
+ # %r{pages/\d*/notes}
+ # Ensure you do not specify start and finish in the regex (^$) because
+ # the actual filename matched looks like ./cache/filename/path.cache
+ # Regexp expiration is only supported on caches that can iterate over
+ # all keys (unlike memcached).
+ def expire_fragment(key, options = nil)
+ return unless cache_configured?
+
+ key = key.is_a?(Regexp) ? key : fragment_cache_key(key)
+
+ if key.is_a?(Regexp)
+ self.class.benchmark "Expired fragments matching: #{key.source}" do
+ cache_store.delete_matched(key, options)
+ end
+ else
+ self.class.benchmark "Expired fragment: #{key}" do
+ cache_store.delete(key, options)
+ end
+ end
+ end
+ end
+ end
+end
\ No newline at end of file
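A short sketch of how the view-side cache call and expire_fragment pair up under the new key scheme; the "views/" prefix is added internally by fragment_cache_key, so callers keep passing plain names (the topics example mirrors the documentation above, and the resulting key shown is illustrative since the host comes from the request):

    <%# app/views/topics/list.rhtml %>
    <% cache(:action => "list", :action_suffix => "all_topics") do %>
      <%= render :partial => "topic", :collection => Topic.find(:all) %>
    <% end %>

    # expiring from a controller action or sweeper:
    expire_fragment(:controller => "topics", :action => "list", :action_suffix => "all_topics")
    # the underlying store key is namespaced, e.g. "views/<host>/topics/list/all_topics"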
diff --git a/actionpack/lib/action_controller/caching/pages.rb b/actionpack/lib/action_controller/caching/pages.rb
new file mode 100644
index 0000000000..4307f39583
--- /dev/null
+++ b/actionpack/lib/action_controller/caching/pages.rb
@@ -0,0 +1,141 @@
+require 'fileutils'
+require 'uri'
+
+module ActionController #:nodoc:
+ module Caching
+ # Page caching is an approach to caching where the entire output of an action is stored as an HTML file that the web server
+ # can serve without going through the Action Pack. This can be as much as 100 times faster than going through the process of dynamically
+ # generating the content. Unfortunately, this incredible speed-up is only available to stateless pages where all visitors
+ # are treated the same. Content management systems -- including weblogs and wikis -- have many pages that are a great fit
+ # for this approach, but account-based systems where people log in and manipulate their own data are often less likely candidates.
+ #
+ # Specifying which actions to cache is done through the <tt>caches</tt> class method:
+ #
+ # class WeblogController < ActionController::Base
+ # caches_page :show, :new
+ # end
+ #
+ # This will generate cache files such as weblog/show/5 and weblog/new, which match the URLs used to trigger the dynamic
+ # generation. This is how the web server is able to pick up a cache file when it exists and otherwise let the request pass on to
+ # the Action Pack to generate it.
+ #
+ # Expiration of the cache is handled by deleting the cached file, which results in a lazy regeneration approach where the cache
+ # is not restored before another hit is made against it. The API for doing so mimics the options from url_for and friends:
+ #
+ # class WeblogController < ActionController::Base
+ # def update
+ # List.update(params[:list][:id], params[:list])
+ # expire_page :action => "show", :id => params[:list][:id]
+ # redirect_to :action => "show", :id => params[:list][:id]
+ # end
+ # end
+ #
+ # Additionally, you can expire caches using Sweepers that act on changes in the model to determine when a cache is supposed to be
+ # expired.
+ #
+ # == Setting the cache directory
+ #
+ # The cache directory should be the document root for the web server and is set using Base.page_cache_directory = "/document/root".
+ # For Rails, this directory has already been set to RAILS_ROOT + "/public".
+ #
+ # == Setting the cache extension
+ #
+ # By default, the cache extension is .html, which makes it easy for the cached files to be picked up by the web server. If you want
+ # something else, like .php or .shtml, just set Base.page_cache_extension.
+ module Pages
+ def self.included(base) #:nodoc:
+ base.extend(ClassMethods)
+ base.class_eval do
+ @@page_cache_directory = defined?(RAILS_ROOT) ? "#{RAILS_ROOT}/public" : ""
+ cattr_accessor :page_cache_directory
+
+ @@page_cache_extension = '.html'
+ cattr_accessor :page_cache_extension
+ end
+ end
+
+ module ClassMethods
+ # Expires the page that was cached with the +path+ as a key. Example:
+ # expire_page "/lists/show"
+ def expire_page(path)
+ return unless perform_caching
+
+ benchmark "Expired page: #{page_cache_file(path)}" do
+ File.delete(page_cache_path(path)) if File.exist?(page_cache_path(path))
+ end
+ end
+
+ # Manually cache the +content+ in the key determined by +path+. Example:
+ # cache_page "I'm the cached content", "/lists/show"
+ def cache_page(content, path)
+ return unless perform_caching
+
+ benchmark "Cached page: #{page_cache_file(path)}" do
+ FileUtils.makedirs(File.dirname(page_cache_path(path)))
+ File.open(page_cache_path(path), "wb+") { |f| f.write(content) }
+ end
+ end
+
+ # Caches the +actions+ using the page-caching approach that'll store the cache in a path within the page_cache_directory that
+ # matches the triggering url.
+ def caches_page(*actions)
+ return unless perform_caching
+ actions = actions.map(&:to_s)
+ after_filter { |c| c.cache_page if actions.include?(c.action_name) }
+ end
+
+ private
+ def page_cache_file(path)
+ name = (path.empty? || path == "/") ? "/index" : URI.unescape(path.chomp('/'))
+ name << page_cache_extension unless (name.split('/').last || name).include? '.'
+ return name
+ end
+
+ def page_cache_path(path)
+ page_cache_directory + page_cache_file(path)
+ end
+ end
+
+ # Expires the page that was cached with the +options+ as a key. Example:
+ # expire_page :controller => "lists", :action => "show"
+ def expire_page(options = {})
+ return unless perform_caching
+
+ if options.is_a?(Hash)
+ if options[:action].is_a?(Array)
+ options[:action].dup.each do |action|
+ self.class.expire_page(url_for(options.merge(:only_path => true, :skip_relative_url_root => true, :action => action)))
+ end
+ else
+ self.class.expire_page(url_for(options.merge(:only_path => true, :skip_relative_url_root => true)))
+ end
+ else
+ self.class.expire_page(options)
+ end
+ end
+
+ # Manually cache the +content+ in the key determined by +options+. If no content is provided, the contents of response.body are used.
+ # If no options are provided, the requested url is used. Example:
+ # cache_page "I'm the cached content", :controller => "lists", :action => "show"
+ def cache_page(content = nil, options = nil)
+ return unless perform_caching && caching_allowed
+
+ path = case options
+ when Hash
+ url_for(options.merge(:only_path => true, :skip_relative_url_root => true, :format => params[:format]))
+ when String
+ options
+ else
+ request.path
+ end
+
+ self.class.cache_page(content || response.body, path)
+ end
+
+ private
+ def caching_allowed
+ request.get? && response.headers['Status'].to_i == 200
+ end
+ end
+ end
+end
\ No newline at end of file
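A minimal sketch of the page-caching setup this file documents, assuming the standard Rails public directory as the document root:

    ActionController::Base.page_cache_directory = "#{RAILS_ROOT}/public"
    ActionController::Base.page_cache_extension = ".html"   # the default

    class WeblogController < ActionController::Base
      caches_page :show

      def update
        List.update(params[:list][:id], params[:list])
        # deletes public/weblog/show/<id>.html so the next request regenerates it
        expire_page :action => "show", :id => params[:list][:id]
        redirect_to :action => "show", :id => params[:list][:id]
      end
    end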
diff --git a/actionpack/lib/action_controller/caching/sql_cache.rb b/actionpack/lib/action_controller/caching/sql_cache.rb
new file mode 100644
index 0000000000..139be6100d
--- /dev/null
+++ b/actionpack/lib/action_controller/caching/sql_cache.rb
@@ -0,0 +1,18 @@
+module ActionController #:nodoc:
+ module Caching
+ module SqlCache
+ def self.included(base) #:nodoc:
+ if defined?(ActiveRecord) && ActiveRecord::Base.respond_to?(:cache)
+ base.alias_method_chain :perform_action, :caching
+ end
+ end
+
+ protected
+ def perform_action_with_caching
+ ActiveRecord::Base.cache do
+ perform_action_without_caching
+ end
+ end
+ end
+ end
+end
\ No newline at end of file
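Because SqlCache wraps perform_action in ActiveRecord::Base.cache, identical queries issued while a single action runs are answered from the query cache. A sketch of the effect, with an illustrative Post model:

    class PostsController < ApplicationController
      def show
        @post       = Post.find(params[:id])  # hits the database
        @post_again = Post.find(params[:id])  # same SQL, served from the query cache
      end
    end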
diff --git a/actionpack/lib/action_controller/caching/sweeping.rb b/actionpack/lib/action_controller/caching/sweeping.rb
new file mode 100644
index 0000000000..eda4459cda
--- /dev/null
+++ b/actionpack/lib/action_controller/caching/sweeping.rb
@@ -0,0 +1,90 @@
+module ActionController #:nodoc:
+ module Caching
+ # Sweepers are the terminators of the caching world and responsible for expiring caches when model objects change.
+ # They do this by being half-observers, half-filters and implementing callbacks for both roles. A Sweeper example:
+ #
+ # class ListSweeper < ActionController::Caching::Sweeper
+ # observe List, Item
+ #
+ # def after_save(record)
+ # list = record.is_a?(List) ? record : record.list
+ # expire_page(:controller => "lists", :action => %w( show public feed ), :id => list.id)
+ # expire_action(:controller => "lists", :action => "all")
+ # list.shares.each { |share| expire_page(:controller => "lists", :action => "show", :id => share.url_key) }
+ # end
+ # end
+ #
+ # The sweeper is assigned in the controllers that wish to have its job performed using the <tt>cache_sweeper</tt> class method:
+ #
+ # class ListsController < ApplicationController
+ # caches_action :index, :show, :public, :feed
+ # cache_sweeper :list_sweeper, :only => [ :edit, :destroy, :share ]
+ # end
+ #
+ # In the example above, four actions are cached and three actions are responsible for expiring those caches.
+ module Sweeping
+ def self.included(base) #:nodoc:
+ base.extend(ClassMethods)
+ end
+
+ module ClassMethods #:nodoc:
+ def cache_sweeper(*sweepers)
+ return unless perform_caching
+ configuration = sweepers.extract_options!
+ sweepers.each do |sweeper|
+ ActiveRecord::Base.observers << sweeper if defined?(ActiveRecord) and defined?(ActiveRecord::Base)
+ sweeper_instance = Object.const_get(Inflector.classify(sweeper)).instance
+
+ if sweeper_instance.is_a?(Sweeper)
+ around_filter(sweeper_instance, :only => configuration[:only])
+ else
+ after_filter(sweeper_instance, :only => configuration[:only])
+ end
+ end
+ end
+ end
+ end
+
+ if defined?(ActiveRecord) and defined?(ActiveRecord::Observer)
+ class Sweeper < ActiveRecord::Observer #:nodoc:
+ attr_accessor :controller
+
+ def before(controller)
+ self.controller = controller
+ callback(:before)
+ end
+
+ def after(controller)
+ callback(:after)
+ # Clean up, so that the controller can be collected after this request
+ self.controller = nil
+ end
+
+ protected
+ # gets the action cache path for the given options.
+ def action_path_for(options)
+ ActionController::Caching::Actions::ActionCachePath.path_for(controller, options)
+ end
+
+ # Retrieve instance variables set in the controller.
+ def assigns(key)
+ controller.instance_variable_get("@#{key}")
+ end
+
+ private
+ def callback(timing)
+ controller_callback_method_name = "#{timing}_#{controller.controller_name.underscore}"
+ action_callback_method_name = "#{controller_callback_method_name}_#{controller.action_name}"
+
+ send!(controller_callback_method_name) if respond_to?(controller_callback_method_name, true)
+ send!(action_callback_method_name) if respond_to?(action_callback_method_name, true)
+ end
+
+ def method_missing(method, *arguments)
+ return if @controller.nil?
+ @controller.send!(method, *arguments)
+ end
+ end
+ end
+ end
+end
\ No newline at end of file
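Beyond the observer callbacks shown in the documentation, the callback method above also dispatches controller- and action-scoped hooks named "#{timing}_#{controller_name}" and "#{timing}_#{controller_name}_#{action_name}". A hedged sketch, assuming the sweeper is attached with cache_sweeper :list_sweeper and ListsController#edit is the action being swept:

    class ListSweeper < ActionController::Caching::Sweeper
      observe List

      # observer callback, fired by model changes
      def after_save(record)
        expire_action(:controller => "lists", :action => "all")
      end

      # controller-scoped callback, fired by the around filter before
      # ListsController#edit runs (name derived as before_<controller>_<action>)
      def before_lists_edit
        # e.g. gather state needed to expire caches after the action
      end
    end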
diff --git a/actionpack/lib/action_view/helpers/cache_helper.rb b/actionpack/lib/action_view/helpers/cache_helper.rb
index 87a8e8dc98..eb631cf201 100644
--- a/actionpack/lib/action_view/helpers/cache_helper.rb
+++ b/actionpack/lib/action_view/helpers/cache_helper.rb
@@ -31,21 +31,25 @@ module ActionView
# <%= render :partial => "topics", :collection => @topic_list %>
# <i>Topics listed alphabetically</i>
# <% end %>
- def cache(name = {}, &block)
+ def cache(name = {}, options = nil, &block)
template_extension = first_render[/\.(\w+)$/, 1].to_sym
+
case template_extension
when :erb, :rhtml
- @controller.cache_erb_fragment(block, name)
+ @controller.cache_erb_fragment(block, name, options)
when :rjs
- @controller.cache_rjs_fragment(block, name)
+ @controller.cache_rjs_fragment(block, name, options)
when :builder, :rxml
- @controller.cache_rxml_fragment(block, name)
+ @controller.cache_rxml_fragment(block, name, options)
else
# do a last ditch effort for those brave souls using
# different template engines. This should give plugin
# writers a simple hook.
- raise "fragment caching not supported for #{template_extension} files." unless @controller.respond_to?("cache_#{template_extension}_fragment")
- @controller.send "cache_#{template_extension}_fragment", block, name
+ unless @controller.respond_to?("cache_#{template_extension}_fragment")
+ raise "fragment caching not supported for #{template_extension} files."
+ end
+
+ @controller.send!("cache_#{template_extension}_fragment", block, name, options)
end
end
end
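With the new options parameter, templates can pass store options straight through to write_fragment. Whether a particular option is honored depends on the configured store; :expires_in here only illustrates the pass-through and is an assumption, not a documented guarantee:

    <% cache({ :action => "index", :action_suffix => "sidebar" }, :expires_in => 10.minutes) do %>
      <%= render :partial => "sidebar" %>
    <% end %>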
diff --git a/actionpack/test/controller/caching_test.rb b/actionpack/test/controller/caching_test.rb
index cfc41cc279..15eb4c943a 100644
--- a/actionpack/test/controller/caching_test.rb
+++ b/actionpack/test/controller/caching_test.rb
@@ -5,7 +5,7 @@ CACHE_DIR = 'test_cache'
# Don't change '/../temp/' cavalierly or you might hose something you don't want hosed
FILE_STORE_PATH = File.join(File.dirname(__FILE__), '/../temp/', CACHE_DIR)
ActionController::Base.page_cache_directory = FILE_STORE_PATH
-ActionController::Base.fragment_cache_store = :file_store, FILE_STORE_PATH
+ActionController::Base.cache_store = :file_store, FILE_STORE_PATH
class PageCachingTestController < ActionController::Base
caches_page :ok, :no_content, :found, :not_found
@@ -343,7 +343,7 @@ class ActionCacheTest < Test::Unit::TestCase
end
def assert_cache_exists(path)
- full_path = File.join(FILE_STORE_PATH, path + '.cache')
+ full_path = File.join(FILE_STORE_PATH, "views", path + '.cache')
assert File.exist?(full_path), "#{full_path.inspect} does not exist."
end
end
@@ -355,8 +355,8 @@ end
class FragmentCachingTest < Test::Unit::TestCase
def setup
ActionController::Base.perform_caching = true
- @store = ActionController::Caching::Fragments::UnthreadedMemoryStore.new
- ActionController::Base.fragment_cache_store = @store
+ @store = ActiveSupport::Cache::MemoryStore.new
+ ActionController::Base.cache_store = @store
@controller = FragmentCachingTestController.new
@params = {:controller => 'posts', :action => 'index'}
@request = ActionController::TestRequest.new
@@ -368,57 +368,57 @@ class FragmentCachingTest < Test::Unit::TestCase
end
def test_fragement_cache_key
- assert_equal 'what a key', @controller.fragment_cache_key('what a key')
- assert_equal( "test.host/fragment_caching_test/some_action",
+ assert_equal 'views/what a key', @controller.fragment_cache_key('what a key')
+ assert_equal( "views/test.host/fragment_caching_test/some_action",
@controller.fragment_cache_key(:controller => 'fragment_caching_test',:action => 'some_action'))
end
def test_read_fragment__with_caching_enabled
- @store.write('name', 'value')
+ @store.write('views/name', 'value')
assert_equal 'value', @controller.read_fragment('name')
end
def test_read_fragment__with_caching_disabled
ActionController::Base.perform_caching = false
- @store.write('name', 'value')
+ @store.write('views/name', 'value')
assert_nil @controller.read_fragment('name')
end
def test_write_fragment__with_caching_enabled
- assert_nil @store.read('name')
+ assert_nil @store.read('views/name')
assert_equal 'value', @controller.write_fragment('name', 'value')
- assert_equal 'value', @store.read('name')
+ assert_equal 'value', @store.read('views/name')
end
def test_write_fragment__with_caching_disabled
- assert_nil @store.read('name')
+ assert_nil @store.read('views/name')
ActionController::Base.perform_caching = false
assert_equal nil, @controller.write_fragment('name', 'value')
- assert_nil @store.read('name')
+ assert_nil @store.read('views/name')
end
def test_expire_fragment__with_simple_key
- @store.write('name', 'value')
+ @store.write('views/name', 'value')
@controller.expire_fragment 'name'
- assert_nil @store.read('name')
+ assert_nil @store.read('views/name')
end
def test_expire_fragment__with__regexp
- @store.write('name', 'value')
- @store.write('another_name', 'another_value')
- @store.write('primalgrasp', 'will not expire ;-)')
+ @store.write('views/name', 'value')
+ @store.write('views/another_name', 'another_value')
+ @store.write('views/primalgrasp', 'will not expire ;-)')
@controller.expire_fragment /name/
- assert_nil @store.read('name')
- assert_nil @store.read('another_name')
- assert_equal 'will not expire ;-)', @store.read('primalgrasp')
+ assert_nil @store.read('views/name')
+ assert_nil @store.read('views/another_name')
+ assert_equal 'will not expire ;-)', @store.read('views/primalgrasp')
end
def test_fragment_for__with_disabled_caching
ActionController::Base.perform_caching = false
- @store.write('expensive', 'fragment content')
+ @store.write('views/expensive', 'fragment content')
fragment_computed = false
buffer = 'generated till now -> '
@@ -430,7 +430,7 @@ class FragmentCachingTest < Test::Unit::TestCase
def test_fragment_for
- @store.write('expensive', 'fragment content')
+ @store.write('views/expensive', 'fragment content')
fragment_computed = false
buffer = 'generated till now -> '
@@ -441,7 +441,7 @@ class FragmentCachingTest < Test::Unit::TestCase
end
def test_cache_erb_fragment
- @store.write('expensive', 'fragment content')
+ @store.write('views/expensive', 'fragment content')
_erbout = 'generated till now -> '
assert_equal( 'generated till now -> fragment content',
@@ -449,7 +449,7 @@ class FragmentCachingTest < Test::Unit::TestCase
end
def test_cache_rxml_fragment
- @store.write('expensive', 'fragment content')
+ @store.write('views/expensive', 'fragment content')
xml = 'generated till now -> '
class << xml; def target!; to_s; end; end
@@ -458,7 +458,7 @@ class FragmentCachingTest < Test::Unit::TestCase
end
def test_cache_rjs_fragment
- @store.write('expensive', 'fragment content')
+ @store.write('views/expensive', 'fragment content')
page = 'generated till now -> '
assert_equal( 'generated till now -> fragment content',
@@ -466,7 +466,7 @@ class FragmentCachingTest < Test::Unit::TestCase
end
def test_cache_rjs_fragment_debug_mode_does_not_interfere
- @store.write('expensive', 'fragment content')
+ @store.write('views/expensive', 'fragment content')
page = 'generated till now -> '
begin
diff --git a/actionpack/test/controller/fragment_store_setting_test.rb b/actionpack/test/controller/fragment_store_setting_test.rb
index 3df6fd0be2..e69de29bb2 100644
--- a/actionpack/test/controller/fragment_store_setting_test.rb
+++ b/actionpack/test/controller/fragment_store_setting_test.rb
@@ -1,47 +0,0 @@
-require File.dirname(__FILE__) + '/../abstract_unit'
-
-MemCache = Struct.new(:MemCache, :address) unless Object.const_defined?(:MemCache)
-
-class FragmentCacheStoreSettingTest < Test::Unit::TestCase
- def teardown
- ActionController::Base.fragment_cache_store = ActionController::Caching::Fragments::MemoryStore.new
- end
-
- def test_file_fragment_cache_store
- ActionController::Base.fragment_cache_store = :file_store, "/path/to/cache/directory"
- assert_kind_of(
- ActionController::Caching::Fragments::FileStore,
- ActionController::Base.fragment_cache_store
- )
- assert_equal "/path/to/cache/directory", ActionController::Base.fragment_cache_store.cache_path
- end
-
- def test_drb_fragment_cache_store
- ActionController::Base.fragment_cache_store = :drb_store, "druby://localhost:9192"
- assert_kind_of(
- ActionController::Caching::Fragments::DRbStore,
- ActionController::Base.fragment_cache_store
- )
- assert_equal "druby://localhost:9192", ActionController::Base.fragment_cache_store.address
- end
-
- if defined? CGI::Session::MemCacheStore
- def test_mem_cache_fragment_cache_store
- ActionController::Base.fragment_cache_store = :mem_cache_store, "localhost"
- assert_kind_of(
- ActionController::Caching::Fragments::MemCacheStore,
- ActionController::Base.fragment_cache_store
- )
- assert_equal %w(localhost), ActionController::Base.fragment_cache_store.addresses
- end
- end
-
- def test_object_assigned_fragment_cache_store
- ActionController::Base.fragment_cache_store = ActionController::Caching::Fragments::FileStore.new("/path/to/cache/directory")
- assert_kind_of(
- ActionController::Caching::Fragments::FileStore,
- ActionController::Base.fragment_cache_store
- )
- assert_equal "/path/to/cache/directory", ActionController::Base.fragment_cache_store.cache_path
- end
-end
diff --git a/activerecord/CHANGELOG b/activerecord/CHANGELOG
index 952033eefc..a6e963bebe 100644
--- a/activerecord/CHANGELOG
+++ b/activerecord/CHANGELOG
@@ -1,5 +1,7 @@
*SVN*
+* Added ActiveRecord::Base.cache_key to make it easier to cache Active Records in combination with the new ActiveSupport::Cache::* libraries [DHH]
+
* Make sure CSV fixtures are compatible with ruby 1.9's new csv implementation. [JEG2]
* Added by parameter to increment, decrement, and their bang varieties so you can do player1.increment!(:points, 5) #10542 [Sam]
diff --git a/activerecord/lib/active_record/base.rb b/activerecord/lib/active_record/base.rb
index 149ee61c52..d784f59d11 100755
--- a/activerecord/lib/active_record/base.rb
+++ b/activerecord/lib/active_record/base.rb
@@ -1959,6 +1959,22 @@ module ActiveRecord #:nodoc:
# We can't use alias_method here, because method 'id' optimizes itself on the fly.
(id = self.id) ? id.to_s : nil # Be sure to stringify the id for routes
end
+
+ # Returns a cache key that can be used to identify this record. Examples:
+ #
+ # Product.new.cache_key # => "products/new"
+ # Product.find(5).cache_key # => "products/5" (updated_at not available)
+ # Person.find(5).cache_key # => "people/5-20071224150000" (updated_at available)
+ def cache_key
+ case
+ when new_record?
+ "#{self.class.name.tableize}/new"
+ when self[:updated_at]
+ "#{self.class.name.tableize}/#{id}-#{updated_at.to_s(:number)}"
+ else
+ "#{self.class.name.tableize}/#{id}"
+ end
+ end
def id_before_type_cast #:nodoc:
read_attribute_before_type_cast(self.class.primary_key)
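For reference, a minimal sketch of how the new cache_key pairs with the cache key expansion added in ActiveSupport (the Product model and timestamp below are illustrative, not part of this patch):

    product = Product.find(5)
    product.cache_key                                   # => "products/5-20071224150000" (with updated_at)

    # Records can be handed straight to ActiveSupport::Cache.expand_cache_key,
    # which fragment caching calls under the "views" namespace:
    ActiveSupport::Cache.expand_cache_key(product, :views)
    # => "views/products/5-20071224150000"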
diff --git a/activesupport/CHANGELOG b/activesupport/CHANGELOG
index e8235fb4a1..16296a579f 100644
--- a/activesupport/CHANGELOG
+++ b/activesupport/CHANGELOG
@@ -1,5 +1,11 @@
*SVN*
+* Added ActiveSupport::Gzip.decompress/compress(source) as an easy wrapper for Zlib [Tobias Luetke]
+
+* Included MemCache-Client to make the improved ActiveSupport::Cache::MemCacheStore work out of the box [Bob Cottrell, Eric Hodel]
+
+* Added ActiveSupport::Cache::* framework as an extraction from ActionController::Caching::Fragments::* [DHH]
+
* Fixed String#titleize to work for strings with 's too #10571 [trek]
* Changed the implementation of Enumerable#group_by to use a double array approach instead of a hash such that the insert order is honored [DHH/Marcel]
diff --git a/activesupport/lib/active_support.rb b/activesupport/lib/active_support.rb
index 33e6fe97bf..c459511dc8 100644
--- a/activesupport/lib/active_support.rb
+++ b/activesupport/lib/active_support.rb
@@ -32,6 +32,9 @@ require 'active_support/core_ext'
require 'active_support/clean_logger'
require 'active_support/buffered_logger'
+require 'active_support/gzip'
+require 'active_support/cache'
+
require 'active_support/dependencies'
require 'active_support/deprecation'
diff --git a/activesupport/lib/active_support/cache.rb b/activesupport/lib/active_support/cache.rb
new file mode 100644
index 0000000000..8252ada032
--- /dev/null
+++ b/activesupport/lib/active_support/cache.rb
@@ -0,0 +1,121 @@
+module ActiveSupport
+ module Cache
+ def self.lookup_store(*store_option)
+ store, *parameters = *([ store_option ].flatten)
+
+ case store
+ when Symbol
+ store_class_name = (store == :drb_store ? "DRbStore" : store.to_s.camelize)
+ store_class = ActiveSupport::Cache.const_get(store_class_name)
+ store_class.new(*parameters)
+ when nil
+ ActiveSupport::Cache::MemoryStore.new
+ else
+ store
+ end
+ end
+
+ def self.expand_cache_key(key, namespace = nil)
+ expanded_cache_key = namespace ? "#{namespace}/" : ""
+
+ if ENV["RAILS_CACHE_ID"] || ENV["RAILS_APP_VERSION"]
+ expanded_cache_key << "#{ENV["RAILS_CACHE_ID"] || ENV["RAILS_APP_VERSION"]}/"
+ end
+
+ expanded_cache_key << case
+ when key.respond_to?(:cache_key)
+ key.cache_key
+ when key.is_a?(Array)
+ key.collect { |element| expand_cache_key(element) }.to_param
+ when key.respond_to?(:to_param)
+ key.to_param
+ end
+
+ expanded_cache_key
+ end
+
+
+ class Store
+ cattr_accessor :logger
+
+ def initialize
+ end
+
+ def threadsafe!
+ @mutex = Mutex.new
+ self.class.send :include, ThreadSafety
+ self
+ end
+
+ def fetch(key, options = nil)
+ @logger_off = true
+ if value = read(key, options)
+ @logger_off = false
+ log("hit", key, options)
+ value
+ elsif block_given?
+ @logger_off = false
+ log("miss", key, options)
+
+ value = nil
+ seconds = Benchmark.realtime { value = yield }
+
+ @logger_off = true
+ write(key, value, options)
+ @logger_off = false
+
+ log("write (will save #{'%.5f' % seconds})", key, nil)
+
+ value
+ end
+ end
+
+ def read(key, options = nil)
+ log("read", key, options)
+ end
+
+ def write(key, value, options = nil)
+ log("write", key, options)
+ end
+
+ def delete(key, options = nil)
+ log("delete", key, options)
+ end
+
+ def delete_matched(matcher, options = nil)
+ log("delete matched", matcher.inspect, options)
+ end
+
+
+ private
+ def log(operation, key, options)
+ logger.debug("Cache #{operation}: #{key}#{options ? " (#{options.inspect})" : ""}") if logger && !@logger_off
+ end
+ end
+
+
+ module ThreadSafety #:nodoc:
+ def read(key, options = nil) #:nodoc:
+ @mutex.synchronize { super }
+ end
+
+ def write(key, value, options = nil) #:nodoc:
+ @mutex.synchronize { super }
+ end
+
+ def delete(key, options = nil) #:nodoc:
+ @mutex.synchronize { super }
+ end
+
+ def delete_matched(matcher, options = nil) #:nodoc:
+ @mutex.synchronize { super }
+ end
+ end
+ end
+end
+
+require 'active_support/cache/file_store'
+require 'active_support/cache/memory_store'
+require 'active_support/cache/drb_store'
+require 'active_support/cache/mem_cache_store'
+require 'active_support/cache/compressed_mem_cache_store' \ No newline at end of file
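A minimal usage sketch of the API introduced above (store choice, keys, and the render_slow_partial placeholder are illustrative):

    cache = ActiveSupport::Cache.lookup_store(:memory_store)

    # fetch returns the cached value on a hit; on a miss it runs the block,
    # writes the result, and returns it
    cache.fetch("views/slow_partial") { render_slow_partial }

    # expand_cache_key builds string keys out of anything that responds to
    # cache_key or to_param, optionally under a namespace
    ActiveSupport::Cache.expand_cache_key([ "posts", 5 ], :views)   # => "views/posts/5"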
diff --git a/activesupport/lib/active_support/cache/compressed_mem_cache_store.rb b/activesupport/lib/active_support/cache/compressed_mem_cache_store.rb
new file mode 100644
index 0000000000..9470ac9f66
--- /dev/null
+++ b/activesupport/lib/active_support/cache/compressed_mem_cache_store.rb
@@ -0,0 +1,15 @@
+module ActiveSupport
+ module Cache
+ class CompressedMemCacheStore < MemCacheStore
+ def read(name, options = {})
+ if value = super(name, options.merge(:raw => true))
+ Marshal.load(ActiveSupport::Gzip.decompress(value))
+ end
+ end
+
+ def write(name, value, options = {})
+ super(name, ActiveSupport::Gzip.compress(Marshal.dump(value)), options.merge(:raw => true))
+ end
+ end
+ end
+end
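Illustrative only: the compressed store trades CPU for memcached memory by gzipping the marshalled value and passing :raw => true so the underlying client does not marshal it a second time.

    cache = ActiveSupport::Cache.lookup_store(:compressed_mem_cache_store, "localhost:11211")
    cache.write("reports/huge", report_body)   # report_body is a placeholder string
    cache.read("reports/huge")                 # transparently gunzipped and unmarshalled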
diff --git a/activesupport/lib/active_support/cache/drb_store.rb b/activesupport/lib/active_support/cache/drb_store.rb
new file mode 100644
index 0000000000..b80c2ee4d5
--- /dev/null
+++ b/activesupport/lib/active_support/cache/drb_store.rb
@@ -0,0 +1,15 @@
+require 'drb'
+
+module ActiveSupport
+ module Cache
+ class DRbStore < MemoryStore #:nodoc:
+ attr_reader :address
+
+ def initialize(address = 'druby://localhost:9192')
+ super()
+ @address = address
+ @data = DRbObject.new(nil, address)
+ end
+ end
+ end
+end \ No newline at end of file
diff --git a/activesupport/lib/active_support/cache/file_store.rb b/activesupport/lib/active_support/cache/file_store.rb
new file mode 100644
index 0000000000..88f9ac19db
--- /dev/null
+++ b/activesupport/lib/active_support/cache/file_store.rb
@@ -0,0 +1,65 @@
+module ActiveSupport
+ module Cache
+ class FileStore < Store
+ attr_reader :cache_path
+
+ def initialize(cache_path)
+ @cache_path = cache_path
+ end
+
+ def read(name, options = nil)
+ super
+ File.open(real_file_path(name), 'rb') { |f| f.read } rescue nil
+ end
+
+ def write(name, value, options = nil)
+ super
+ ensure_cache_path(File.dirname(real_file_path(name)))
+ File.open(real_file_path(name), "wb+") { |f| f.write(value) }
+ rescue => e
+ RAILS_DEFAULT_LOGGER.error "Couldn't create cache directory: #{name} (#{e.message})" if RAILS_DEFAULT_LOGGER
+ end
+
+ def delete(name, options)
+ super
+ File.delete(real_file_path(name))
+ rescue SystemCallError => e
+ # If there's no cache, then there's nothing to complain about
+ end
+
+ def delete_matched(matcher, options)
+ super
+ search_dir(@cache_path) do |f|
+ if f =~ matcher
+ begin
+ File.delete(f)
+ rescue SystemCallError => e
+ # If there's no cache, then there's nothing to complain about
+ end
+ end
+ end
+ end
+
+ private
+ def real_file_path(name)
+ '%s/%s.cache' % [@cache_path, name.gsub('?', '.').gsub(':', '.')]
+ end
+
+ def ensure_cache_path(path)
+ FileUtils.makedirs(path) unless File.exists?(path)
+ end
+
+ def search_dir(dir, &callback)
+ Dir.foreach(dir) do |d|
+ next if d == "." || d == ".."
+ name = File.join(dir, d)
+ if File.directory?(name)
+ search_dir(name, &callback)
+ else
+ callback.call name
+ end
+ end
+ end
+ end
+ end
+end \ No newline at end of file
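Illustrative sketch: keys map directly onto files under the configured path, so a fragment key like "views/posts/5" ends up at tmp/cache/views/posts/5.cache.

    store = ActiveSupport::Cache::FileStore.new("tmp/cache")
    store.write("views/posts/5", "<li>cached fragment</li>")
    store.read("views/posts/5")              # => "<li>cached fragment</li>"
    store.delete_matched(%r{views/posts})    # walks the cache path and deletes matching files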
diff --git a/activesupport/lib/active_support/cache/mem_cache_store.rb b/activesupport/lib/active_support/cache/mem_cache_store.rb
new file mode 100644
index 0000000000..5820d15cc5
--- /dev/null
+++ b/activesupport/lib/active_support/cache/mem_cache_store.rb
@@ -0,0 +1,51 @@
+require 'memcache'
+
+module ActiveSupport
+ module Cache
+ class MemCacheStore < Store
+ attr_reader :addresses
+
+ def initialize(*addresses)
+ addresses = addresses.flatten
+ addresses = ["localhost"] if addresses.empty?
+ @addresses = addresses
+ @data = MemCache.new(*addresses)
+ end
+
+ def read(key, options = nil)
+ super
+ @data.get(key, raw?(options))
+ rescue MemCache::MemCacheError
+ nil
+ end
+
+ def write(key, value, options = nil)
+ super
+ @data.set(key, value, expires_in(options), raw?(options))
+ rescue MemCache::MemCacheError
+ nil
+ end
+
+ def delete(key, options = nil)
+ super
+ @data.delete(key, expires_in(options))
+ rescue MemCache::MemCacheError
+ nil
+ end
+
+ def delete_matched(matcher, options = nil)
+ super
+ raise "Not supported by Memcache"
+ end
+
+ private
+ def expires_in(options)
+ (options && options[:expires_in]) || 0
+ end
+
+ def raw?(options)
+ options && options[:raw]
+ end
+ end
+ end
+end
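Illustrative sketch of the per-call options handled above (server addresses and the fragment_html placeholder are examples):

    cache = ActiveSupport::Cache::MemCacheStore.new("cache-1:11211", "cache-2:11211")

    cache.write("views/posts/5", fragment_html, :expires_in => 300)   # expiry in seconds
    cache.read("counters/hits", :raw => true)                         # :raw skips Marshal on the client
    cache.delete("views/posts/5")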
diff --git a/activesupport/lib/active_support/cache/memory_store.rb b/activesupport/lib/active_support/cache/memory_store.rb
new file mode 100644
index 0000000000..e0aba6b19a
--- /dev/null
+++ b/activesupport/lib/active_support/cache/memory_store.rb
@@ -0,0 +1,29 @@
+module ActiveSupport
+ module Cache
+ class MemoryStore < Store
+ def initialize
+ @data = {}
+ end
+
+ def read(name, options = nil)
+ super
+ @data[name]
+ end
+
+ def write(name, value, options = nil)
+ super
+ @data[name] = value
+ end
+
+ def delete(name, options = nil)
+ super
+ @data.delete(name)
+ end
+
+ def delete_matched(matcher, options = nil)
+ super
+ @data.delete_if { |k,v| k =~ matcher }
+ end
+ end
+ end
+end \ No newline at end of file
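Illustrative sketch: MemoryStore keeps everything in a per-process Hash and is what lookup_store falls back to when no store is configured.

    cache = ActiveSupport::Cache::MemoryStore.new
    cache.write("views/posts/5", "<li>fragment</li>")
    cache.read("views/posts/5")        # => "<li>fragment</li>"
    cache.delete_matched(/^views\//)   # drops every key under the views/ namespace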
diff --git a/activesupport/lib/active_support/core_ext/date/conversions.rb b/activesupport/lib/active_support/core_ext/date/conversions.rb
index f34d860117..684775c5e3 100644
--- a/activesupport/lib/active_support/core_ext/date/conversions.rb
+++ b/activesupport/lib/active_support/core_ext/date/conversions.rb
@@ -7,6 +7,7 @@ module ActiveSupport #:nodoc:
:short => "%e %b",
:long => "%B %e, %Y",
:db => "%Y-%m-%d",
+ :number => "%Y%m%d",
:long_ordinal => lambda { |date| date.strftime("%B #{date.day.ordinalize}, %Y") }, # => "April 25th, 2007"
:rfc822 => "%e %b %Y"
}
diff --git a/activesupport/lib/active_support/core_ext/hash/conversions.rb b/activesupport/lib/active_support/core_ext/hash/conversions.rb
index a758c3454b..f6ebb90400 100644
--- a/activesupport/lib/active_support/core_ext/hash/conversions.rb
+++ b/activesupport/lib/active_support/core_ext/hash/conversions.rb
@@ -94,6 +94,8 @@ module ActiveSupport #:nodoc:
value.to_query(namespace ? "#{namespace}[#{key}]" : key)
end.sort * '&'
end
+
+ alias_method :to_param, :to_query
def to_xml(options = {})
options[:indent] ||= 2
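Illustrative: the to_param alias lets a Hash be expanded by ActiveSupport::Cache.expand_cache_key like any other parameterizable value.

    { :controller => "posts", :action => "index" }.to_param
    # => "action=index&controller=posts" (sorted, CGI-escaped query form)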
diff --git a/activesupport/lib/active_support/core_ext/time/conversions.rb b/activesupport/lib/active_support/core_ext/time/conversions.rb
index 0ce90669d2..ab076a5930 100644
--- a/activesupport/lib/active_support/core_ext/time/conversions.rb
+++ b/activesupport/lib/active_support/core_ext/time/conversions.rb
@@ -5,6 +5,7 @@ module ActiveSupport #:nodoc:
module Conversions
DATE_FORMATS = {
:db => "%Y-%m-%d %H:%M:%S",
+ :number => "%Y%m%d%H%M%S",
:time => "%H:%M",
:short => "%d %b %H:%M",
:long => "%B %d, %Y %H:%M",
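Illustrative: the new :number format is what ActiveRecord::Base#cache_key uses to embed updated_at in the key.

    Time.utc(2007, 12, 24, 15, 0, 0).to_s(:number)   # => "20071224150000"
    Date.new(2007, 12, 24).to_s(:number)             # => "20071224"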
diff --git a/activesupport/lib/active_support/gzip.rb b/activesupport/lib/active_support/gzip.rb
new file mode 100644
index 0000000000..c65944dacd
--- /dev/null
+++ b/activesupport/lib/active_support/gzip.rb
@@ -0,0 +1,22 @@
+require 'zlib'
+require 'stringio'
+
+module ActiveSupport
+ module Gzip
+ class Stream < StringIO
+ def close; rewind; end
+ end
+
+ def self.decompress(source)
+ Zlib::GzipReader.new(StringIO.new(source)).read
+ end
+
+ def self.compress(source)
+ output = Stream.new
+ gz = Zlib::GzipWriter.new(output)
+ gz.write(source)
+ gz.close
+ output.string
+ end
+ end
+end \ No newline at end of file
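Illustrative round trip through the wrapper:

    compressed = ActiveSupport::Gzip.compress("raw page fragment")
    ActiveSupport::Gzip.decompress(compressed)   # => "raw page fragment"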
diff --git a/activesupport/lib/active_support/vendor.rb b/activesupport/lib/active_support/vendor.rb
index 75c18062c0..6cc7ad8aa1 100644
--- a/activesupport/lib/active_support/vendor.rb
+++ b/activesupport/lib/active_support/vendor.rb
@@ -12,3 +12,9 @@ begin
rescue Gem::LoadError
$:.unshift "#{File.dirname(__FILE__)}/vendor/xml-simple-1.0.11"
end
+
+begin
+ gem 'memcache-client', '~> 1.5.0'
+rescue Gem::LoadError
+ $:.unshift "#{File.dirname(__FILE__)}/vendor/memcache-client-1.5.0"
+end \ No newline at end of file
diff --git a/activesupport/lib/active_support/vendor/memcache-client-1.5.0/memcache.rb b/activesupport/lib/active_support/vendor/memcache-client-1.5.0/memcache.rb
new file mode 100644
index 0000000000..8c01b2e89d
--- /dev/null
+++ b/activesupport/lib/active_support/vendor/memcache-client-1.5.0/memcache.rb
@@ -0,0 +1,832 @@
+# All original code copyright 2005, 2006, 2007 Bob Cottrell, Eric Hodel,
+# The Robot Co-op. All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions
+# are met:
+#
+# 1. Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# 2. Redistributions in binary form must reproduce the above copyright
+# notice, this list of conditions and the following disclaimer in the
+# documentation and/or other materials provided with the distribution.
+# 3. Neither the names of the authors nor the names of their contributors
+# may be used to endorse or promote products derived from this software
+# without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE AUTHORS ``AS IS'' AND ANY EXPRESS
+# OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+# ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHORS OR CONTRIBUTORS BE
+# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY,
+# OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
+# OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+# BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+# OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
+# EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+
+require 'socket'
+require 'thread'
+require 'timeout'
+require 'rubygems'
+
+class String
+
+ ##
+ # Uses the ITU-T polynomial in the CRC32 algorithm.
+
+ def crc32_ITU_T
+ n = length
+ r = 0xFFFFFFFF
+
+ n.times do |i|
+ r ^= self[i]
+ 8.times do
+ if (r & 1) != 0 then
+ r = (r>>1) ^ 0xEDB88320
+ else
+ r >>= 1
+ end
+ end
+ end
+
+ r ^ 0xFFFFFFFF
+ end
+
+end
+
+##
+# A Ruby client library for memcached.
+#
+# This is intended to provide access to basic memcached functionality. It
+# does not attempt to be a complete implementation of the entire API, but it is
+# approaching a complete implementation.
+
+class MemCache
+
+ ##
+ # The version of MemCache you are using.
+
+ VERSION = '1.5.0'
+
+ ##
+ # Default options for the cache object.
+
+ DEFAULT_OPTIONS = {
+ :namespace => nil,
+ :readonly => false,
+ :multithread => false,
+ }
+
+ ##
+ # Default memcached port.
+
+ DEFAULT_PORT = 11211
+
+ ##
+ # Default memcached server weight.
+
+ DEFAULT_WEIGHT = 1
+
+ ##
+ # The amount of time to wait for a response from a memcached server. If a
+ # response is not completed within this time, the connection to the server
+ # will be closed and an error will be raised.
+
+ attr_accessor :request_timeout
+
+ ##
+ # The namespace for this instance
+
+ attr_reader :namespace
+
+ ##
+ # The multithread setting for this instance
+
+ attr_reader :multithread
+
+ ##
+ # The servers this client talks to. Play at your own peril.
+
+ attr_reader :servers
+
+ ##
+ # Accepts a list of +servers+ and a list of +opts+. +servers+ may be
+ # omitted. See +servers=+ for acceptable server list arguments.
+ #
+ # Valid options for +opts+ are:
+ #
+ # [:namespace] Prepends this value to all keys added or retrieved.
+ # [:readonly] Raises an exception on cache writes when true.
+ # [:multithread] Wraps cache access in a Mutex for thread safety.
+ #
+ # Other options are ignored.
+
+ def initialize(*args)
+ servers = []
+ opts = {}
+
+ case args.length
+ when 0 then # NOP
+ when 1 then
+ arg = args.shift
+ case arg
+ when Hash then opts = arg
+ when Array then servers = arg
+ when String then servers = [arg]
+ else raise ArgumentError, 'first argument must be Array, Hash or String'
+ end
+ when 2 then
+ servers, opts = args
+ else
+ raise ArgumentError, "wrong number of arguments (#{args.length} for 2)"
+ end
+
+ opts = DEFAULT_OPTIONS.merge opts
+ @namespace = opts[:namespace]
+ @readonly = opts[:readonly]
+ @multithread = opts[:multithread]
+ @mutex = Mutex.new if @multithread
+ @buckets = []
+ self.servers = servers
+ end
+
+ ##
+ # Returns a string representation of the cache object.
+
+ def inspect
+ "<MemCache: %d servers, %d buckets, ns: %p, ro: %p>" %
+ [@servers.length, @buckets.length, @namespace, @readonly]
+ end
+
+ ##
+ # Returns whether there is at least one active server for the object.
+
+ def active?
+ not @servers.empty?
+ end
+
+ ##
+ # Returns whether or not the cache object was created read only.
+
+ def readonly?
+ @readonly
+ end
+
+ ##
+ # Set the servers that the requests will be distributed between. Entries
+ # can be either strings of the form "hostname:port" or
+ # "hostname:port:weight" or MemCache::Server objects.
+
+ def servers=(servers)
+ # Create the server objects.
+ @servers = servers.collect do |server|
+ case server
+ when String
+ host, port, weight = server.split ':', 3
+ port ||= DEFAULT_PORT
+ weight ||= DEFAULT_WEIGHT
+ Server.new self, host, port, weight
+ when Server
+ if server.memcache.multithread != @multithread then
+ raise ArgumentError, "can't mix threaded and non-threaded servers"
+ end
+ server
+ else
+ raise TypeError, "cannot convert #{server.class} into MemCache::Server"
+ end
+ end
+
+ # Create an array of server buckets for weight selection of servers.
+ @buckets = []
+ @servers.each do |server|
+ server.weight.times { @buckets.push(server) }
+ end
+ end
+
+ ##
+ # Decrements the value for +key+ by +amount+ and returns the new value.
+ # +key+ must already exist. If +key+ is not an integer, it is assumed to be
+ # 0. +key+ can not be decremented below 0.
+
+ def decr(key, amount = 1)
+ server, cache_key = request_setup key
+
+ if @multithread then
+ threadsafe_cache_decr server, cache_key, amount
+ else
+ cache_decr server, cache_key, amount
+ end
+ rescue TypeError, SocketError, SystemCallError, IOError => err
+ handle_error server, err
+ end
+
+ ##
+ # Retrieves +key+ from memcache. If +raw+ is false, the value will be
+ # unmarshalled.
+
+ def get(key, raw = false)
+ server, cache_key = request_setup key
+
+ value = if @multithread then
+ threadsafe_cache_get server, cache_key
+ else
+ cache_get server, cache_key
+ end
+
+ return nil if value.nil?
+
+ value = Marshal.load value unless raw
+
+ return value
+ rescue TypeError, SocketError, SystemCallError, IOError => err
+ handle_error server, err
+ end
+
+ ##
+ # Retrieves multiple values from memcached in parallel, if possible.
+ #
+ # The memcached protocol supports the ability to retrieve multiple
+ # keys in a single request. Pass in an array of keys to this method
+ # and it will:
+ #
+ # 1. map the key to the appropriate memcached server
+ # 2. send a single request to each server that has one or more key values
+ #
+ # Returns a hash of values.
+ #
+ # cache["a"] = 1
+ # cache["b"] = 2
+ # cache.get_multi "a", "b" # => { "a" => 1, "b" => 2 }
+
+ def get_multi(*keys)
+ raise MemCacheError, 'No active servers' unless active?
+
+ keys.flatten!
+ key_count = keys.length
+ cache_keys = {}
+ server_keys = Hash.new { |h,k| h[k] = [] }
+
+ # map keys to servers
+ keys.each do |key|
+ server, cache_key = request_setup key
+ cache_keys[cache_key] = key
+ server_keys[server] << cache_key
+ end
+
+ results = {}
+
+ server_keys.each do |server, keys|
+ keys = keys.join ' '
+ values = if @multithread then
+ threadsafe_cache_get_multi server, keys
+ else
+ cache_get_multi server, keys
+ end
+ values.each do |key, value|
+ results[cache_keys[key]] = Marshal.load value
+ end
+ end
+
+ return results
+ rescue TypeError, SocketError, SystemCallError, IOError => err
+ handle_error server, err
+ end
+
+ ##
+ # Increments the value for +key+ by +amount+ and returns the new value.
+ # +key+ must already exist. If +key+ is not an integer, it is assumed to be
+ # 0.
+
+ def incr(key, amount = 1)
+ server, cache_key = request_setup key
+
+ if @multithread then
+ threadsafe_cache_incr server, cache_key, amount
+ else
+ cache_incr server, cache_key, amount
+ end
+ rescue TypeError, SocketError, SystemCallError, IOError => err
+ handle_error server, err
+ end
+
+ ##
+ # Add +key+ to the cache with value +value+ that expires in +expiry+
+ # seconds. If +raw+ is true, +value+ will not be Marshalled.
+ #
+ # Warning: Readers should not call this method in the event of a cache miss;
+ # see MemCache#add.
+
+ def set(key, value, expiry = 0, raw = false)
+ raise MemCacheError, "Update of readonly cache" if @readonly
+ server, cache_key = request_setup key
+ socket = server.socket
+
+ value = Marshal.dump value unless raw
+ command = "set #{cache_key} 0 #{expiry} #{value.size}\r\n#{value}\r\n"
+
+ begin
+ @mutex.lock if @multithread
+ socket.write command
+ result = socket.gets
+ raise MemCacheError, $1.strip if result =~ /^SERVER_ERROR (.*)/
+ rescue SocketError, SystemCallError, IOError => err
+ server.close
+ raise MemCacheError, err.message
+ ensure
+ @mutex.unlock if @multithread
+ end
+ end
+
+ ##
+ # Add +key+ to the cache with value +value+ that expires in +expiry+
+ # seconds, but only if +key+ does not already exist in the cache.
+ # If +raw+ is true, +value+ will not be Marshalled.
+ #
+ # Readers should call this method in the event of a cache miss, not
+ # MemCache#set or MemCache#[]=.
+
+ def add(key, value, expiry = 0, raw = false)
+ raise MemCacheError, "Update of readonly cache" if @readonly
+ server, cache_key = request_setup key
+ socket = server.socket
+
+ value = Marshal.dump value unless raw
+ command = "add #{cache_key} 0 #{expiry} #{value.size}\r\n#{value}\r\n"
+
+ begin
+ @mutex.lock if @multithread
+ socket.write command
+ socket.gets
+ rescue SocketError, SystemCallError, IOError => err
+ server.close
+ raise MemCacheError, err.message
+ ensure
+ @mutex.unlock if @multithread
+ end
+ end
+
+ ##
+ # Removes +key+ from the cache in +expiry+ seconds.
+
+ def delete(key, expiry = 0)
+ @mutex.lock if @multithread
+
+ raise MemCacheError, "No active servers" unless active?
+ cache_key = make_cache_key key
+ server = get_server_for_key cache_key
+
+ sock = server.socket
+ raise MemCacheError, "No connection to server" if sock.nil?
+
+ begin
+ sock.write "delete #{cache_key} #{expiry}\r\n"
+ sock.gets
+ rescue SocketError, SystemCallError, IOError => err
+ server.close
+ raise MemCacheError, err.message
+ end
+ ensure
+ @mutex.unlock if @multithread
+ end
+
+ ##
+ # Flush the cache from all memcache servers.
+
+ def flush_all
+ raise MemCacheError, 'No active servers' unless active?
+ raise MemCacheError, "Update of readonly cache" if @readonly
+ begin
+ @mutex.lock if @multithread
+ @servers.each do |server|
+ begin
+ sock = server.socket
+ raise MemCacheError, "No connection to server" if sock.nil?
+ sock.write "flush_all\r\n"
+ result = sock.gets
+ raise MemCacheError, $2.strip if result =~ /^(SERVER_)?ERROR(.*)/
+ rescue SocketError, SystemCallError, IOError => err
+ server.close
+ raise MemCacheError, err.message
+ end
+ end
+ ensure
+ @mutex.unlock if @multithread
+ end
+ end
+
+ ##
+ # Reset the connection to all memcache servers. This should be called if
+ # there is a problem with a cache lookup that might have left the connection
+ # in a corrupted state.
+
+ def reset
+ @servers.each { |server| server.close }
+ end
+
+ ##
+ # Returns statistics for each memcached server. An explanation of the
+ # statistics can be found in the memcached docs:
+ #
+ # http://code.sixapart.com/svn/memcached/trunk/server/doc/protocol.txt
+ #
+ # Example:
+ #
+ # >> pp CACHE.stats
+ # {"localhost:11211"=>
+ # {"bytes"=>4718,
+ # "pid"=>20188,
+ # "connection_structures"=>4,
+ # "time"=>1162278121,
+ # "pointer_size"=>32,
+ # "limit_maxbytes"=>67108864,
+ # "cmd_get"=>14532,
+ # "version"=>"1.2.0",
+ # "bytes_written"=>432583,
+ # "cmd_set"=>32,
+ # "get_misses"=>0,
+ # "total_connections"=>19,
+ # "curr_connections"=>3,
+ # "curr_items"=>4,
+ # "uptime"=>1557,
+ # "get_hits"=>14532,
+ # "total_items"=>32,
+ # "rusage_system"=>0.313952,
+ # "rusage_user"=>0.119981,
+ # "bytes_read"=>190619}}
+ # => nil
+
+ def stats
+ raise MemCacheError, "No active servers" unless active?
+ server_stats = {}
+
+ @servers.each do |server|
+ sock = server.socket
+ raise MemCacheError, "No connection to server" if sock.nil?
+
+ value = nil
+ begin
+ sock.write "stats\r\n"
+ stats = {}
+ while line = sock.gets do
+ break if line == "END\r\n"
+ if line =~ /^STAT ([\w]+) ([\w\.\:]+)/ then
+ name, value = $1, $2
+ stats[name] = case name
+ when 'version'
+ value
+ when 'rusage_user', 'rusage_system' then
+ seconds, microseconds = value.split(/:/, 2)
+ microseconds ||= 0
+ Float(seconds) + (Float(microseconds) / 1_000_000)
+ else
+ if value =~ /^\d+$/ then
+ value.to_i
+ else
+ value
+ end
+ end
+ end
+ end
+ server_stats["#{server.host}:#{server.port}"] = stats
+ rescue SocketError, SystemCallError, IOError => err
+ server.close
+ raise MemCacheError, err.message
+ end
+ end
+
+ server_stats
+ end
+
+ ##
+ # Shortcut to get a value from the cache.
+
+ alias [] get
+
+ ##
+ # Shortcut to save a value in the cache. This method does not set an
+ # expiration on the entry. Use set to specify an explicit expiry.
+
+ def []=(key, value)
+ set key, value
+ end
+
+ protected
+
+ ##
+ # Create a key for the cache, incorporating the namespace qualifier if
+ # requested.
+
+ def make_cache_key(key)
+ if namespace.nil? then
+ key
+ else
+ "#{@namespace}:#{key}"
+ end
+ end
+
+ ##
+ # Pick a server to handle the request based on a hash of the key.
+
+ def get_server_for_key(key)
+ raise ArgumentError, "illegal character in key #{key.inspect}" if
+ key =~ /\s/
+ raise ArgumentError, "key too long #{key.inspect}" if key.length > 250
+ raise MemCacheError, "No servers available" if @servers.empty?
+ return @servers.first if @servers.length == 1
+
+ hkey = hash_for key
+
+ 20.times do |try|
+ server = @buckets[hkey % @buckets.nitems]
+ return server if server.alive?
+ hkey += hash_for "#{try}#{key}"
+ end
+
+ raise MemCacheError, "No servers available"
+ end
+
+ ##
+ # Returns an interoperable hash value for +key+. (I think, docs are
+ # sketchy for down servers).
+
+ def hash_for(key)
+ (key.crc32_ITU_T >> 16) & 0x7fff
+ end
+
+ ##
+ # Performs a raw decr for +cache_key+ from +server+. Returns nil if not
+ # found.
+
+ def cache_decr(server, cache_key, amount)
+ socket = server.socket
+ socket.write "decr #{cache_key} #{amount}\r\n"
+ text = socket.gets
+ return nil if text == "NOT_FOUND\r\n"
+ return text.to_i
+ end
+
+ ##
+ # Fetches the raw data for +cache_key+ from +server+. Returns nil on cache
+ # miss.
+
+ def cache_get(server, cache_key)
+ socket = server.socket
+ socket.write "get #{cache_key}\r\n"
+ keyline = socket.gets # "VALUE <key> <flags> <bytes>\r\n"
+
+ if keyline.nil? then
+ server.close
+ raise MemCacheError, "lost connection to #{server.host}:#{server.port}"
+ end
+
+ return nil if keyline == "END\r\n"
+
+ unless keyline =~ /(\d+)\r/ then
+ server.close
+ raise MemCacheError, "unexpected response #{keyline.inspect}"
+ end
+ value = socket.read $1.to_i
+ socket.read 2 # "\r\n"
+ socket.gets # "END\r\n"
+ return value
+ end
+
+ ##
+ # Fetches +cache_keys+ from +server+ using a multi-get.
+
+ def cache_get_multi(server, cache_keys)
+ values = {}
+ socket = server.socket
+ socket.write "get #{cache_keys}\r\n"
+
+ while keyline = socket.gets do
+ return values if keyline == "END\r\n"
+
+ unless keyline =~ /^VALUE (.+) (.+) (.+)/ then
+ server.close
+ raise MemCacheError, "unexpected response #{keyline.inspect}"
+ end
+
+ key, data_length = $1, $3
+ values[$1] = socket.read data_length.to_i
+ socket.read(2) # "\r\n"
+ end
+
+ server.close
+ raise MemCacheError, "lost connection to #{server.host}:#{server.port}"
+ end
+
+ ##
+ # Performs a raw incr for +cache_key+ from +server+. Returns nil if not
+ # found.
+
+ def cache_incr(server, cache_key, amount)
+ socket = server.socket
+ socket.write "incr #{cache_key} #{amount}\r\n"
+ text = socket.gets
+ return nil if text == "NOT_FOUND\r\n"
+ return text.to_i
+ end
+
+ ##
+ # Handles +error+ from +server+.
+
+ def handle_error(server, error)
+ server.close if server
+ new_error = MemCacheError.new error.message
+ new_error.set_backtrace error.backtrace
+ raise new_error
+ end
+
+ ##
+ # Performs setup for making a request with +key+ from memcached. Returns
+ # the server to fetch the key from and the complete key to use.
+
+ def request_setup(key)
+ raise MemCacheError, 'No active servers' unless active?
+ cache_key = make_cache_key key
+ server = get_server_for_key cache_key
+ raise MemCacheError, 'No connection to server' if server.socket.nil?
+ return server, cache_key
+ end
+
+ def threadsafe_cache_decr(server, cache_key, amount) # :nodoc:
+ @mutex.lock
+ cache_decr server, cache_key, amount
+ ensure
+ @mutex.unlock
+ end
+
+ def threadsafe_cache_get(server, cache_key) # :nodoc:
+ @mutex.lock
+ cache_get server, cache_key
+ ensure
+ @mutex.unlock
+ end
+
+ def threadsafe_cache_get_multi(socket, cache_keys) # :nodoc:
+ @mutex.lock
+ cache_get_multi socket, cache_keys
+ ensure
+ @mutex.unlock
+ end
+
+ def threadsafe_cache_incr(server, cache_key, amount) # :nodoc:
+ @mutex.lock
+ cache_incr server, cache_key, amount
+ ensure
+ @mutex.unlock
+ end
+
+ ##
+ # This class represents a memcached server instance.
+
+ class Server
+
+ ##
+ # The amount of time to wait to establish a connection with a memcached
+ # server. If a connection cannot be established within this time limit,
+ # the server will be marked as down.
+
+ CONNECT_TIMEOUT = 0.25
+
+ ##
+ # The amount of time to wait before attempting to re-establish a
+ # connection with a server that is marked dead.
+
+ RETRY_DELAY = 30.0
+
+ ##
+ # The host the memcached server is running on.
+
+ attr_reader :host
+
+ ##
+ # The port the memcached server is listening on.
+
+ attr_reader :port
+
+ ##
+ # The weight given to the server.
+
+ attr_reader :weight
+
+ ##
+ # The time of next retry if the connection is dead.
+
+ attr_reader :retry
+
+ ##
+ # A text status string describing the state of the server.
+
+ attr_reader :status
+
+ ##
+ # Create a new MemCache::Server object for the memcached instance
+ # listening on the given host and port, weighted by the given weight.
+
+ def initialize(memcache, host, port = DEFAULT_PORT, weight = DEFAULT_WEIGHT)
+ raise ArgumentError, "No host specified" if host.nil? or host.empty?
+ raise ArgumentError, "No port specified" if port.nil? or port.to_i.zero?
+
+ @memcache = memcache
+ @host = host
+ @port = port.to_i
+ @weight = weight.to_i
+
+ @multithread = @memcache.multithread
+ @mutex = Mutex.new
+
+ @sock = nil
+ @retry = nil
+ @status = 'NOT CONNECTED'
+ end
+
+ ##
+ # Return a string representation of the server object.
+
+ def inspect
+ "<MemCache::Server: %s:%d [%d] (%s)>" % [@host, @port, @weight, @status]
+ end
+
+ ##
+ # Check whether the server connection is alive. This will cause the
+ # socket to attempt to connect if it isn't already connected, or if
+ # the server was previously marked as down and the retry time has
+ # been exceeded.
+
+ def alive?
+ !!socket
+ end
+
+ ##
+ # Try to connect to the memcached server targeted by this object.
+ # Returns the connected socket object on success or nil on failure.
+
+ def socket
+ @mutex.lock if @multithread
+ return @sock if @sock and not @sock.closed?
+
+ @sock = nil
+
+ # If the host was dead, don't retry for a while.
+ return if @retry and @retry > Time.now
+
+ # Attempt to connect if not already connected.
+ begin
+ @sock = timeout CONNECT_TIMEOUT do
+ TCPSocket.new @host, @port
+ end
+ if Socket.constants.include? 'TCP_NODELAY' then
+ @sock.setsockopt Socket::IPPROTO_TCP, Socket::TCP_NODELAY, 1
+ end
+ @retry = nil
+ @status = 'CONNECTED'
+ rescue SocketError, SystemCallError, IOError, Timeout::Error => err
+ mark_dead err.message
+ end
+
+ return @sock
+ ensure
+ @mutex.unlock if @multithread
+ end
+
+ ##
+ # Close the connection to the memcached server targeted by this
+ # object. The server is not considered dead.
+
+ def close
+ @mutex.lock if @multithread
+ @sock.close if @sock && !@sock.closed?
+ @sock = nil
+ @retry = nil
+ @status = "NOT CONNECTED"
+ ensure
+ @mutex.unlock if @multithread
+ end
+
+ private
+
+ ##
+ # Mark the server as dead and close its socket.
+
+ def mark_dead(reason = "Unknown error")
+ @sock.close if @sock && !@sock.closed?
+ @sock = nil
+ @retry = Time.now + RETRY_DELAY
+
+ @status = sprintf "DEAD: %s, will retry at %s", reason, @retry
+ end
+
+ end
+
+ ##
+ # Base MemCache exception class.
+
+ class MemCacheError < RuntimeError; end
+
+end
+
diff --git a/activesupport/test/caching_test.rb b/activesupport/test/caching_test.rb
new file mode 100644
index 0000000000..592eede63e
--- /dev/null
+++ b/activesupport/test/caching_test.rb
@@ -0,0 +1,27 @@
+require File.dirname(__FILE__) + '/abstract_unit'
+
+class CacheStoreSettingTest < Test::Unit::TestCase
+ def test_file_fragment_cache_store
+ store = ActiveSupport::Cache.lookup_store :file_store, "/path/to/cache/directory"
+ assert_kind_of(ActiveSupport::Cache::FileStore, store)
+ assert_equal "/path/to/cache/directory", store.cache_path
+ end
+
+ def test_drb_fragment_cache_store
+ store = ActiveSupport::Cache.lookup_store :drb_store, "druby://localhost:9192"
+ assert_kind_of(ActiveSupport::Cache::DRbStore, store)
+ assert_equal "druby://localhost:9192", store.address
+ end
+
+ def test_mem_cache_fragment_cache_store
+ store = ActiveSupport::Cache.lookup_store :mem_cache_store, "localhost"
+ assert_kind_of(ActiveSupport::Cache::MemCacheStore, store)
+ assert_equal %w(localhost), store.addresses
+ end
+
+ def test_object_assigned_fragment_cache_store
+ store = ActiveSupport::Cache.lookup_store ActiveSupport::Cache::FileStore.new("/path/to/cache/directory")
+ assert_kind_of(ActiveSupport::Cache::FileStore, store)
+ assert_equal "/path/to/cache/directory", store.cache_path
+ end
+end
diff --git a/railties/CHANGELOG b/railties/CHANGELOG
index 4c93973cc2..f1c8b0abd1 100644
--- a/railties/CHANGELOG
+++ b/railties/CHANGELOG
@@ -1,5 +1,7 @@
*SVN*
+* Added config.cache_store to environment options to control the default cache store (default is FileStore if tmp/cache is present, otherwise MemoryStore is used) [DHH]
+
* Added that rails:update is run when you do rails:freeze:edge to ensure you also get the latest JS and config files #10565 [jeff]
* SQLite: db:drop:all doesn't fail silently if the database is already open. #10577 [Cheah Chu Yeow, mrichman]
diff --git a/railties/lib/initializer.rb b/railties/lib/initializer.rb
index 5aed10afb5..c3056c196d 100644
--- a/railties/lib/initializer.rb
+++ b/railties/lib/initializer.rb
@@ -58,29 +58,7 @@ module Rails
end
# Sequentially step through all of the available initialization routines,
- # in order:
- #
- # * #check_ruby_version
- # * #set_load_path
- # * #require_frameworks
- # * #set_autoload_paths
- # * add_plugin_load_paths
- # * #load_environment
- # * #initialize_encoding
- # * #initialize_database
- # * #initialize_logger
- # * #initialize_framework_logging
- # * #initialize_framework_views
- # * #initialize_dependency_mechanism
- # * #initialize_whiny_nils
- # * #initialize_temporary_directories
- # * #initialize_framework_settings
- # * #add_support_load_paths
- # * #load_plugins
- # * #load_observers
- # * #initialize_routing
- # * #after_initialize
- # * #load_application_initializers
+ # in order (view execution order in source).
def process
check_ruby_version
set_load_path
@@ -92,12 +70,17 @@ module Rails
initialize_encoding
initialize_database
+
+ initialize_cache
+ initialize_framework_caches
+
initialize_logger
initialize_framework_logging
+
initialize_framework_views
initialize_dependency_mechanism
initialize_whiny_nils
- initialize_temporary_directories
+ initialize_temporary_session_directory
initialize_framework_settings
add_support_load_paths
@@ -239,6 +222,18 @@ module Rails
end
end
+ def initialize_cache
+ unless defined?(RAILS_CACHE)
+ silence_warnings { Object.const_set "RAILS_CACHE", ActiveSupport::Cache.lookup_store(configuration.cache_store) }
+ end
+ end
+
+ def initialize_framework_caches
+ if configuration.frameworks.include?(:action_controller)
+ ActionController::Base.cache_store ||= RAILS_CACHE
+ end
+ end
+
# If the +RAILS_DEFAULT_LOGGER+ constant is already set, this initialization
# routine does nothing. If the constant is not set, and Configuration#logger
# is not +nil+, this also does nothing. Otherwise, a new logger instance
@@ -277,6 +272,8 @@ module Rails
for framework in ([ :active_record, :action_controller, :action_mailer ] & configuration.frameworks)
framework.to_s.camelize.constantize.const_get("Base").logger ||= RAILS_DEFAULT_LOGGER
end
+
+ RAILS_CACHE.logger ||= RAILS_DEFAULT_LOGGER
end
# Sets +ActionController::Base#view_paths+ and +ActionMailer::Base#template_root+
@@ -309,15 +306,10 @@ module Rails
require('active_support/whiny_nil') if configuration.whiny_nils
end
- def initialize_temporary_directories
+ def initialize_temporary_session_directory
if configuration.frameworks.include?(:action_controller)
session_path = "#{configuration.root_path}/tmp/sessions/"
ActionController::Base.session_options[:tmpdir] = File.exist?(session_path) ? session_path : Dir::tmpdir
-
- cache_path = "#{configuration.root_path}/tmp/cache/"
- if File.exist?(cache_path)
- ActionController::Base.fragment_cache_store = :file_store, cache_path
- end
end
end
@@ -418,6 +410,9 @@ module Rails
# used directly.
attr_accessor :logger
+ # The specific cache store to use. By default, the ActiveSupport::Cache::Store will be used.
+ attr_accessor :cache_store
+
# The root of the application's views. (Defaults to <tt>app/views</tt>.)
attr_accessor :view_path
@@ -647,6 +642,14 @@ module Rails
def default_plugin_loader
Plugin::Loader
end
+
+ def default_cache_store
+ if File.exist?("#{root_path}/tmp/cache/")
+ [ :file_store, "#{root_path}/tmp/cache/" ]
+ else
+ :memory_store
+ end
+ end
end
end
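Illustrative config/environment.rb usage of the new option (server addresses and paths are examples):

    Rails::Initializer.run do |config|
      # pick an explicit store instead of the tmp/cache-based default
      config.cache_store = :mem_cache_store, "cache-1:11211", "cache-2:11211"

      # or keep fragments on disk in a custom location
      # config.cache_store = :file_store, "/var/cache/myapp"
    end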