-rw-r--r--  actionpack/CHANGELOG                                                        |   2
-rw-r--r--  actionpack/lib/action_controller/integration.rb                             |   9
-rw-r--r--  actionpack/lib/action_controller/performance_test.rb                        |  16
-rw-r--r--  activesupport/lib/active_support/testing/performance.rb                     | 325
-rw-r--r--  railties/helpers/performance_test.rb                                        |   8
-rw-r--r--  railties/helpers/performance_test_helper.rb                                 |   6
-rw-r--r--  railties/lib/rails_generator/generators/applications/app/app_generator.rb   |   3
-rw-r--r--  railties/lib/tasks/testing.rake                                             |  15
8 files changed, 380 insertions(+), 4 deletions(-)
diff --git a/actionpack/CHANGELOG b/actionpack/CHANGELOG
index 8d1acb265f..affefabe4b 100644
--- a/actionpack/CHANGELOG
+++ b/actionpack/CHANGELOG
@@ -1,5 +1,7 @@
 *Edge*
 
+* Performance: integration test benchmarking and profiling. [Jeremy Kemper]
+
 * Make caching more aware of mime types. Ensure request format is not considered while expiring cache. [Jonathan del Strother]
 
 * Drop ActionController::Base.allow_concurrency flag [Josh Peek]
diff --git a/actionpack/lib/action_controller/integration.rb b/actionpack/lib/action_controller/integration.rb
index bd69d02ed7..18c2df8b37 100644
--- a/actionpack/lib/action_controller/integration.rb
+++ b/actionpack/lib/action_controller/integration.rb
@@ -1,9 +1,10 @@
-require 'stringio'
-require 'uri'
-
+require 'active_support/test_case'
 require 'action_controller/dispatcher'
 require 'action_controller/test_process'
+require 'stringio'
+require 'uri'
+
 module ActionController
   module Integration #:nodoc:
     # An integration Session instance represents a set of requests and responses
@@ -580,7 +581,7 @@ EOF
   #       end
   #     end
   #   end
-  class IntegrationTest < Test::Unit::TestCase
+  class IntegrationTest < ActiveSupport::TestCase
     include Integration::Runner
 
     # Work around a bug in test/unit caused by the default test being named
diff --git a/actionpack/lib/action_controller/performance_test.rb b/actionpack/lib/action_controller/performance_test.rb
new file mode 100644
index 0000000000..85543fffae
--- /dev/null
+++ b/actionpack/lib/action_controller/performance_test.rb
@@ -0,0 +1,16 @@
+require 'action_controller/integration'
+require 'active_support/testing/performance'
+require 'active_support/testing/default'
+
+module ActionController
+  # An integration test that runs a code profiler on your test methods.
+  # Profiling output for each combination of test method, measurement, and
+  # output format is written to your tmp/performance directory.
+  #
+  # By default, process_time is measured and both flat and graph_html output
+  # formats are written, so you'll have two output files per test method.
+  class PerformanceTest < ActionController::IntegrationTest
+    include ActiveSupport::Testing::Performance
+    include ActiveSupport::Testing::Default
+  end
+end
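For illustration only (not part of this commit): ActionController::PerformanceTest is meant to be subclassed under an application's test/performance directory, and because profile_options is a class-inheritable hash it merges with the DEFAULTS set up in ActiveSupport::Testing::Performance. The class name, route, and params below are hypothetical.

    require 'performance/test_helper'

    class CheckoutTest < ActionController::PerformanceTest
      # Merges into the inherited defaults: fewer runs, fewer metrics.
      self.profile_options = { :runs => 5, :metrics => [:wall_time, :memory] }

      def test_checkout
        post '/orders', :order => { :sku => 'widget' }  # hypothetical route and params
      end
    end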
diff --git a/activesupport/lib/active_support/testing/performance.rb b/activesupport/lib/active_support/testing/performance.rb
new file mode 100644
index 0000000000..b8e4f46d70
--- /dev/null
+++ b/activesupport/lib/active_support/testing/performance.rb
@@ -0,0 +1,325 @@
+require 'rubygems'
+gem 'ruby-prof', '>= 0.6.1'
+require 'ruby-prof'
+
+require 'fileutils'
+require 'rails/version'
+
+module ActiveSupport
+  module Testing
+    module Performance
+      benchmark = ARGV.include?('--benchmark') # HAX for rake test
+
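+      # These defaults are merged into each test case's class-inheritable
+      # profile_options (see self.included below), so a subclass can override
+      # :runs, :metrics, :formats, etc. Passing --benchmark on the command
+      # line (as rake test:benchmark does) switches from profiling a single
+      # run to benchmarking repeated runs.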
+      DEFAULTS = {
+        :benchmark => benchmark,
+        :runs => benchmark ? 10 : 1,
+        :min_percent => 0.02,
+        :metrics => [:process_time, :wall_time, :cpu_time, :memory, :objects],
+        :formats => [:flat, :graph_html, :call_tree],
+        :output => 'tmp/performance'
+      }
+
+      def self.included(base)
+        base.class_inheritable_hash :profile_options
+        base.profile_options = DEFAULTS.dup
+      end
+
+      def full_test_name
+        "#{self.class.name}##{method_name}"
+      end
+
+      def run(result)
+        return if method_name =~ /^default_test$/
+
+        yield(self.class::STARTED, name)
+        @_result = result
+
+        run_warmup
+        profile_options[:metrics].each do |metric_name|
+          if klass = Metrics[metric_name.to_sym]
+            run_profile(klass.new)
+            result.add_run
+          else
+            $stderr.puts "Skipping unknown metric #{metric_name.inspect}. Expected :process_time, :wall_time, :cpu_time, :memory, or :objects."
+          end
+        end
+
+        yield(self.class::FINISHED, name)
+      end
+
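+      # Runs a single test method under the given metric, in either :benchmark
+      # or :profile mode, with the usual setup/teardown callbacks and
+      # test/unit failure and error handling around it.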
+      def run_test(metric, mode)
+        run_callbacks :setup
+        setup
+        metric.send(mode) { __send__ @method_name }
+      rescue ::Test::Unit::AssertionFailedError => e
+        add_failure(e.message, e.backtrace)
+      rescue StandardError, ScriptError
+        add_error($!)
+      ensure
+        begin
+          teardown
+          run_callbacks :teardown, :enumerator => :reverse_each
+        rescue ::Test::Unit::AssertionFailedError => e
+          add_failure(e.message, e.backtrace)
+        rescue StandardError, ScriptError
+          add_error($!)
+        end
+      end
+
+      protected
+        def run_warmup
+          time = Metrics::Time.new
+          run_test(time, :benchmark)
+          puts "%s (%s warmup)" % [full_test_name, time.format(time.total)]
+        end
+
+        def run_profile(metric)
+          klass = profile_options[:benchmark] ? Benchmarker : Profiler
+          performer = klass.new(self, metric)
+
+          performer.run
+          puts performer.report
+          performer.record
+        end
+
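+      # A Performer drives one metric across the configured number of runs.
+      # Benchmarker appends a CSV row of measurements to benchmarks.csv under
+      # profile_options[:output]; Profiler runs under ruby-prof and writes one
+      # file per requested output format.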
+      class Performer
+        delegate :run_test, :profile_options, :full_test_name, :to => :@harness
+
+        def initialize(harness, metric)
+          @harness, @metric = harness, metric
+        end
+
+        def report
+          rate = @total / profile_options[:runs]
+          '%20s: %s/run' % [@metric.name, @metric.format(rate)]
+        end
+      end
+
+      class Benchmarker < Performer
+        def run
+          profile_options[:runs].to_i.times { run_test(@metric, :benchmark) }
+          @total = @metric.total
+        end
+
+        def record
+          avg = @metric.total / profile_options[:runs].to_i
+          data = [full_test_name, @metric.name, avg, Time.now.utc.xmlschema] * ','
+          with_output_file do |file|
+            file.puts "#{data},#{environment}"
+          end
+        end
+
+        def environment
+          unless defined? @env
+            app = "#{$1}.#{$2}" if `git branch -v` =~ /^\* (\S+)\s+(\S+)/
+
+            rails = Rails::VERSION::STRING
+            if File.directory?('vendor/rails/.git')
+              Dir.chdir('vendor/rails') do
+                rails += ".#{$1}.#{$2}" if `git branch -v` =~ /^\* (\S+)\s+(\S+)/
+              end
+            end
+
+            ruby = defined?(RUBY_ENGINE) ? RUBY_ENGINE : 'ruby'
+            ruby += "-#{RUBY_VERSION}.#{RUBY_PATCHLEVEL}"
+
+            @env = [app, rails, ruby, RUBY_PLATFORM] * ','
+          end
+
+          @env
+        end
+
+        protected
+          HEADER = 'test,metric,measurement,created_at,app,rails,ruby,platform'
+
+          def with_output_file
+            fname = "#{profile_options[:output]}/benchmarks.csv"
+
+            if new = !File.exist?(fname)
+              FileUtils.mkdir_p(File.dirname(fname))
+            end
+
+            File.open(fname, 'ab') do |file|
+              file.puts(HEADER) if new
+              yield file
+            end
+          end
+      end
+
+      class Profiler < Performer
+        def run
+          RubyProf.measure_mode = @metric.measure_mode
+          RubyProf.start
+          RubyProf.pause
+          profile_options[:runs].to_i.times { run_test(@metric, :profile) }
+          @data = RubyProf.stop
+          @total = @data.threads.values.sum(0) { |method_infos| method_infos.sort.last.total_time }
+        end
+
+        def record
+          klasses = profile_options[:formats].map { |f| RubyProf.const_get("#{f.to_s.camelize}Printer") }.compact
+
+          klasses.each do |klass|
+            fname = output_filename(klass)
+            FileUtils.mkdir_p(File.dirname(fname))
+            File.open(fname, 'wb') do |file|
+              klass.new(@data).print(file, profile_options.slice(:min_percent))
+            end
+          end
+        end
+
+        protected
+          def output_filename(printer_class)
+            suffix =
+              case printer_class.name.demodulize
+                when 'FlatPrinter'; 'flat.txt'
+                when 'GraphPrinter'; 'graph.txt'
+                when 'GraphHtmlPrinter'; 'graph.html'
+                when 'CallTreePrinter'; 'tree.txt'
+                else printer_class.name.sub(/Printer$/, '').underscore
+              end
+
+            "#{profile_options[:output]}/#{full_test_name}_#{@metric.name}_#{suffix}"
+          end
+      end
+
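+      # Each concrete metric class exposes Mode (the ruby-prof measure mode),
+      # measure (read the current counter), and format (pretty-print a
+      # measurement). Metrics[] resolves a metric name to its class, or nil if
+      # the name is unknown or unsupported on this runtime.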
+      module Metrics
+        def self.[](name)
+          klass = const_get(name.to_s.camelize)
+          klass if klass::Mode
+        rescue NameError
+          nil
+        end
+
+        class Base
+          attr_reader :total
+
+          def initialize
+            @total = 0
+          end
+
+          def name
+            @name ||= self.class.name.demodulize.underscore
+          end
+
+          def measure_mode
+            self.class::Mode
+          end
+
+          def benchmark
+            with_gc_stats do
+              before = measure
+              yield
+              @total += (measure - before)
+            end
+          end
+
+          def profile
+            RubyProf.resume
+            yield
+          ensure
+            RubyProf.pause
+          end
+
+          protected
+            if GC.respond_to?(:enable_stats)
+              def with_gc_stats
+                GC.enable_stats
+                yield
+              ensure
+                GC.disable_stats
+              end
+            else
+              def with_gc_stats
+                yield
+              end
+            end
+        end
+
+        class Time < Base
+          def measure
+            ::Time.now.to_f
+          end
+
+          def format(measurement)
+            if measurement < 2
+              '%d ms' % (measurement * 1000)
+            else
+              '%.2f sec' % measurement
+            end
+          end
+        end
+
+        class ProcessTime < Time
+          Mode = RubyProf::PROCESS_TIME
+
+          def measure
+            RubyProf.measure_process_time
+          end
+        end
+
+        class WallTime < Time
+          Mode = RubyProf::WALL_TIME
+
+          def measure
+            RubyProf.measure_wall_time
+          end
+        end
+
+        class CpuTime < Time
+          Mode = RubyProf::CPU_TIME
+
+          def initialize(*args)
+            # FIXME: yeah my CPU is 2.33 GHz
+            RubyProf.cpu_frequency = 2.33e9
+            super
+          end
+
+          def measure
+            RubyProf.measure_cpu_time
+          end
+        end
+
+        class Memory < Base
+          Mode = RubyProf::MEMORY
+
+          if RubyProf.respond_to?(:measure_memory)
+            def measure
+              RubyProf.measure_memory / 1024.0
+            end
+          elsif GC.respond_to?(:allocated_size)
+            def measure
+              GC.allocated_size / 1024.0
+            end
+          elsif GC.respond_to?(:malloc_allocated_size)
+            def measure
+              GC.malloc_allocated_size / 1024.0
+            end
+          end
+
+          def format(measurement)
+            '%.2f KB' % measurement
+          end
+        end
+
+        class Objects < Base
+          Mode = RubyProf::ALLOCATIONS
+
+          if RubyProf.respond_to?(:measure_allocations)
+            def measure
+              RubyProf.measure_allocations
+            end
+          elsif ObjectSpace.respond_to?(:allocated_objects)
+            def measure
+              ObjectSpace.allocated_objects
+            end
+          end
+
+          def format(measurement)
+            measurement.to_i.to_s
+          end
+        end
+      end
+    end
+  end
+end
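For illustration only (not part of this commit), a sketch of how the Metrics registry above resolves names:

    metrics = ActiveSupport::Testing::Performance::Metrics
    metrics[:wall_time]  # => Metrics::WallTime
    metrics[:time]       # => nil -- Metrics::Time defines no Mode constant, so klass::Mode raises NameError
    metrics[:unknown]    # => nil -- const_get raises NameError, which is rescued

Unknown metric names are reported to $stderr and skipped by #run rather than failing the test.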
diff --git a/railties/helpers/performance_test.rb b/railties/helpers/performance_test.rb
new file mode 100644
index 0000000000..7c89816570
--- /dev/null
+++ b/railties/helpers/performance_test.rb
@@ -0,0 +1,8 @@
+require 'performance/test_helper'
+
+# Profiling results for each test method are written to tmp/performance.
+class BrowsingTest < ActionController::PerformanceTest
+  def test_homepage
+    get '/'
+  end
+end
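With the defaults above, profiling this generated test writes files such as tmp/performance/BrowsingTest#test_homepage_process_time_flat.txt and tmp/performance/BrowsingTest#test_homepage_process_time_graph.html, one per metric and output format, following output_filename in ActiveSupport::Testing::Performance::Profiler.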
diff --git a/railties/helpers/performance_test_helper.rb b/railties/helpers/performance_test_helper.rb
new file mode 100644
index 0000000000..3c4c7fb740
--- /dev/null
+++ b/railties/helpers/performance_test_helper.rb
@@ -0,0 +1,6 @@
+require 'test_helper'
+require 'action_controller/performance_test'
+
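+# Production-like settings: caching on, no class reloading, and a quieter
+# logger, so measurements reflect application code rather than
+# development-mode overhead.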
+ActionController::Base.perform_caching = true
+ActiveSupport::Dependencies.mechanism = :require
+Rails.logger.level = ActiveSupport::BufferedLogger::INFO
diff --git a/railties/lib/rails_generator/generators/applications/app/app_generator.rb b/railties/lib/rails_generator/generators/applications/app/app_generator.rb
index 2f2dd82682..80e8eabfd3 100644
--- a/railties/lib/rails_generator/generators/applications/app/app_generator.rb
+++ b/railties/lib/rails_generator/generators/applications/app/app_generator.rb
@@ -51,6 +51,8 @@ class AppGenerator < Rails::Generator::Base
m.template "helpers/application.rb", "app/controllers/application.rb", :assigns => { :app_name => @app_name, :app_secret => md5.hexdigest }
m.template "helpers/application_helper.rb", "app/helpers/application_helper.rb"
m.template "helpers/test_helper.rb", "test/test_helper.rb"
+ m.template "helpers/performance_test_helper.rb", "test/performance/test_helper.rb"
+ m.template "helpers/performance_test.rb", "test/performance/browsing_test.rb"
# database.yml and routes.rb
m.template "configs/databases/#{options[:db]}.yml", "config/database.yml", :assigns => {
@@ -155,6 +157,7 @@ class AppGenerator < Rails::Generator::Base
     test/fixtures
     test/functional
     test/integration
+    test/performance
     test/unit
     vendor
     vendor/plugins
diff --git a/railties/lib/tasks/testing.rake b/railties/lib/tasks/testing.rake
index cc2376cbb3..c8ba6eed94 100644
--- a/railties/lib/tasks/testing.rake
+++ b/railties/lib/tasks/testing.rake
@@ -103,6 +103,21 @@ namespace :test do
   end
   Rake::Task['test:integration'].comment = "Run the integration tests in test/integration"
 
+  Rake::TestTask.new(:benchmark => 'db:test:prepare') do |t|
+    t.libs << 'test'
+    t.pattern = 'test/performance/**/*_test.rb'
+    t.verbose = true
+    t.options = '-- --benchmark'
+  end
+  Rake::Task['test:benchmark'].comment = 'Benchmark the performance tests'
+
+  Rake::TestTask.new(:profile => 'db:test:prepare') do |t|
+    t.libs << 'test'
+    t.pattern = 'test/performance/**/*_test.rb'
+    t.verbose = true
+  end
+  Rake::Task['test:profile'].comment = 'Profile the performance tests'
+
   Rake::TestTask.new(:plugins => :environment) do |t|
     t.libs << "test"
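With these tasks in place, `rake test:benchmark` runs everything under test/performance with `-- --benchmark` appended to the test options; ActiveSupport::Testing::Performance picks the flag up from ARGV and switches into benchmarking mode (10 runs per metric by default), while `rake test:profile` runs the same tests under ruby-prof with a single profiled run per metric.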