author     Yves Senn <yves.senn@gmail.com>    2013-01-06 00:05:25 +0100
committer  Yves Senn <yves.senn@gmail.com>    2013-01-10 17:09:06 +0100
commit     3e1ed7818ba1abbc331c568327c6c0d64702985e (patch)
tree       55b9e604c97849090f154f4a8feb2ad52a44858a
parent     8d926f58d91020f9a8936ad6908939c0e298907b (diff)
download   rails-3e1ed7818ba1abbc331c568327c6c0d64702985e.tar.gz
           rails-3e1ed7818ba1abbc331c568327c6c0d64702985e.tar.bz2
           rails-3e1ed7818ba1abbc331c568327c6c0d64702985e.zip
extract PerformanceTest into the rails-perftest gem
-rw-r--r--  actionpack/CHANGELOG.md | 7
-rw-r--r--  actionpack/lib/action_controller.rb | 1
-rw-r--r--  actionpack/lib/action_controller/deprecated/performance_test.rb | 3
-rw-r--r--  actionpack/lib/action_dispatch.rb | 1
-rw-r--r--  actionpack/lib/action_dispatch/testing/performance_test.rb | 10
-rw-r--r--  activesupport/CHANGELOG.md | 8
-rw-r--r--  activesupport/lib/active_support/testing/performance.rb | 271
-rw-r--r--  activesupport/lib/active_support/testing/performance/jruby.rb | 115
-rw-r--r--  activesupport/lib/active_support/testing/performance/rubinius.rb | 113
-rw-r--r--  activesupport/lib/active_support/testing/performance/ruby.rb | 173
-rw-r--r--  activesupport/test/testing/performance_test.rb | 68
-rw-r--r--  railties/lib/rails/commands/benchmarker.rb | 34
-rw-r--r--  railties/lib/rails/commands/profiler.rb | 32
-rw-r--r--  railties/lib/rails/generators/rails/app/templates/test/performance/browsing_test.rb | 12
-rw-r--r--  railties/lib/rails/generators/rails/performance_test/USAGE | 10
-rw-r--r--  railties/lib/rails/generators/rails/performance_test/performance_test_generator.rb | 7
-rw-r--r--  railties/lib/rails/generators/test_unit/performance/performance_generator.rb | 13
-rw-r--r--  railties/lib/rails/generators/test_unit/performance/templates/performance_test.rb | 12
-rw-r--r--  railties/lib/rails/performance_test_help.rb | 3
-rw-r--r--  railties/lib/rails/test_unit/testing.rake | 11
-rw-r--r--  railties/test/generators/performance_test_generator_test.rb | 12
21 files changed, 15 insertions(+), 901 deletions(-)
diff --git a/actionpack/CHANGELOG.md b/actionpack/CHANGELOG.md
index 1a8f187979..7dea0e7fa5 100644
--- a/actionpack/CHANGELOG.md
+++ b/actionpack/CHANGELOG.md
@@ -1,5 +1,12 @@
## Rails 4.0.0 (unreleased) ##
+* Extract `ActionDispatch::PerformanceTest` into https://github.com/rails/rails-perftest
+ You can add the gem to your Gemfile to keep using performance tests.
+
+ gem 'rails-perftest'
+
+ *Yves Senn*
+
* Added view_cache_dependency API for declaring dependencies that affect
cache digest computation.
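
As the entry above says, applications can keep their performance tests by pulling in the extracted gem. A minimal sketch of the migration, assuming rails-perftest provides the same ActionDispatch::PerformanceTest API removed in this commit (the ruby-prof line mirrors the requirement in the removed ruby.rb backend below):

    # Gemfile
    group :development, :test do
      gem 'rails-perftest'
      gem 'ruby-prof'   # profiling backend on MRI
    end

Existing files under test/performance/ then continue to subclass ActionDispatch::PerformanceTest unchanged.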
diff --git a/actionpack/lib/action_controller.rb b/actionpack/lib/action_controller.rb
index 1a13d7af29..9cacb3862b 100644
--- a/actionpack/lib/action_controller.rb
+++ b/actionpack/lib/action_controller.rb
@@ -42,7 +42,6 @@ module ActionController
autoload :Integration, 'action_controller/deprecated/integration_test'
autoload :IntegrationTest, 'action_controller/deprecated/integration_test'
- autoload :PerformanceTest, 'action_controller/deprecated/performance_test'
autoload :Routing, 'action_controller/deprecated'
autoload :TestCase, 'action_controller/test_case'
autoload :TemplateAssertions, 'action_controller/test_case'
diff --git a/actionpack/lib/action_controller/deprecated/performance_test.rb b/actionpack/lib/action_controller/deprecated/performance_test.rb
deleted file mode 100644
index c7ba5a2fe7..0000000000
--- a/actionpack/lib/action_controller/deprecated/performance_test.rb
+++ /dev/null
@@ -1,3 +0,0 @@
-ActionController::PerformanceTest = ActionDispatch::PerformanceTest
-
-ActiveSupport::Deprecation.warn 'ActionController::PerformanceTest is deprecated and will be removed, use ActionDispatch::PerformanceTest instead.'
diff --git a/actionpack/lib/action_dispatch.rb b/actionpack/lib/action_dispatch.rb
index b35761fb4a..9ab048b756 100644
--- a/actionpack/lib/action_dispatch.rb
+++ b/actionpack/lib/action_dispatch.rb
@@ -97,7 +97,6 @@ module ActionDispatch
autoload :Assertions
autoload :Integration
autoload :IntegrationTest, 'action_dispatch/testing/integration'
- autoload :PerformanceTest
autoload :TestProcess
autoload :TestRequest
autoload :TestResponse
diff --git a/actionpack/lib/action_dispatch/testing/performance_test.rb b/actionpack/lib/action_dispatch/testing/performance_test.rb
deleted file mode 100644
index 13fe693c32..0000000000
--- a/actionpack/lib/action_dispatch/testing/performance_test.rb
+++ /dev/null
@@ -1,10 +0,0 @@
-require 'active_support/testing/performance'
-
-module ActionDispatch
- # An integration test that runs a code profiler on your test methods.
- # Profiling output for combinations of each test method, measurement, and
- # output format are written to your tmp/performance directory.
- class PerformanceTest < ActionDispatch::IntegrationTest
- include ActiveSupport::Testing::Performance
- end
-end
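
The class removed above was the base class for application-level performance tests. A sketch of how it was subclassed, modeled on the generator template further down in this diff (the uncommented profile_options values are illustrative):

    require 'test_helper'
    require 'rails/performance_test_help'

    class HomepageTest < ActionDispatch::PerformanceTest
      # One output file per test method, metric, and format,
      # written under tmp/performance.
      self.profile_options = { runs: 5, metrics: [:wall_time], formats: [:flat] }

      test "homepage" do
        get '/'
      end
    end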
diff --git a/activesupport/CHANGELOG.md b/activesupport/CHANGELOG.md
index 5848f9712f..2c0d1de70f 100644
--- a/activesupport/CHANGELOG.md
+++ b/activesupport/CHANGELOG.md
@@ -1,5 +1,13 @@
## Rails 4.0.0 (unreleased) ##
+* Extract `ActiveSupport::Testing::Performance` into https://github.com/rails/rails-perftest
+ You can add the gem to your Gemfile to keep using performance tests.
+
+ gem 'rails-perftest'
+
+ *Yves Senn*
+
+
* Hash.from_xml raises when it encounters type="symbol" or type="yaml".
Use Hash.from_trusted_xml to parse this XML.
diff --git a/activesupport/lib/active_support/testing/performance.rb b/activesupport/lib/active_support/testing/performance.rb
deleted file mode 100644
index 7102ffe2ed..0000000000
--- a/activesupport/lib/active_support/testing/performance.rb
+++ /dev/null
@@ -1,271 +0,0 @@
-require 'fileutils'
-require 'active_support/concern'
-require 'active_support/core_ext/class/delegating_attributes'
-require 'active_support/core_ext/string/inflections'
-require 'active_support/core_ext/module/delegation'
-require 'active_support/number_helper'
-
-module ActiveSupport
- module Testing
- module Performance
- extend ActiveSupport::Concern
-
- included do
- superclass_delegating_accessor :profile_options
- self.profile_options = {}
- end
-
- # each implementation should define metrics and freeze the defaults
- DEFAULTS =
- if ARGV.include?('--benchmark') # HAX for rake test
- { :runs => 4,
- :output => 'tmp/performance',
- :benchmark => true }
- else
- { :runs => 1,
- :output => 'tmp/performance',
- :benchmark => false }
- end
-
- def full_profile_options
- DEFAULTS.merge(profile_options)
- end
-
- def full_test_name
- "#{self.class.name}##{method_name}"
- end
-
- def run(runner)
- @runner = runner
-
- run_warmup
- if full_profile_options && metrics = full_profile_options[:metrics]
- metrics.each do |metric_name|
- if klass = Metrics[metric_name.to_sym]
- run_profile(klass.new)
- end
- end
- end
-
- return
- end
-
- def run_test(metric, mode)
- result = '.'
- begin
- run_callbacks :setup
- setup
- metric.send(mode) { __send__ method_name }
- rescue Exception => e
- result = @runner.puke(self.class, method_name, e)
- ensure
- begin
- teardown
- run_callbacks :teardown
- rescue Exception => e
- result = @runner.puke(self.class, method_name, e)
- end
- end
- result
- end
-
- protected
- # overridden by each implementation.
- def run_gc; end
-
- def run_warmup
- run_gc
-
- time = Metrics::Time.new
- run_test(time, :benchmark)
- puts "%s (%s warmup)" % [full_test_name, time.format(time.total)]
-
- run_gc
- end
-
- def run_profile(metric)
- klass = full_profile_options[:benchmark] ? Benchmarker : Profiler
- performer = klass.new(self, metric)
-
- performer.run
- puts performer.report
- performer.record
- end
-
- class Performer
- delegate :run_test, :full_profile_options, :full_test_name, :to => :@harness
-
- def initialize(harness, metric)
- @harness, @metric, @supported = harness, metric, false
- end
-
- def report
- if @supported
- rate = @total / full_profile_options[:runs]
- '%20s: %s' % [@metric.name, @metric.format(rate)]
- else
- '%20s: unsupported' % @metric.name
- end
- end
-
- protected
- def output_filename
- "#{full_profile_options[:output]}/#{full_test_name}_#{@metric.name}"
- end
- end
-
- # overridden by each implementation.
- class Profiler < Performer
- def time_with_block
- before = Time.now
- yield
- Time.now - before
- end
-
- def run; end
- def record; end
- end
-
- class Benchmarker < Performer
- def initialize(*args)
- super
- @supported = @metric.respond_to?('measure')
- end
-
- def run
- return unless @supported
-
- full_profile_options[:runs].to_i.times { run_test(@metric, :benchmark) }
- @total = @metric.total
- end
-
- def record
- avg = @metric.total / full_profile_options[:runs].to_i
- now = Time.now.utc.xmlschema
- with_output_file do |file|
- file.puts "#{avg},#{now},#{environment}"
- end
- end
-
- def environment
- @env ||= [].tap do |env|
- env << "#{$1}.#{$2}" if File.directory?('.git') && `git branch -v` =~ /^\* (\S+)\s+(\S+)/
- env << rails_version if defined?(Rails::VERSION::STRING)
- env << "#{RUBY_ENGINE}-#{RUBY_VERSION}.#{RUBY_PATCHLEVEL}"
- env << RUBY_PLATFORM
- end.join(',')
- end
-
- protected
- if defined?(Rails::VERSION::STRING)
- HEADER = 'measurement,created_at,app,rails,ruby,platform'
- else
- HEADER = 'measurement,created_at,app,ruby,platform'
- end
-
- def with_output_file
- fname = output_filename
-
- if new = !File.exist?(fname)
- FileUtils.mkdir_p(File.dirname(fname))
- end
-
- File.open(fname, 'ab') do |file|
- file.puts(HEADER) if new
- yield file
- end
- end
-
- def output_filename
- "#{super}.csv"
- end
-
- def rails_version
- "rails-#{Rails::VERSION::STRING}#{rails_branch}"
- end
-
- def rails_branch
- if File.directory?('vendor/rails/.git')
- Dir.chdir('vendor/rails') do
- ".#{$1}.#{$2}" if `git branch -v` =~ /^\* (\S+)\s+(\S+)/
- end
- end
- end
- end
-
- module Metrics
- def self.[](name)
- const_get(name.to_s.camelize)
- rescue NameError
- nil
- end
-
- class Base
- include ActiveSupport::NumberHelper
-
- attr_reader :total
-
- def initialize
- @total = 0
- end
-
- def name
- @name ||= self.class.name.demodulize.underscore
- end
-
- def benchmark
- with_gc_stats do
- before = measure
- yield
- @total += (measure - before)
- end
- end
-
- # overridden by each implementation.
- def profile; end
-
- protected
- # overridden by each implementation.
- def with_gc_stats; end
- end
-
- class Time < Base
- def measure
- ::Time.now.to_f
- end
-
- def format(measurement)
- if measurement < 1
- '%d ms' % (measurement * 1000)
- else
- '%.2f sec' % measurement
- end
- end
- end
-
- class Amount < Base
- def format(measurement)
- number_to_delimited(measurement.floor)
- end
- end
-
- class DigitalInformationUnit < Base
- def format(measurement)
- number_to_human_size(measurement, :precision => 2)
- end
- end
-
- # each implementation provides its own metrics like ProcessTime, Memory or GcRuns
- end
- end
- end
-end
-
-case RUBY_ENGINE
- when 'ruby' then require 'active_support/testing/performance/ruby'
- when 'rbx' then require 'active_support/testing/performance/rubinius'
- when 'jruby' then require 'active_support/testing/performance/jruby'
- else
- $stderr.puts 'Your ruby interpreter is not supported for benchmarking.'
- exit
-end
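
Two conventions in the removed module are easy to miss: per-test options are layered over DEFAULTS by full_profile_options, and metric names are resolved to classes by camelizing them and calling const_get. A self-contained sketch of that lookup pattern (not the removed module itself):

    require 'active_support/core_ext/string/inflections'

    module Metrics
      def self.[](name)
        const_get(name.to_s.camelize)   # :wall_time => Metrics::WallTime
      rescue NameError
        nil                             # unknown metric names are skipped
      end

      class WallTime; end
    end

    Metrics[:wall_time]  # => Metrics::WallTime
    Metrics[:not_there]  # => nil

This is why run above can simply iterate full_profile_options[:metrics] and silently ignore metrics the current interpreter does not provide.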
diff --git a/activesupport/lib/active_support/testing/performance/jruby.rb b/activesupport/lib/active_support/testing/performance/jruby.rb
deleted file mode 100644
index 34e3f9f45f..0000000000
--- a/activesupport/lib/active_support/testing/performance/jruby.rb
+++ /dev/null
@@ -1,115 +0,0 @@
-require 'jruby/profiler'
-require 'java'
-java_import java.lang.management.ManagementFactory
-
-module ActiveSupport
- module Testing
- module Performance
- DEFAULTS.merge!(
- if ARGV.include?('--benchmark')
- {:metrics => [:wall_time, :user_time, :memory, :gc_runs, :gc_time]}
- else
- { :metrics => [:wall_time],
- :formats => [:flat, :graph] }
- end).freeze
-
- protected
- def run_gc
- ManagementFactory.memory_mx_bean.gc
- end
-
- class Profiler < Performer
- def initialize(*args)
- super
- @supported = @metric.is_a?(Metrics::WallTime)
- end
-
- def run
- return unless @supported
-
- @total = time_with_block do
- @data = JRuby::Profiler.profile do
- full_profile_options[:runs].to_i.times { run_test(@metric, :profile) }
- end
- end
- end
-
- def record
- return unless @supported
-
- klasses = full_profile_options[:formats].map { |f| JRuby::Profiler.const_get("#{f.to_s.camelize}ProfilePrinter") }.compact
-
- klasses.each do |klass|
- fname = output_filename(klass)
- FileUtils.mkdir_p(File.dirname(fname))
- File.open(fname, 'wb') do |file|
- klass.new(@data).printProfile(file)
- end
- end
- end
-
- protected
- def output_filename(printer_class)
- suffix =
- case printer_class.name.demodulize
- when 'FlatProfilePrinter'; 'flat.txt'
- when 'GraphProfilePrinter'; 'graph.txt'
- else printer_class.name.sub(/ProfilePrinter$/, '').underscore
- end
-
- "#{super()}_#{suffix}"
- end
- end
-
- module Metrics
- class Base
- def profile
- yield
- end
-
- protected
- def with_gc_stats
- ManagementFactory.memory_mx_bean.gc
- yield
- end
- end
-
- class WallTime < Time
- def measure
- super
- end
- end
-
- class CpuTime < Time
- def measure
- ManagementFactory.thread_mx_bean.get_current_thread_cpu_time / 1000 / 1000 / 1000.0 # seconds
- end
- end
-
- class UserTime < Time
- def measure
- ManagementFactory.thread_mx_bean.get_current_thread_user_time / 1000 / 1000 / 1000.0 # seconds
- end
- end
-
- class Memory < DigitalInformationUnit
- def measure
- ManagementFactory.memory_mx_bean.non_heap_memory_usage.used + ManagementFactory.memory_mx_bean.heap_memory_usage.used
- end
- end
-
- class GcRuns < Amount
- def measure
- ManagementFactory.garbage_collector_mx_beans.inject(0) { |total_runs, current_gc| total_runs += current_gc.collection_count }
- end
- end
-
- class GcTime < Time
- def measure
- ManagementFactory.garbage_collector_mx_beans.inject(0) { |total_time, current_gc| total_time += current_gc.collection_time } / 1000.0 # seconds
- end
- end
- end
- end
- end
-end
diff --git a/activesupport/lib/active_support/testing/performance/rubinius.rb b/activesupport/lib/active_support/testing/performance/rubinius.rb
deleted file mode 100644
index d9ebfbe352..0000000000
--- a/activesupport/lib/active_support/testing/performance/rubinius.rb
+++ /dev/null
@@ -1,113 +0,0 @@
-require 'rubinius/agent'
-
-module ActiveSupport
- module Testing
- module Performance
- DEFAULTS.merge!(
- if ARGV.include?('--benchmark')
- {:metrics => [:wall_time, :memory, :objects, :gc_runs, :gc_time]}
- else
- { :metrics => [:wall_time],
- :formats => [:flat, :graph] }
- end).freeze
-
- protected
- def run_gc
- GC.run(true)
- end
-
- class Performer; end
-
- class Profiler < Performer
- def initialize(*args)
- super
- @supported = @metric.is_a?(Metrics::WallTime)
- end
-
- def run
- return unless @supported
-
- @profiler = Rubinius::Profiler::Instrumenter.new
-
- @total = time_with_block do
- @profiler.profile(false) do
- full_profile_options[:runs].to_i.times { run_test(@metric, :profile) }
- end
- end
- end
-
- def record
- return unless @supported
-
- if(full_profile_options[:formats].include?(:flat))
- create_path_and_open_file(:flat) do |file|
- @profiler.show(file)
- end
- end
-
- if(full_profile_options[:formats].include?(:graph))
- create_path_and_open_file(:graph) do |file|
- @profiler.show(file)
- end
- end
- end
-
- protected
- def create_path_and_open_file(printer_name)
- fname = "#{output_filename}_#{printer_name}.txt"
- FileUtils.mkdir_p(File.dirname(fname))
- File.open(fname, 'wb') do |file|
- yield(file)
- end
- end
- end
-
- module Metrics
- class Base
- attr_reader :loopback
-
- def profile
- yield
- end
-
- protected
- def with_gc_stats
- @loopback = Rubinius::Agent.loopback
- GC.run(true)
- yield
- end
- end
-
- class WallTime < Time
- def measure
- super
- end
- end
-
- class Memory < DigitalInformationUnit
- def measure
- loopback.get("system.memory.counter.bytes").last
- end
- end
-
- class Objects < Amount
- def measure
- loopback.get("system.memory.counter.objects").last
- end
- end
-
- class GcRuns < Amount
- def measure
- loopback.get("system.gc.full.count").last + loopback.get("system.gc.young.count").last
- end
- end
-
- class GcTime < Time
- def measure
- (loopback.get("system.gc.full.wallclock").last + loopback.get("system.gc.young.wallclock").last) / 1000.0
- end
- end
- end
- end
- end
-end
diff --git a/activesupport/lib/active_support/testing/performance/ruby.rb b/activesupport/lib/active_support/testing/performance/ruby.rb
deleted file mode 100644
index 7c149df1e4..0000000000
--- a/activesupport/lib/active_support/testing/performance/ruby.rb
+++ /dev/null
@@ -1,173 +0,0 @@
-begin
- require 'ruby-prof'
-rescue LoadError
- $stderr.puts 'Specify ruby-prof as application\'s dependency in Gemfile to run benchmarks.'
- raise
-end
-
-module ActiveSupport
- module Testing
- module Performance
- DEFAULTS.merge!(
- if ARGV.include?('--benchmark')
- { :metrics => [:wall_time, :memory, :objects, :gc_runs, :gc_time] }
- else
- { :min_percent => 0.01,
- :metrics => [:process_time, :memory, :objects],
- :formats => [:flat, :graph_html, :call_tree, :call_stack] }
- end).freeze
-
- protected
- remove_method :run_gc
- def run_gc
- GC.start
- end
-
- class Profiler < Performer
- def initialize(*args)
- super
- @supported = @metric.measure_mode rescue false
- end
-
- remove_method :run
- def run
- return unless @supported
-
- RubyProf.measure_mode = @metric.measure_mode
- RubyProf.start
- RubyProf.pause
- full_profile_options[:runs].to_i.times { run_test(@metric, :profile) }
- @data = RubyProf.stop
- @total = @data.threads.sum(0) { |thread| thread.methods.max.total_time }
- end
-
- remove_method :record
- def record
- return unless @supported
-
- klasses = full_profile_options[:formats].map { |f| RubyProf.const_get("#{f.to_s.camelize}Printer") }.compact
-
- klasses.each do |klass|
- fname = output_filename(klass)
- FileUtils.mkdir_p(File.dirname(fname))
- File.open(fname, 'wb') do |file|
- klass.new(@data).print(file, full_profile_options.slice(:min_percent))
- end
- end
- end
-
- protected
- def output_filename(printer_class)
- suffix =
- case printer_class.name.demodulize
- when 'FlatPrinter'; 'flat.txt'
- when 'FlatPrinterWithLineNumbers'; 'flat_line_numbers.txt'
- when 'GraphPrinter'; 'graph.txt'
- when 'GraphHtmlPrinter'; 'graph.html'
- when 'GraphYamlPrinter'; 'graph.yml'
- when 'CallTreePrinter'; 'tree.txt'
- when 'CallStackPrinter'; 'stack.html'
- when 'DotPrinter'; 'graph.dot'
- else printer_class.name.sub(/Printer$/, '').underscore
- end
-
- "#{super()}_#{suffix}"
- end
- end
-
- module Metrics
- class Base
- def measure_mode
- self.class::Mode
- end
-
- remove_method :profile
- def profile
- RubyProf.resume
- yield
- ensure
- RubyProf.pause
- end
-
- protected
- remove_method :with_gc_stats
- def with_gc_stats
- GC::Profiler.enable
- GC.start
- yield
- ensure
- GC::Profiler.disable
- end
- end
-
- class ProcessTime < Time
- Mode = RubyProf::PROCESS_TIME if RubyProf.const_defined?(:PROCESS_TIME)
-
- def measure
- RubyProf.measure_process_time
- end
- end
-
- class WallTime < Time
- Mode = RubyProf::WALL_TIME if RubyProf.const_defined?(:WALL_TIME)
-
- def measure
- RubyProf.measure_wall_time
- end
- end
-
- class CpuTime < Time
- Mode = RubyProf::CPU_TIME if RubyProf.const_defined?(:CPU_TIME)
-
- def initialize(*args)
- # FIXME: yeah my CPU is 2.33 GHz
- RubyProf.cpu_frequency = 2.33e9 unless RubyProf.cpu_frequency > 0
- super
- end
-
- def measure
- RubyProf.measure_cpu_time
- end
- end
-
- class Memory < DigitalInformationUnit
- Mode = RubyProf::MEMORY if RubyProf.const_defined?(:MEMORY)
-
- # Ruby 1.9 + GCdata patch
- if GC.respond_to?(:malloc_allocated_size)
- def measure
- GC.malloc_allocated_size
- end
- end
- end
-
- class Objects < Amount
- Mode = RubyProf::ALLOCATIONS if RubyProf.const_defined?(:ALLOCATIONS)
-
- # Ruby 1.9 + GCdata patch
- if GC.respond_to?(:malloc_allocations)
- def measure
- GC.malloc_allocations
- end
- end
- end
-
- class GcRuns < Amount
- Mode = RubyProf::GC_RUNS if RubyProf.const_defined?(:GC_RUNS)
-
- def measure
- GC.count
- end
- end
-
- class GcTime < Time
- Mode = RubyProf::GC_TIME if RubyProf.const_defined?(:GC_TIME)
-
- def measure
- GC::Profiler.total_time
- end
- end
- end
- end
- end
-end
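
The MRI backend removed above drives ruby-prof by setting a measure mode, collecting a profile across the configured runs, and rendering the result with printer classes. A standalone sketch of the same flow against ruby-prof directly, assuming the ruby-prof API of that era (profile block, printer classes, :min_percent option):

    require 'fileutils'
    require 'ruby-prof'

    RubyProf.measure_mode = RubyProf::WALL_TIME

    result = RubyProf.profile do
      1_000.times { Math.sqrt(rand) }
    end

    FileUtils.mkdir_p('tmp/performance')
    File.open('tmp/performance/example_flat.txt', 'wb') do |file|
      # FlatPrinter corresponds to the 'flat.txt' case in output_filename above.
      RubyProf::FlatPrinter.new(result).print(file, min_percent: 0.01)
    end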
diff --git a/activesupport/test/testing/performance_test.rb b/activesupport/test/testing/performance_test.rb
deleted file mode 100644
index 6918110cce..0000000000
--- a/activesupport/test/testing/performance_test.rb
+++ /dev/null
@@ -1,68 +0,0 @@
-require 'abstract_unit'
-
-module ActiveSupport
- module Testing
- class PerformanceTest < ActiveSupport::TestCase
- begin
- require 'active_support/testing/performance'
- HAVE_RUBYPROF = true
- rescue LoadError
- HAVE_RUBYPROF = false
- end
-
- def setup
- skip "no rubyprof" unless HAVE_RUBYPROF
- end
-
- def test_amount_format
- amount_metric = ActiveSupport::Testing::Performance::Metrics[:amount].new
- assert_equal "0", amount_metric.format(0)
- assert_equal "1", amount_metric.format(1.23)
- assert_equal "40,000,000", amount_metric.format(40000000)
- end
-
- def test_time_format
- time_metric = ActiveSupport::Testing::Performance::Metrics[:time].new
- assert_equal "0 ms", time_metric.format(0)
- assert_equal "40 ms", time_metric.format(0.04)
- assert_equal "41 ms", time_metric.format(0.0415)
- assert_equal "1.23 sec", time_metric.format(1.23)
- assert_equal "40000.00 sec", time_metric.format(40000)
- assert_equal "-5000 ms", time_metric.format(-5)
- end
-
- def test_space_format
- space_metric = ActiveSupport::Testing::Performance::Metrics[:digital_information_unit].new
- assert_equal "0 Bytes", space_metric.format(0)
- assert_equal "0 Bytes", space_metric.format(0.4)
- assert_equal "1 Byte", space_metric.format(1.23)
- assert_equal "123 Bytes", space_metric.format(123)
- assert_equal "123 Bytes", space_metric.format(123.45)
- assert_equal "12 KB", space_metric.format(12345)
- assert_equal "1.2 MB", space_metric.format(1234567)
- assert_equal "9.3 GB", space_metric.format(10**10)
- assert_equal "91 TB", space_metric.format(10**14)
- assert_equal "910000 TB", space_metric.format(10**18)
- end
-
- def test_environment_format_without_rails
- metric = ActiveSupport::Testing::Performance::Metrics[:time].new
- benchmarker = ActiveSupport::Testing::Performance::Benchmarker.new(self, metric)
- assert_equal "#{RUBY_ENGINE}-#{RUBY_VERSION}.#{RUBY_PATCHLEVEL},#{RUBY_PLATFORM}", benchmarker.environment
- end
-
- def test_environment_format_with_rails
- rails, version = Module.new, Module.new
- version.const_set :STRING, "4.0.0"
- rails.const_set :VERSION, version
- Object.const_set :Rails, rails
-
- metric = ActiveSupport::Testing::Performance::Metrics[:time].new
- benchmarker = ActiveSupport::Testing::Performance::Benchmarker.new(self, metric)
- assert_equal "rails-4.0.0,#{RUBY_ENGINE}-#{RUBY_VERSION}.#{RUBY_PATCHLEVEL},#{RUBY_PLATFORM}", benchmarker.environment
- ensure
- Object.send :remove_const, :Rails
- end
- end
- end
-end
diff --git a/railties/lib/rails/commands/benchmarker.rb b/railties/lib/rails/commands/benchmarker.rb
deleted file mode 100644
index b745b45e17..0000000000
--- a/railties/lib/rails/commands/benchmarker.rb
+++ /dev/null
@@ -1,34 +0,0 @@
-require 'optparse'
-require 'rails/test_help'
-require 'rails/performance_test_help'
-
-ARGV.push('--benchmark') # HAX
-require 'active_support/testing/performance'
-ARGV.pop
-
-def options
- options = {}
- defaults = ActiveSupport::Testing::Performance::DEFAULTS
-
- OptionParser.new do |opt|
- opt.banner = "Usage: rails benchmarker 'Ruby.code' 'Ruby.more_code' ... [OPTS]"
- opt.on('-r', '--runs N', Numeric, 'Number of runs.', "Default: #{defaults[:runs]}") { |r| options[:runs] = r }
- opt.on('-o', '--output PATH', String, 'Directory to use when writing the results.', "Default: #{defaults[:output]}") { |o| options[:output] = o }
- opt.on('-m', '--metrics a,b,c', Array, 'Metrics to use.', "Default: #{defaults[:metrics].join(",")}") { |m| options[:metrics] = m.map(&:to_sym) }
- opt.parse!(ARGV)
- end
-
- options
-end
-
-class BenchmarkerTest < ActionDispatch::PerformanceTest #:nodoc:
- self.profile_options = options
-
- ARGV.each do |expression|
- eval <<-RUBY
- def test_#{expression.parameterize('_')}
- #{expression}
- end
- RUBY
- end
-end
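
The removed script behind rails benchmarker turned each positional argument into a generated test method and ran it with the benchmark defaults. Illustrative invocations, matching the OptionParser banner and flags above:

    $ rails benchmarker 'User.all.to_a' --runs 4
    $ rails benchmarker 'Post.first' 'Post.last' -r 10 -o tmp/performance -m wall_time,memory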
diff --git a/railties/lib/rails/commands/profiler.rb b/railties/lib/rails/commands/profiler.rb
deleted file mode 100644
index 315bcccf61..0000000000
--- a/railties/lib/rails/commands/profiler.rb
+++ /dev/null
@@ -1,32 +0,0 @@
-require 'optparse'
-require 'rails/test_help'
-require 'rails/performance_test_help'
-require 'active_support/testing/performance'
-
-def options
- options = {}
- defaults = ActiveSupport::Testing::Performance::DEFAULTS
-
- OptionParser.new do |opt|
- opt.banner = "Usage: rails profiler 'Ruby.code' 'Ruby.more_code' ... [OPTS]"
- opt.on('-r', '--runs N', Numeric, 'Number of runs.', "Default: #{defaults[:runs]}") { |r| options[:runs] = r }
- opt.on('-o', '--output PATH', String, 'Directory to use when writing the results.', "Default: #{defaults[:output]}") { |o| options[:output] = o }
- opt.on('-m', '--metrics a,b,c', Array, 'Metrics to use.', "Default: #{defaults[:metrics].join(",")}") { |m| options[:metrics] = m.map(&:to_sym) }
- opt.on('-f', '--formats x,y,z', Array, 'Formats to output to.', "Default: #{defaults[:formats].join(",")}") { |m| options[:formats] = m.map(&:to_sym) }
- opt.parse!(ARGV)
- end
-
- options
-end
-
-class ProfilerTest < ActionDispatch::PerformanceTest #:nodoc:
- self.profile_options = options
-
- ARGV.each do |expression|
- eval <<-RUBY
- def test_#{expression.parameterize('_')}
- #{expression}
- end
- RUBY
- end
-end
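
rails profiler worked the same way but without the --benchmark flag, adding a --formats option for the profiler printers. An illustrative invocation matching the banner above:

    $ rails profiler 'Post.all.to_a' --runs 2 --metrics process_time --formats flat,graph_html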
diff --git a/railties/lib/rails/generators/rails/app/templates/test/performance/browsing_test.rb b/railties/lib/rails/generators/rails/app/templates/test/performance/browsing_test.rb
deleted file mode 100644
index d09ce5ad34..0000000000
--- a/railties/lib/rails/generators/rails/app/templates/test/performance/browsing_test.rb
+++ /dev/null
@@ -1,12 +0,0 @@
-require 'test_helper'
-require 'rails/performance_test_help'
-
-class BrowsingTest < ActionDispatch::PerformanceTest
- # Refer to the documentation for all available options
- # self.profile_options = { runs: 5, metrics: [:wall_time, :memory],
- # output: 'tmp/performance', formats: [:flat] }
-
- test "homepage" do
- get '/'
- end
-end
diff --git a/railties/lib/rails/generators/rails/performance_test/USAGE b/railties/lib/rails/generators/rails/performance_test/USAGE
deleted file mode 100644
index 9dc799559c..0000000000
--- a/railties/lib/rails/generators/rails/performance_test/USAGE
+++ /dev/null
@@ -1,10 +0,0 @@
-Description:
- Stubs out a new performance test. Pass the name of the test, either
- CamelCased or under_scored, as an argument.
-
- This generator invokes the current performance tool, which defaults to
- TestUnit.
-
-Example:
- `rails generate performance_test GeneralStories` creates a GeneralStories
- performance test in test/performance/general_stories_test.rb
diff --git a/railties/lib/rails/generators/rails/performance_test/performance_test_generator.rb b/railties/lib/rails/generators/rails/performance_test/performance_test_generator.rb
deleted file mode 100644
index 56cd562f3d..0000000000
--- a/railties/lib/rails/generators/rails/performance_test/performance_test_generator.rb
+++ /dev/null
@@ -1,7 +0,0 @@
-module Rails
- module Generators
- class PerformanceTestGenerator < NamedBase # :nodoc:
- hook_for :performance_tool, as: :performance
- end
- end
-end
diff --git a/railties/lib/rails/generators/test_unit/performance/performance_generator.rb b/railties/lib/rails/generators/test_unit/performance/performance_generator.rb
deleted file mode 100644
index 5552edeee4..0000000000
--- a/railties/lib/rails/generators/test_unit/performance/performance_generator.rb
+++ /dev/null
@@ -1,13 +0,0 @@
-require 'rails/generators/test_unit'
-
-module TestUnit # :nodoc:
- module Generators # :nodoc:
- class PerformanceGenerator < Base # :nodoc:
- check_class_collision suffix: "Test"
-
- def create_test_files
- template 'performance_test.rb', File.join('test/performance', class_path, "#{file_name}_test.rb")
- end
- end
- end
-end
diff --git a/railties/lib/rails/generators/test_unit/performance/templates/performance_test.rb b/railties/lib/rails/generators/test_unit/performance/templates/performance_test.rb
deleted file mode 100644
index 2bcb482d68..0000000000
--- a/railties/lib/rails/generators/test_unit/performance/templates/performance_test.rb
+++ /dev/null
@@ -1,12 +0,0 @@
-require 'test_helper'
-require 'rails/performance_test_help'
-
-class <%= class_name %>Test < ActionDispatch::PerformanceTest
- # Refer to the documentation for all available options
- # self.profile_options = { runs: 5, metrics: [:wall_time, :memory],
- # output: 'tmp/performance', formats: [:flat] }
-
- test "homepage" do
- get '/'
- end
-end
diff --git a/railties/lib/rails/performance_test_help.rb b/railties/lib/rails/performance_test_help.rb
deleted file mode 100644
index b1285efde2..0000000000
--- a/railties/lib/rails/performance_test_help.rb
+++ /dev/null
@@ -1,3 +0,0 @@
-ActionController::Base.perform_caching = true
-ActiveSupport::Dependencies.mechanism = :require
-Rails.logger.level = ActiveSupport::Logger::INFO
diff --git a/railties/lib/rails/test_unit/testing.rake b/railties/lib/rails/test_unit/testing.rake
index 9ad3a4e6d6..a18d28f958 100644
--- a/railties/lib/rails/test_unit/testing.rake
+++ b/railties/lib/rails/test_unit/testing.rake
@@ -146,15 +146,4 @@ namespace :test do
t.libs << "test"
t.pattern = 'test/integration/**/*_test.rb'
end
-
- Rails::SubTestTask.new(benchmark: 'test:prepare') do |t|
- t.libs << 'test'
- t.pattern = 'test/performance/**/*_test.rb'
- t.options = '-- --benchmark'
- end
-
- Rails::SubTestTask.new(profile: 'test:prepare') do |t|
- t.libs << 'test'
- t.pattern = 'test/performance/**/*_test.rb'
- end
end
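
The two SubTestTask definitions removed here were what backed the performance rake tasks; with them gone, benchmarking and profiling tasks are left to the extracted gem. Their former usage, derived from the task names and patterns above:

    $ rake test:benchmark   # test/performance/**/*_test.rb with the -- --benchmark flag
    $ rake test:profile     # the same tests under the profiler defaults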
diff --git a/railties/test/generators/performance_test_generator_test.rb b/railties/test/generators/performance_test_generator_test.rb
deleted file mode 100644
index 37f9857193..0000000000
--- a/railties/test/generators/performance_test_generator_test.rb
+++ /dev/null
@@ -1,12 +0,0 @@
-require 'generators/generators_test_helper'
-require 'rails/generators/rails/performance_test/performance_test_generator'
-
-class PerformanceTestGeneratorTest < Rails::Generators::TestCase
- include GeneratorsTestHelper
- arguments %w(performance)
-
- def test_performance_test_skeleton_is_created
- run_generator
- assert_file "test/performance/performance_test.rb", /class PerformanceTest < ActionDispatch::PerformanceTest/
- end
-end