-rw-r--r--  .travis.yml | 2
-rw-r--r--  actionpack/CHANGELOG.md | 7
-rw-r--r--  actionpack/lib/action_controller.rb | 1
-rw-r--r--  actionpack/lib/action_controller/caching.rb | 6
-rw-r--r--  actionpack/lib/action_controller/deprecated/performance_test.rb | 3
-rw-r--r--  actionpack/lib/action_dispatch.rb | 1
-rw-r--r--  actionpack/lib/action_dispatch/testing/performance_test.rb | 10
-rw-r--r--  actionpack/lib/action_view/helpers/url_helper.rb | 2
-rw-r--r--  actionpack/test/controller/caching_test.rb | 4
-rw-r--r--  activesupport/CHANGELOG.md | 8
-rw-r--r--  activesupport/lib/active_support/testing/performance.rb | 271
-rw-r--r--  activesupport/lib/active_support/testing/performance/jruby.rb | 115
-rw-r--r--  activesupport/lib/active_support/testing/performance/rubinius.rb | 113
-rw-r--r--  activesupport/lib/active_support/testing/performance/ruby.rb | 173
-rw-r--r--  activesupport/test/testing/performance_test.rb | 68
-rw-r--r--  guides/code/getting_started/README.rdoc | 2
-rw-r--r--  guides/code/getting_started/test/performance/browsing_test.rb | 12
-rw-r--r--  guides/source/4_0_release_notes.md | 1
-rw-r--r--  guides/source/configuring.md | 1
-rw-r--r--  guides/source/documents.yaml | 4
-rw-r--r--  guides/source/performance_testing.md | 676
-rw-r--r--  guides/source/testing.md | 6
-rw-r--r--  rails.gemspec | 2
-rw-r--r--  railties/lib/rails/commands.rb | 7
-rw-r--r--  railties/lib/rails/commands/benchmarker.rb | 34
-rw-r--r--  railties/lib/rails/commands/profiler.rb | 32
-rw-r--r--  railties/lib/rails/generators.rb | 2
-rw-r--r--  railties/lib/rails/generators/rails/app/app_generator.rb | 3
-rw-r--r--  railties/lib/rails/generators/rails/app/templates/Gemfile | 6
-rw-r--r--  railties/lib/rails/generators/rails/app/templates/test/performance/browsing_test.rb | 12
-rw-r--r--  railties/lib/rails/generators/rails/performance_test/USAGE | 10
-rw-r--r--  railties/lib/rails/generators/rails/performance_test/performance_test_generator.rb | 7
-rw-r--r--  railties/lib/rails/generators/rails/scaffold/scaffold_generator.rb | 7
-rw-r--r--  railties/lib/rails/generators/rails/scaffold_controller/scaffold_controller_generator.rb | 9
-rw-r--r--  railties/lib/rails/generators/rails/scaffold_controller/templates/controller.rb | 66
-rw-r--r--  railties/lib/rails/generators/test_unit/performance/performance_generator.rb | 13
-rw-r--r--  railties/lib/rails/generators/test_unit/performance/templates/performance_test.rb | 12
-rw-r--r--  railties/lib/rails/performance_test_help.rb | 3
-rw-r--r--  railties/lib/rails/test_unit/railtie.rb | 1
-rw-r--r--  railties/lib/rails/test_unit/testing.rake | 13
-rw-r--r--  railties/test/application/test_test.rb | 25
-rw-r--r--  railties/test/generators/app_generator_test.rb | 3
-rw-r--r--  railties/test/generators/performance_test_generator_test.rb | 12
-rw-r--r--  railties/test/generators/scaffold_controller_generator_test.rb | 16
-rw-r--r--  railties/test/generators/scaffold_generator_test.rb | 9
45 files changed, 41 insertions(+), 1749 deletions(-)
diff --git a/.travis.yml b/.travis.yml
index 257a6dc7e7..18b0100c62 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -1,6 +1,6 @@
script: 'ci/travis.rb'
before_install:
- - gem install bundler --pre
+ - gem install bundler
rvm:
- 1.9.3
- 2.0.0
diff --git a/actionpack/CHANGELOG.md b/actionpack/CHANGELOG.md
index 1a8f187979..7dea0e7fa5 100644
--- a/actionpack/CHANGELOG.md
+++ b/actionpack/CHANGELOG.md
@@ -1,5 +1,12 @@
## Rails 4.0.0 (unreleased) ##
+* Extract `ActionDispatch::PerformanceTest` into https://github.com/rails/rails-perftest
+ You can add the gem to your Gemfile to keep using performance tests.
+
+ gem 'rails-perftest'
+
+ *Yves Senn*
+
* Added view_cache_dependency API for declaring dependencies that affect
cache digest computation.
diff --git a/actionpack/lib/action_controller.rb b/actionpack/lib/action_controller.rb
index 1a13d7af29..9cacb3862b 100644
--- a/actionpack/lib/action_controller.rb
+++ b/actionpack/lib/action_controller.rb
@@ -42,7 +42,6 @@ module ActionController
autoload :Integration, 'action_controller/deprecated/integration_test'
autoload :IntegrationTest, 'action_controller/deprecated/integration_test'
- autoload :PerformanceTest, 'action_controller/deprecated/performance_test'
autoload :Routing, 'action_controller/deprecated'
autoload :TestCase, 'action_controller/test_case'
autoload :TemplateAssertions, 'action_controller/test_case'
diff --git a/actionpack/lib/action_controller/caching.rb b/actionpack/lib/action_controller/caching.rb
index 0f4cced32a..cf2cda039d 100644
--- a/actionpack/lib/action_controller/caching.rb
+++ b/actionpack/lib/action_controller/caching.rb
@@ -80,10 +80,6 @@ module ActionController
def view_cache_dependency(&dependency)
self._view_cache_dependencies += [dependency]
end
-
- def view_cache_dependencies
- _view_cache_dependencies.map { |dep| instance_exec(&dep) }.compact
- end
end
def caching_allowed?
@@ -91,7 +87,7 @@ module ActionController
end
def view_cache_dependencies
- self.class.view_cache_dependencies
+ self.class._view_cache_dependencies.map { |dep| instance_exec(&dep) }.compact
end
protected
diff --git a/actionpack/lib/action_controller/deprecated/performance_test.rb b/actionpack/lib/action_controller/deprecated/performance_test.rb
deleted file mode 100644
index c7ba5a2fe7..0000000000
--- a/actionpack/lib/action_controller/deprecated/performance_test.rb
+++ /dev/null
@@ -1,3 +0,0 @@
-ActionController::PerformanceTest = ActionDispatch::PerformanceTest
-
-ActiveSupport::Deprecation.warn 'ActionController::PerformanceTest is deprecated and will be removed, use ActionDispatch::PerformanceTest instead.'
diff --git a/actionpack/lib/action_dispatch.rb b/actionpack/lib/action_dispatch.rb
index b35761fb4a..9ab048b756 100644
--- a/actionpack/lib/action_dispatch.rb
+++ b/actionpack/lib/action_dispatch.rb
@@ -97,7 +97,6 @@ module ActionDispatch
autoload :Assertions
autoload :Integration
autoload :IntegrationTest, 'action_dispatch/testing/integration'
- autoload :PerformanceTest
autoload :TestProcess
autoload :TestRequest
autoload :TestResponse
diff --git a/actionpack/lib/action_dispatch/testing/performance_test.rb b/actionpack/lib/action_dispatch/testing/performance_test.rb
deleted file mode 100644
index 13fe693c32..0000000000
--- a/actionpack/lib/action_dispatch/testing/performance_test.rb
+++ /dev/null
@@ -1,10 +0,0 @@
-require 'active_support/testing/performance'
-
-module ActionDispatch
- # An integration test that runs a code profiler on your test methods.
- # Profiling output for combinations of each test method, measurement, and
- # output format are written to your tmp/performance directory.
- class PerformanceTest < ActionDispatch::IntegrationTest
- include ActiveSupport::Testing::Performance
- end
-end
diff --git a/actionpack/lib/action_view/helpers/url_helper.rb b/actionpack/lib/action_view/helpers/url_helper.rb
index aeee662071..bade121d44 100644
--- a/actionpack/lib/action_view/helpers/url_helper.rb
+++ b/actionpack/lib/action_view/helpers/url_helper.rb
@@ -170,7 +170,7 @@ module ActionView
# You can also use custom data attributes using the <tt>:data</tt> option:
#
# link_to "Visit Other Site", "http://www.rubyonrails.org/", data: { confirm: "Are you sure?" }
- # # => <a href="http://www.rubyonrails.org/" data-confirm="Are you sure?"">Visit Other Site</a>
+ # # => <a href="http://www.rubyonrails.org/" data-confirm="Are you sure?">Visit Other Site</a>
def link_to(name = nil, options = nil, html_options = nil, &block)
html_options, options = options, name if block_given?
options ||= {}
diff --git a/actionpack/test/controller/caching_test.rb b/actionpack/test/controller/caching_test.rb
index eb5eeb0423..ca86837a2c 100644
--- a/actionpack/test/controller/caching_test.rb
+++ b/actionpack/test/controller/caching_test.rb
@@ -306,11 +306,11 @@ class ViewCacheDependencyTest < ActionController::TestCase
end
def test_view_cache_dependencies_are_empty_by_default
- assert NoDependenciesController.view_cache_dependencies.empty?
+ assert NoDependenciesController.new.view_cache_dependencies.empty?
end
def test_view_cache_dependencies_are_listed_in_declaration_order
- assert_equal %w(trombone flute), HasDependenciesController.view_cache_dependencies
+ assert_equal %w(trombone flute), HasDependenciesController.new.view_cache_dependencies
end
end
diff --git a/activesupport/CHANGELOG.md b/activesupport/CHANGELOG.md
index 5848f9712f..2c0d1de70f 100644
--- a/activesupport/CHANGELOG.md
+++ b/activesupport/CHANGELOG.md
@@ -1,5 +1,13 @@
## Rails 4.0.0 (unreleased) ##
+* Extract `ActiveSupport::Testing::Performance` into https://github.com/rails/rails-perftest
+ You can add the gem to your Gemfile to keep using performance tests.
+
+ gem 'rails-perftest'
+
+ *Yves Senn*
+
+
* Hash.from_xml raises when it encounters type="symbol" or type="yaml".
Use Hash.from_trusted_xml to parse this XML.
diff --git a/activesupport/lib/active_support/testing/performance.rb b/activesupport/lib/active_support/testing/performance.rb
deleted file mode 100644
index 7102ffe2ed..0000000000
--- a/activesupport/lib/active_support/testing/performance.rb
+++ /dev/null
@@ -1,271 +0,0 @@
-require 'fileutils'
-require 'active_support/concern'
-require 'active_support/core_ext/class/delegating_attributes'
-require 'active_support/core_ext/string/inflections'
-require 'active_support/core_ext/module/delegation'
-require 'active_support/number_helper'
-
-module ActiveSupport
- module Testing
- module Performance
- extend ActiveSupport::Concern
-
- included do
- superclass_delegating_accessor :profile_options
- self.profile_options = {}
- end
-
- # each implementation should define metrics and freeze the defaults
- DEFAULTS =
- if ARGV.include?('--benchmark') # HAX for rake test
- { :runs => 4,
- :output => 'tmp/performance',
- :benchmark => true }
- else
- { :runs => 1,
- :output => 'tmp/performance',
- :benchmark => false }
- end
-
- def full_profile_options
- DEFAULTS.merge(profile_options)
- end
-
- def full_test_name
- "#{self.class.name}##{method_name}"
- end
-
- def run(runner)
- @runner = runner
-
- run_warmup
- if full_profile_options && metrics = full_profile_options[:metrics]
- metrics.each do |metric_name|
- if klass = Metrics[metric_name.to_sym]
- run_profile(klass.new)
- end
- end
- end
-
- return
- end
-
- def run_test(metric, mode)
- result = '.'
- begin
- run_callbacks :setup
- setup
- metric.send(mode) { __send__ method_name }
- rescue Exception => e
- result = @runner.puke(self.class, method_name, e)
- ensure
- begin
- teardown
- run_callbacks :teardown
- rescue Exception => e
- result = @runner.puke(self.class, method_name, e)
- end
- end
- result
- end
-
- protected
- # overridden by each implementation.
- def run_gc; end
-
- def run_warmup
- run_gc
-
- time = Metrics::Time.new
- run_test(time, :benchmark)
- puts "%s (%s warmup)" % [full_test_name, time.format(time.total)]
-
- run_gc
- end
-
- def run_profile(metric)
- klass = full_profile_options[:benchmark] ? Benchmarker : Profiler
- performer = klass.new(self, metric)
-
- performer.run
- puts performer.report
- performer.record
- end
-
- class Performer
- delegate :run_test, :full_profile_options, :full_test_name, :to => :@harness
-
- def initialize(harness, metric)
- @harness, @metric, @supported = harness, metric, false
- end
-
- def report
- if @supported
- rate = @total / full_profile_options[:runs]
- '%20s: %s' % [@metric.name, @metric.format(rate)]
- else
- '%20s: unsupported' % @metric.name
- end
- end
-
- protected
- def output_filename
- "#{full_profile_options[:output]}/#{full_test_name}_#{@metric.name}"
- end
- end
-
- # overridden by each implementation.
- class Profiler < Performer
- def time_with_block
- before = Time.now
- yield
- Time.now - before
- end
-
- def run; end
- def record; end
- end
-
- class Benchmarker < Performer
- def initialize(*args)
- super
- @supported = @metric.respond_to?('measure')
- end
-
- def run
- return unless @supported
-
- full_profile_options[:runs].to_i.times { run_test(@metric, :benchmark) }
- @total = @metric.total
- end
-
- def record
- avg = @metric.total / full_profile_options[:runs].to_i
- now = Time.now.utc.xmlschema
- with_output_file do |file|
- file.puts "#{avg},#{now},#{environment}"
- end
- end
-
- def environment
- @env ||= [].tap do |env|
- env << "#{$1}.#{$2}" if File.directory?('.git') && `git branch -v` =~ /^\* (\S+)\s+(\S+)/
- env << rails_version if defined?(Rails::VERSION::STRING)
- env << "#{RUBY_ENGINE}-#{RUBY_VERSION}.#{RUBY_PATCHLEVEL}"
- env << RUBY_PLATFORM
- end.join(',')
- end
-
- protected
- if defined?(Rails::VERSION::STRING)
- HEADER = 'measurement,created_at,app,rails,ruby,platform'
- else
- HEADER = 'measurement,created_at,app,ruby,platform'
- end
-
- def with_output_file
- fname = output_filename
-
- if new = !File.exist?(fname)
- FileUtils.mkdir_p(File.dirname(fname))
- end
-
- File.open(fname, 'ab') do |file|
- file.puts(HEADER) if new
- yield file
- end
- end
-
- def output_filename
- "#{super}.csv"
- end
-
- def rails_version
- "rails-#{Rails::VERSION::STRING}#{rails_branch}"
- end
-
- def rails_branch
- if File.directory?('vendor/rails/.git')
- Dir.chdir('vendor/rails') do
- ".#{$1}.#{$2}" if `git branch -v` =~ /^\* (\S+)\s+(\S+)/
- end
- end
- end
- end
-
- module Metrics
- def self.[](name)
- const_get(name.to_s.camelize)
- rescue NameError
- nil
- end
-
- class Base
- include ActiveSupport::NumberHelper
-
- attr_reader :total
-
- def initialize
- @total = 0
- end
-
- def name
- @name ||= self.class.name.demodulize.underscore
- end
-
- def benchmark
- with_gc_stats do
- before = measure
- yield
- @total += (measure - before)
- end
- end
-
- # overridden by each implementation.
- def profile; end
-
- protected
- # overridden by each implementation.
- def with_gc_stats; end
- end
-
- class Time < Base
- def measure
- ::Time.now.to_f
- end
-
- def format(measurement)
- if measurement < 1
- '%d ms' % (measurement * 1000)
- else
- '%.2f sec' % measurement
- end
- end
- end
-
- class Amount < Base
- def format(measurement)
- number_to_delimited(measurement.floor)
- end
- end
-
- class DigitalInformationUnit < Base
- def format(measurement)
- number_to_human_size(measurement, :precision => 2)
- end
- end
-
- # each implementation provides its own metrics like ProcessTime, Memory or GcRuns
- end
- end
- end
-end
-
-case RUBY_ENGINE
- when 'ruby' then require 'active_support/testing/performance/ruby'
- when 'rbx' then require 'active_support/testing/performance/rubinius'
- when 'jruby' then require 'active_support/testing/performance/jruby'
- else
- $stderr.puts 'Your ruby interpreter is not supported for benchmarking.'
- exit
-end
diff --git a/activesupport/lib/active_support/testing/performance/jruby.rb b/activesupport/lib/active_support/testing/performance/jruby.rb
deleted file mode 100644
index 34e3f9f45f..0000000000
--- a/activesupport/lib/active_support/testing/performance/jruby.rb
+++ /dev/null
@@ -1,115 +0,0 @@
-require 'jruby/profiler'
-require 'java'
-java_import java.lang.management.ManagementFactory
-
-module ActiveSupport
- module Testing
- module Performance
- DEFAULTS.merge!(
- if ARGV.include?('--benchmark')
- {:metrics => [:wall_time, :user_time, :memory, :gc_runs, :gc_time]}
- else
- { :metrics => [:wall_time],
- :formats => [:flat, :graph] }
- end).freeze
-
- protected
- def run_gc
- ManagementFactory.memory_mx_bean.gc
- end
-
- class Profiler < Performer
- def initialize(*args)
- super
- @supported = @metric.is_a?(Metrics::WallTime)
- end
-
- def run
- return unless @supported
-
- @total = time_with_block do
- @data = JRuby::Profiler.profile do
- full_profile_options[:runs].to_i.times { run_test(@metric, :profile) }
- end
- end
- end
-
- def record
- return unless @supported
-
- klasses = full_profile_options[:formats].map { |f| JRuby::Profiler.const_get("#{f.to_s.camelize}ProfilePrinter") }.compact
-
- klasses.each do |klass|
- fname = output_filename(klass)
- FileUtils.mkdir_p(File.dirname(fname))
- File.open(fname, 'wb') do |file|
- klass.new(@data).printProfile(file)
- end
- end
- end
-
- protected
- def output_filename(printer_class)
- suffix =
- case printer_class.name.demodulize
- when 'FlatProfilePrinter'; 'flat.txt'
- when 'GraphProfilePrinter'; 'graph.txt'
- else printer_class.name.sub(/ProfilePrinter$/, '').underscore
- end
-
- "#{super()}_#{suffix}"
- end
- end
-
- module Metrics
- class Base
- def profile
- yield
- end
-
- protected
- def with_gc_stats
- ManagementFactory.memory_mx_bean.gc
- yield
- end
- end
-
- class WallTime < Time
- def measure
- super
- end
- end
-
- class CpuTime < Time
- def measure
- ManagementFactory.thread_mx_bean.get_current_thread_cpu_time / 1000 / 1000 / 1000.0 # seconds
- end
- end
-
- class UserTime < Time
- def measure
- ManagementFactory.thread_mx_bean.get_current_thread_user_time / 1000 / 1000 / 1000.0 # seconds
- end
- end
-
- class Memory < DigitalInformationUnit
- def measure
- ManagementFactory.memory_mx_bean.non_heap_memory_usage.used + ManagementFactory.memory_mx_bean.heap_memory_usage.used
- end
- end
-
- class GcRuns < Amount
- def measure
- ManagementFactory.garbage_collector_mx_beans.inject(0) { |total_runs, current_gc| total_runs += current_gc.collection_count }
- end
- end
-
- class GcTime < Time
- def measure
- ManagementFactory.garbage_collector_mx_beans.inject(0) { |total_time, current_gc| total_time += current_gc.collection_time } / 1000.0 # seconds
- end
- end
- end
- end
- end
-end
diff --git a/activesupport/lib/active_support/testing/performance/rubinius.rb b/activesupport/lib/active_support/testing/performance/rubinius.rb
deleted file mode 100644
index d9ebfbe352..0000000000
--- a/activesupport/lib/active_support/testing/performance/rubinius.rb
+++ /dev/null
@@ -1,113 +0,0 @@
-require 'rubinius/agent'
-
-module ActiveSupport
- module Testing
- module Performance
- DEFAULTS.merge!(
- if ARGV.include?('--benchmark')
- {:metrics => [:wall_time, :memory, :objects, :gc_runs, :gc_time]}
- else
- { :metrics => [:wall_time],
- :formats => [:flat, :graph] }
- end).freeze
-
- protected
- def run_gc
- GC.run(true)
- end
-
- class Performer; end
-
- class Profiler < Performer
- def initialize(*args)
- super
- @supported = @metric.is_a?(Metrics::WallTime)
- end
-
- def run
- return unless @supported
-
- @profiler = Rubinius::Profiler::Instrumenter.new
-
- @total = time_with_block do
- @profiler.profile(false) do
- full_profile_options[:runs].to_i.times { run_test(@metric, :profile) }
- end
- end
- end
-
- def record
- return unless @supported
-
- if(full_profile_options[:formats].include?(:flat))
- create_path_and_open_file(:flat) do |file|
- @profiler.show(file)
- end
- end
-
- if(full_profile_options[:formats].include?(:graph))
- create_path_and_open_file(:graph) do |file|
- @profiler.show(file)
- end
- end
- end
-
- protected
- def create_path_and_open_file(printer_name)
- fname = "#{output_filename}_#{printer_name}.txt"
- FileUtils.mkdir_p(File.dirname(fname))
- File.open(fname, 'wb') do |file|
- yield(file)
- end
- end
- end
-
- module Metrics
- class Base
- attr_reader :loopback
-
- def profile
- yield
- end
-
- protected
- def with_gc_stats
- @loopback = Rubinius::Agent.loopback
- GC.run(true)
- yield
- end
- end
-
- class WallTime < Time
- def measure
- super
- end
- end
-
- class Memory < DigitalInformationUnit
- def measure
- loopback.get("system.memory.counter.bytes").last
- end
- end
-
- class Objects < Amount
- def measure
- loopback.get("system.memory.counter.objects").last
- end
- end
-
- class GcRuns < Amount
- def measure
- loopback.get("system.gc.full.count").last + loopback.get("system.gc.young.count").last
- end
- end
-
- class GcTime < Time
- def measure
- (loopback.get("system.gc.full.wallclock").last + loopback.get("system.gc.young.wallclock").last) / 1000.0
- end
- end
- end
- end
- end
-end
diff --git a/activesupport/lib/active_support/testing/performance/ruby.rb b/activesupport/lib/active_support/testing/performance/ruby.rb
deleted file mode 100644
index 7c149df1e4..0000000000
--- a/activesupport/lib/active_support/testing/performance/ruby.rb
+++ /dev/null
@@ -1,173 +0,0 @@
-begin
- require 'ruby-prof'
-rescue LoadError
- $stderr.puts 'Specify ruby-prof as application\'s dependency in Gemfile to run benchmarks.'
- raise
-end
-
-module ActiveSupport
- module Testing
- module Performance
- DEFAULTS.merge!(
- if ARGV.include?('--benchmark')
- { :metrics => [:wall_time, :memory, :objects, :gc_runs, :gc_time] }
- else
- { :min_percent => 0.01,
- :metrics => [:process_time, :memory, :objects],
- :formats => [:flat, :graph_html, :call_tree, :call_stack] }
- end).freeze
-
- protected
- remove_method :run_gc
- def run_gc
- GC.start
- end
-
- class Profiler < Performer
- def initialize(*args)
- super
- @supported = @metric.measure_mode rescue false
- end
-
- remove_method :run
- def run
- return unless @supported
-
- RubyProf.measure_mode = @metric.measure_mode
- RubyProf.start
- RubyProf.pause
- full_profile_options[:runs].to_i.times { run_test(@metric, :profile) }
- @data = RubyProf.stop
- @total = @data.threads.sum(0) { |thread| thread.methods.max.total_time }
- end
-
- remove_method :record
- def record
- return unless @supported
-
- klasses = full_profile_options[:formats].map { |f| RubyProf.const_get("#{f.to_s.camelize}Printer") }.compact
-
- klasses.each do |klass|
- fname = output_filename(klass)
- FileUtils.mkdir_p(File.dirname(fname))
- File.open(fname, 'wb') do |file|
- klass.new(@data).print(file, full_profile_options.slice(:min_percent))
- end
- end
- end
-
- protected
- def output_filename(printer_class)
- suffix =
- case printer_class.name.demodulize
- when 'FlatPrinter'; 'flat.txt'
- when 'FlatPrinterWithLineNumbers'; 'flat_line_numbers.txt'
- when 'GraphPrinter'; 'graph.txt'
- when 'GraphHtmlPrinter'; 'graph.html'
- when 'GraphYamlPrinter'; 'graph.yml'
- when 'CallTreePrinter'; 'tree.txt'
- when 'CallStackPrinter'; 'stack.html'
- when 'DotPrinter'; 'graph.dot'
- else printer_class.name.sub(/Printer$/, '').underscore
- end
-
- "#{super()}_#{suffix}"
- end
- end
-
- module Metrics
- class Base
- def measure_mode
- self.class::Mode
- end
-
- remove_method :profile
- def profile
- RubyProf.resume
- yield
- ensure
- RubyProf.pause
- end
-
- protected
- remove_method :with_gc_stats
- def with_gc_stats
- GC::Profiler.enable
- GC.start
- yield
- ensure
- GC::Profiler.disable
- end
- end
-
- class ProcessTime < Time
- Mode = RubyProf::PROCESS_TIME if RubyProf.const_defined?(:PROCESS_TIME)
-
- def measure
- RubyProf.measure_process_time
- end
- end
-
- class WallTime < Time
- Mode = RubyProf::WALL_TIME if RubyProf.const_defined?(:WALL_TIME)
-
- def measure
- RubyProf.measure_wall_time
- end
- end
-
- class CpuTime < Time
- Mode = RubyProf::CPU_TIME if RubyProf.const_defined?(:CPU_TIME)
-
- def initialize(*args)
- # FIXME: yeah my CPU is 2.33 GHz
- RubyProf.cpu_frequency = 2.33e9 unless RubyProf.cpu_frequency > 0
- super
- end
-
- def measure
- RubyProf.measure_cpu_time
- end
- end
-
- class Memory < DigitalInformationUnit
- Mode = RubyProf::MEMORY if RubyProf.const_defined?(:MEMORY)
-
- # Ruby 1.9 + GCdata patch
- if GC.respond_to?(:malloc_allocated_size)
- def measure
- GC.malloc_allocated_size
- end
- end
- end
-
- class Objects < Amount
- Mode = RubyProf::ALLOCATIONS if RubyProf.const_defined?(:ALLOCATIONS)
-
- # Ruby 1.9 + GCdata patch
- if GC.respond_to?(:malloc_allocations)
- def measure
- GC.malloc_allocations
- end
- end
- end
-
- class GcRuns < Amount
- Mode = RubyProf::GC_RUNS if RubyProf.const_defined?(:GC_RUNS)
-
- def measure
- GC.count
- end
- end
-
- class GcTime < Time
- Mode = RubyProf::GC_TIME if RubyProf.const_defined?(:GC_TIME)
-
- def measure
- GC::Profiler.total_time
- end
- end
- end
- end
- end
-end
diff --git a/activesupport/test/testing/performance_test.rb b/activesupport/test/testing/performance_test.rb
deleted file mode 100644
index 6918110cce..0000000000
--- a/activesupport/test/testing/performance_test.rb
+++ /dev/null
@@ -1,68 +0,0 @@
-require 'abstract_unit'
-
-module ActiveSupport
- module Testing
- class PerformanceTest < ActiveSupport::TestCase
- begin
- require 'active_support/testing/performance'
- HAVE_RUBYPROF = true
- rescue LoadError
- HAVE_RUBYPROF = false
- end
-
- def setup
- skip "no rubyprof" unless HAVE_RUBYPROF
- end
-
- def test_amount_format
- amount_metric = ActiveSupport::Testing::Performance::Metrics[:amount].new
- assert_equal "0", amount_metric.format(0)
- assert_equal "1", amount_metric.format(1.23)
- assert_equal "40,000,000", amount_metric.format(40000000)
- end
-
- def test_time_format
- time_metric = ActiveSupport::Testing::Performance::Metrics[:time].new
- assert_equal "0 ms", time_metric.format(0)
- assert_equal "40 ms", time_metric.format(0.04)
- assert_equal "41 ms", time_metric.format(0.0415)
- assert_equal "1.23 sec", time_metric.format(1.23)
- assert_equal "40000.00 sec", time_metric.format(40000)
- assert_equal "-5000 ms", time_metric.format(-5)
- end
-
- def test_space_format
- space_metric = ActiveSupport::Testing::Performance::Metrics[:digital_information_unit].new
- assert_equal "0 Bytes", space_metric.format(0)
- assert_equal "0 Bytes", space_metric.format(0.4)
- assert_equal "1 Byte", space_metric.format(1.23)
- assert_equal "123 Bytes", space_metric.format(123)
- assert_equal "123 Bytes", space_metric.format(123.45)
- assert_equal "12 KB", space_metric.format(12345)
- assert_equal "1.2 MB", space_metric.format(1234567)
- assert_equal "9.3 GB", space_metric.format(10**10)
- assert_equal "91 TB", space_metric.format(10**14)
- assert_equal "910000 TB", space_metric.format(10**18)
- end
-
- def test_environment_format_without_rails
- metric = ActiveSupport::Testing::Performance::Metrics[:time].new
- benchmarker = ActiveSupport::Testing::Performance::Benchmarker.new(self, metric)
- assert_equal "#{RUBY_ENGINE}-#{RUBY_VERSION}.#{RUBY_PATCHLEVEL},#{RUBY_PLATFORM}", benchmarker.environment
- end
-
- def test_environment_format_with_rails
- rails, version = Module.new, Module.new
- version.const_set :STRING, "4.0.0"
- rails.const_set :VERSION, version
- Object.const_set :Rails, rails
-
- metric = ActiveSupport::Testing::Performance::Metrics[:time].new
- benchmarker = ActiveSupport::Testing::Performance::Benchmarker.new(self, metric)
- assert_equal "rails-4.0.0,#{RUBY_ENGINE}-#{RUBY_VERSION}.#{RUBY_PATCHLEVEL},#{RUBY_PLATFORM}", benchmarker.environment
- ensure
- Object.send :remove_const, :Rails
- end
- end
- end
-end
diff --git a/guides/code/getting_started/README.rdoc b/guides/code/getting_started/README.rdoc
index 8d1b0f42e5..232856ca5f 100644
--- a/guides/code/getting_started/README.rdoc
+++ b/guides/code/getting_started/README.rdoc
@@ -25,4 +25,4 @@ Things you may want to cover:
If you plan to generate application documentation with `rake doc:app` this file
is expected to be `README.rdoc`, otherwise please feel free to rename it and use
-a different markup language.
+a different markup language.
\ No newline at end of file
diff --git a/guides/code/getting_started/test/performance/browsing_test.rb b/guides/code/getting_started/test/performance/browsing_test.rb
deleted file mode 100644
index 9342a57b20..0000000000
--- a/guides/code/getting_started/test/performance/browsing_test.rb
+++ /dev/null
@@ -1,12 +0,0 @@
-require 'test_helper'
-require 'rails/performance_test_help'
-
-class BrowsingTest < ActionDispatch::PerformanceTest
- # Refer to the documentation for all available options
- # self.profile_options = { runs: 5, metrics: [:wall_time, :memory],
- # output: 'tmp/performance', formats: [:flat] }
-
- def test_homepage
- get '/'
- end
-end
diff --git a/guides/source/4_0_release_notes.md b/guides/source/4_0_release_notes.md
index 2242b2699f..52571fab60 100644
--- a/guides/source/4_0_release_notes.md
+++ b/guides/source/4_0_release_notes.md
@@ -68,6 +68,7 @@ In Rails 4.0, several features have been extracted into gems. You can simply add
* Action Caching ([Github](https://github.com/rails/actionpack-action_caching), [Pull Request](https://github.com/rails/rails/pull/7833))
* Page Caching ([Github](https://github.com/rails/actionpack-page_caching), [Pull Request](https://github.com/rails/rails/pull/7833))
* Sprockets ([Github](https://github.com/rails/sprockets-rails))
+* Performance tests ([Github](https://github.com/rails/rails-perftest), [Pull Request](https://github.com/rails/rails/pull/8876))
Documentation
-------------
diff --git a/guides/source/configuring.md b/guides/source/configuring.md
index ced04bbbc0..cd0b99b177 100644
--- a/guides/source/configuring.md
+++ b/guides/source/configuring.md
@@ -177,7 +177,6 @@ The full set of methods that can be used in this block are as follows:
* `javascripts` turns on the hook for JavaScript files in generators. Used in Rails for when the `scaffold` generator is run. Defaults to `true`.
* `javascript_engine` configures the engine to be used (for eg. coffee) when generating assets. Defaults to `nil`.
* `orm` defines which orm to use. Defaults to `false` and will use Active Record by default.
-* `performance_tool` defines which performance tool to use. Defaults to `nil`.
* `resource_controller` defines which generator to use for generating a controller when using `rails generate resource`. Defaults to `:controller`.
* `scaffold_controller` different from `resource_controller`, defines which generator to use for generating a _scaffolded_ controller when using `rails generate scaffold`. Defaults to `:scaffold_controller`.
* `stylesheets` turns on the hook for stylesheets in generators. Used in Rails for when the `scaffold` generator is run, but this hook can be used in other generates as well. Defaults to `true`.
diff --git a/guides/source/documents.yaml b/guides/source/documents.yaml
index e779407fab..13f982d7e2 100644
--- a/guides/source/documents.yaml
+++ b/guides/source/documents.yaml
@@ -84,10 +84,6 @@
url: debugging_rails_applications.html
description: This guide describes how to debug Rails applications. It covers the different ways of achieving this and how to understand what is happening "behind the scenes" of your code.
-
- name: Performance Testing Rails Applications
- url: performance_testing.html
- description: This guide covers the various ways of performance testing a Ruby on Rails application.
- -
name: Configuring Rails Applications
url: configuring.html
description: This guide covers the basic configuration settings for a Rails application.
diff --git a/guides/source/performance_testing.md b/guides/source/performance_testing.md
deleted file mode 100644
index 6bc175d3b6..0000000000
--- a/guides/source/performance_testing.md
+++ /dev/null
@@ -1,676 +0,0 @@
-Performance Testing Rails Applications
-======================================
-
-This guide covers the various ways of performance testing a Ruby on Rails
-application.
-
-After reading this guide, you will know:
-
-* The various types of benchmarking and profiling metrics.
-* How to generate performance and benchmarking tests.
-* How to install and use a GC-patched Ruby binary to measure memory usage and object
- allocation.
-* The benchmarking information provided by Rails inside the log files.
-* Various tools facilitating benchmarking and profiling.
-
-Performance testing is an integral part of the development cycle. It is very
-important that you don't make your end users wait for too long before the page
-is completely loaded. Ensuring a pleasant browsing experience for end users and
-cutting the cost of unnecessary hardware is important for any non-trivial web
-application.
-
---------------------------------------------------------------------------------
-
-Performance Test Cases
-----------------------
-
-Rails performance tests are a special type of integration tests, designed for
-benchmarking and profiling the test code. With performance tests, you can
-determine where your application's memory or speed problems are coming from,
-and get a more in-depth picture of those problems.
-
-In a freshly generated Rails application, `test/performance/browsing_test.rb`
-contains an example of a performance test:
-
-```ruby
-require 'test_helper'
-require 'rails/performance_test_help'
-
-class BrowsingTest < ActionDispatch::PerformanceTest
- # Refer to the documentation for all available options
- # self.profile_options = { runs: 5, metrics: [:wall_time, :memory],
- # output: 'tmp/performance', formats: [:flat] }
-
- test "homepage" do
- get '/'
- end
-end
-```
-
-This example is a simple performance test case for profiling a GET request to
-the application's homepage.
-
-### Generating Performance Tests
-
-Rails provides a generator called `performance_test` for creating new
-performance tests:
-
-```bash
-$ rails generate performance_test homepage
-```
-
-This generates `homepage_test.rb` in the `test/performance` directory:
-
-```ruby
-require 'test_helper'
-require 'rails/performance_test_help'
-
-class HomepageTest < ActionDispatch::PerformanceTest
- # Refer to the documentation for all available options
- # self.profile_options = { runs: 5, metrics: [:wall_time, :memory],
- # output: 'tmp/performance', formats: [:flat] }
-
- test "homepage" do
- get '/'
- end
-end
-```
-
-### Examples
-
-Let's assume your application has the following controller and model:
-
-```ruby
-# routes.rb
-root to: 'home#dashboard'
-resources :posts
-
-# home_controller.rb
-class HomeController < ApplicationController
- def dashboard
- @users = User.last_ten.includes(:avatars)
- @posts = Post.all_today
- end
-end
-
-# posts_controller.rb
-class PostsController < ApplicationController
- def create
- @post = Post.create(params[:post])
- redirect_to(@post)
- end
-end
-
-# post.rb
-class Post < ActiveRecord::Base
- before_save :recalculate_costly_stats
-
- def slow_method
- # I fire gallzilion queries sleeping all around
- end
-
- private
-
- def recalculate_costly_stats
- # CPU heavy calculations
- end
-end
-```
-
-#### Controller Example
-
-Because performance tests are a special kind of integration test, you can use
-the `get` and `post` methods in them.
-
-Here's the performance test for `HomeController#dashboard` and
-`PostsController#create`:
-
-```ruby
-require 'test_helper'
-require 'rails/performance_test_help'
-
-class PostPerformanceTest < ActionDispatch::PerformanceTest
- def setup
- # Application requires logged-in user
- login_as(:lifo)
- end
-
- test "homepage" do
- get '/dashboard'
- end
-
- test "creating new post" do
- post '/posts', post: { body: 'lifo is fooling you' }
- end
-end
-```
-
-You can find more details about the `get` and `post` methods in the
-[Testing Rails Applications](testing.html) guide.
-
-#### Model Example
-
-Even though the performance tests are integration tests and hence closer to
-the request/response cycle by nature, you can still performance test pure model
-code.
-
-Performance test for `Post` model:
-
-```ruby
-require 'test_helper'
-require 'rails/performance_test_help'
-
-class PostModelTest < ActionDispatch::PerformanceTest
- test "creation" do
- Post.create body: 'still fooling you', cost: '100'
- end
-
- test "slow method" do
- # Using posts(:awesome) fixture
- posts(:awesome).slow_method
- end
-end
-```
-
-### Modes
-
-Performance tests can be run in two modes: Benchmarking and Profiling.
-
-#### Benchmarking
-
-Benchmarking makes it easy to quickly gather a few metrics about each test run.
-By default, each test case is run **4 times** in benchmarking mode.
-
-To run performance tests in benchmarking mode:
-
-```bash
-$ rake test:benchmark
-```
-
-#### Profiling
-
-Profiling allows you to make an in-depth analysis of each of your tests by using
-an external profiler. Depending on your Ruby interpreter, this profiler can be
-native (Rubinius, JRuby) or not (MRI, which uses RubyProf). By default, each
-test case is run **once** in profiling mode.
-
-To run performance tests in profiling mode:
-
-```bash
-$ rake test:profile
-```
-
-### Metrics
-
-Benchmarking and profiling run performance tests and give you multiple metrics.
-The availability of each metric is determined by the interpreter being used—none
-of them support all metrics—and by the mode in use. A brief description of each
-metric and their availability across interpreters/modes is given below.
-
-#### Wall Time
-
-Wall time measures the real world time elapsed during the test run. It is
-affected by any other processes concurrently running on the system.
-
-#### Process Time
-
-Process time measures the time taken by the process. It is unaffected by any
-other processes running concurrently on the same system. Hence, process time
-is likely to be constant for any given performance test, irrespective of the
-machine load.
-
-#### CPU Time
-
-Similar to process time, but leverages the more accurate CPU clock counter
-available on the Pentium and PowerPC platforms.
-
-#### User Time
-
-User time measures the amount of time the CPU spent in user-mode, i.e. within
-the process. This is not affected by other processes and by the time it possibly
-spends blocked.
-
-#### Memory
-
-Memory measures the amount of memory used for the performance test case.
-
-#### Objects
-
-Objects measures the number of objects allocated for the performance test case.
-
-#### GC Runs
-
-GC Runs measures the number of times GC was invoked for the performance test case.
-
-#### GC Time
-
-GC Time measures the amount of time spent in GC for the performance test case.
-
-#### Metric Availability
-
-##### Benchmarking
-
-| Interpreter | Wall Time | Process Time | CPU Time | User Time | Memory | Objects | GC Runs | GC Time |
-| ------------ | --------- | ------------ | -------- | --------- | ------ | ------- | ------- | ------- |
-| **MRI** | yes | yes | yes | no | yes | yes | yes | yes |
-| **REE** | yes | yes | yes | no | yes | yes | yes | yes |
-| **Rubinius** | yes | no | no | no | yes | yes | yes | yes |
-| **JRuby** | yes | no | no | yes | yes | yes | yes | yes |
-
-##### Profiling
-
-| Interpreter | Wall Time | Process Time | CPU Time | User Time | Memory | Objects | GC Runs | GC Time |
-| ------------ | --------- | ------------ | -------- | --------- | ------ | ------- | ------- | ------- |
-| **MRI** | yes | yes | no | no | yes | yes | yes | yes |
-| **REE** | yes | yes | no | no | yes | yes | yes | yes |
-| **Rubinius** | yes | no | no | no | no | no | no | no |
-| **JRuby** | yes | no | no | no | no | no | no | no |
-
-NOTE: To profile under JRuby you'll need to run `export JRUBY_OPTS="-Xlaunch.inproc=false --profile.api"`
-**before** the performance tests.
-
-### Understanding the Output
-
-Performance tests generate different outputs inside `tmp/performance` directory
-depending on their mode and metric.
-
-#### Benchmarking
-
-In benchmarking mode, performance tests generate two types of outputs.
-
-##### Command Line
-
-This is the primary form of output in benchmarking mode. Example:
-
-```bash
-BrowsingTest#test_homepage (31 ms warmup)
- wall_time: 6 ms
- memory: 437.27 KB
- objects: 5,514
- gc_runs: 0
- gc_time: 19 ms
-```
-
-##### CSV Files
-
-Performance test results are also appended to `.csv` files inside `tmp/performance`.
-For example, running the default `BrowsingTest#test_homepage` will generate
-following five files:
-
-* BrowsingTest#test_homepage_gc_runs.csv
-* BrowsingTest#test_homepage_gc_time.csv
-* BrowsingTest#test_homepage_memory.csv
-* BrowsingTest#test_homepage_objects.csv
-* BrowsingTest#test_homepage_wall_time.csv
-
-As the results are appended to these files each time the performance tests are
-run in benchmarking mode, you can collect data over a period of time. This can
-be very helpful in analyzing the effects of code changes.
-
-Sample output of `BrowsingTest#test_homepage_wall_time.csv`:
-
-```bash
-measurement,created_at,app,rails,ruby,platform
-0.00738224999999992,2009-01-08T03:40:29Z,,3.0.0,ruby-1.8.7.249,x86_64-linux
-0.00755874999999984,2009-01-08T03:46:18Z,,3.0.0,ruby-1.8.7.249,x86_64-linux
-0.00762099999999993,2009-01-08T03:49:25Z,,3.0.0,ruby-1.8.7.249,x86_64-linux
-0.00603075000000008,2009-01-08T04:03:29Z,,3.0.0,ruby-1.8.7.249,x86_64-linux
-0.00619899999999995,2009-01-08T04:03:53Z,,3.0.0,ruby-1.8.7.249,x86_64-linux
-0.00755449999999991,2009-01-08T04:04:55Z,,3.0.0,ruby-1.8.7.249,x86_64-linux
-0.00595999999999997,2009-01-08T04:05:06Z,,3.0.0,ruby-1.8.7.249,x86_64-linux
-0.00740450000000004,2009-01-09T03:54:47Z,,3.0.0,ruby-1.8.7.249,x86_64-linux
-0.00603150000000008,2009-01-09T03:54:57Z,,3.0.0,ruby-1.8.7.249,x86_64-linux
-0.00771250000000012,2009-01-09T15:46:03Z,,3.0.0,ruby-1.8.7.249,x86_64-linux
-```
-
-#### Profiling
-
-In profiling mode, performance tests can generate multiple types of outputs.
-The command line output is always presented but support for the others is
-dependent on the interpreter in use. A brief description of each type and
-their availability across interpreters is given below.
-
-##### Command Line
-
-This is a very basic form of output in profiling mode:
-
-```bash
-BrowsingTest#test_homepage (58 ms warmup)
- process_time: 63 ms
- memory: 832.13 KB
- objects: 7,882
-```
-
-##### Flat
-
-Flat output shows the metric—time, memory, etc—measure in each method.
-[Check Ruby-Prof documentation for a better explanation](http://ruby-prof.rubyforge.org/files/examples/flat_txt.html).
-
-##### Graph
-
-Graph output shows the metric measure in each method, which methods call it and
-which methods it calls. [Check Ruby-Prof documentation for a better explanation](http://ruby-prof.rubyforge.org/files/examples/graph_txt.html).
-
-##### Tree
-
-Tree output is profiling information in calltree format for use by [kcachegrind](http://kcachegrind.sourceforge.net/html/Home.html)
-and similar tools.
-
-##### Output Availability
-
-| | Flat | Graph | Tree |
-| ------------ | ---- | ----- | ---- |
-| **MRI** | yes | yes | yes |
-| **REE** | yes | yes | yes |
-| **Rubinius** | yes | yes | no |
-| **JRuby** | yes | yes | no |
-
-### Tuning Test Runs
-
-Test runs can be tuned by setting the `profile_options` class variable on your
-test class.
-
-```ruby
-require 'test_helper'
-require 'rails/performance_test_help'
-
-class BrowsingTest < ActionDispatch::PerformanceTest
- self.profile_options = { runs: 5, metrics: [:wall_time, :memory] }
-
- test "homepage"
- get '/'
- end
-end
-```
-
-In this example, the test would run 5 times and measure wall time and memory.
-There are a few configurable options:
-
-| Option | Description | Default | Mode |
-| ---------- | ------------------------------------------ | ----------------------------- | --------- |
-| `:runs` | Number of runs. | Benchmarking: 4, Profiling: 1 | Both |
-| `:output` | Directory to use when writing the results. | `tmp/performance` | Both |
-| `:metrics` | Metrics to use. | See below. | Both |
-| `:formats` | Formats to output to. | See below. | Profiling |
-
-Metrics and formats have different defaults depending on the interpreter in use.
-
-| Interpreter | Mode | Default metrics | Default formats |
-| -------------- | ------------ | ------------------------------------------------------- | ----------------------------------------------- |
-| **MRI/REE** | Benchmarking | `[:wall_time, :memory, :objects, :gc_runs, :gc_time]` | N/A |
-| | Profiling | `[:process_time, :memory, :objects]` | `[:flat, :graph_html, :call_tree, :call_stack]` |
-| **Rubinius** | Benchmarking | `[:wall_time, :memory, :objects, :gc_runs, :gc_time]` | N/A |
-| | Profiling | `[:wall_time]` | `[:flat, :graph]` |
-| **JRuby** | Benchmarking | `[:wall_time, :user_time, :memory, :gc_runs, :gc_time]` | N/A |
-| | Profiling | `[:wall_time]` | `[:flat, :graph]` |
-
-As you've probably noticed by now, metrics and formats are specified using a
-symbol array with each name [underscored.](http://api.rubyonrails.org/classes/String.html#method-i-underscore)
-
-### Performance Test Environment
-
-Performance tests are run in the `test` environment. But running performance
-tests will set the following configuration parameters:
-
-```bash
-ActionController::Base.perform_caching = true
-ActiveSupport::Dependencies.mechanism = :require
-Rails.logger.level = ActiveSupport::Logger::INFO
-```
-
-As `ActionController::Base.perform_caching` is set to `true`, performance tests
-will behave much as they do in the `production` environment.
-
-### Installing GC-Patched MRI
-
-To get the best from Rails' performance tests under MRI, you'll need to build
-a special Ruby binary with some super powers.
-
-The recommended patches for MRI can be found on [RVM's _patches_ directory](https://github.com/wayneeseguin/rvm/tree/master/patches/ruby)
-under each specific interpreter version.
-
-Concerning the installation itself, you can either do this easily by using
-[RVM](https://rvm.io/) or you can build everything from source,
-which is a little bit harder.
-
-#### Install Using RVM
-
-The process of installing a patched Ruby interpreter is very easy if you let RVM
-do the hard work. All of the following RVM commands will provide you with a
-patched Ruby interpreter:
-
-```bash
-$ rvm install 1.9.2-p180 --patch gcdata
-$ rvm install 1.9.2-p180 --patch ~/Downloads/downloaded_gcdata_patch.patch
-```
-
-You can even keep your regular interpreter by assigning a name to the patched
-one:
-
-```bash
-$ rvm install 1.9.2-p180 --patch gcdata --name gcdata
-$ rvm use 1.9.2-p180 # your regular ruby
-$ rvm use 1.9.2-p180-gcdata # your patched ruby
-```
-
-And it's done! You have installed a patched Ruby interpreter.
-
-#### Install From Source
-
-This process is a bit more complicated, but straightforward nonetheless. If
-you've never compiled a Ruby binary before, follow these steps to build a
-Ruby binary inside your home directory.
-
-##### Download and Extract
-
-```bash
-$ mkdir rubygc
-$ wget <the version you want from ftp://ftp.ruby-lang.org/pub/ruby>
-$ tar -xzvf <ruby-version.tar.gz>
-$ cd <ruby-version>
-```
-
-##### Apply the Patch
-
-```bash
-$ curl https://raw.github.com/wayneeseguin/rvm/master/patches/ruby/1.9.2/p180/gcdata.patch | patch -p0 # if you're on 1.9.2!
-```
-
-##### Configure and Install
-
-The following will install Ruby in your home directory's `/rubygc` directory.
-Make sure to replace `<homedir>` with a full patch to your actual home
-directory.
-
-```bash
-$ ./configure --prefix=/<homedir>/rubygc
-$ make && make install
-```
-
-##### Prepare Aliases
-
-For convenience, add the following lines in your `~/.profile`:
-
-```bash
-alias gcruby='~/rubygc/bin/ruby'
-alias gcrake='~/rubygc/bin/rake'
-alias gcgem='~/rubygc/bin/gem'
-alias gcirb='~/rubygc/bin/irb'
-alias gcrails='~/rubygc/bin/rails'
-```
-
-Don't forget to use your aliases from now on.
-
-### Using Ruby-Prof on MRI and REE
-
-Add Ruby-Prof to your applications' Gemfile if you want to benchmark/profile
-under MRI or REE:
-
-```ruby
-gem 'ruby-prof'
-```
-
-Now run `bundle install` and you're ready to go.
-
-Command Line Tools
-------------------
-
-Writing performance test cases could be an overkill when you are looking for one
-time tests. Rails ships with two command line tools that enable quick and dirty
-performance testing:
-
-### `benchmarker`
-
-Usage:
-
-```bash
-Usage: rails benchmarker 'Ruby.code' 'Ruby.more_code' ... [OPTS]
- -r, --runs N Number of runs.
- Default: 4
- -o, --output PATH Directory to use when writing the results.
- Default: tmp/performance
- -m, --metrics a,b,c Metrics to use.
- Default: wall_time,memory,objects,gc_runs,gc_time
-```
-
-Example:
-
-```bash
-$ rails benchmarker 'Item.all' 'CouchItem.all' --runs 3 --metrics wall_time,memory
-```
-
-### `profiler`
-
-Usage:
-
-```bash
-Usage: rails profiler 'Ruby.code' 'Ruby.more_code' ... [OPTS]
- -r, --runs N Number of runs.
- Default: 1
- -o, --output PATH Directory to use when writing the results.
- Default: tmp/performance
- -m, --metrics a,b,c Metrics to use.
- Default: process_time,memory,objects
- -f, --formats x,y,z Formats to output to.
- Default: flat,graph_html,call_tree
-```
-
-Example:
-
-```bash
-$ rails profiler 'Item.all' 'CouchItem.all' --runs 2 --metrics process_time --formats flat
-```
-
-NOTE: Metrics and formats vary from interpreter to interpreter. Pass `--help` to
-each tool to see the defaults for your interpreter.
-
-Helper Methods
---------------
-
-Rails provides various helper methods inside Active Record, Action Controller
-and Action View to measure the time taken by a given piece of code. The method
-is called `benchmark()` in all the three components.
-
-### Model
-
-```ruby
-Project.benchmark("Creating project") do
- project = Project.create("name" => "stuff")
- project.create_manager("name" => "David")
- project.milestones << Milestone.all
-end
-```
-
-This benchmarks the code enclosed in the `Project.benchmark("Creating project") do...end`
-block and prints the result to the log file:
-
-```ruby
-Creating project (185.3ms)
-```
-
-Please refer to the [API docs](http://api.rubyonrails.org/classes/ActiveSupport/Benchmarkable.html#method-i-benchmark)
-for additional options to `benchmark()`.
-
-### Controller
-
-Similarly, you could use this helper method inside [controllers.](http://api.rubyonrails.org/classes/ActiveSupport/Benchmarkable.html)
-
-```ruby
-def process_projects
- benchmark("Processing projects") do
- Project.process(params[:project_ids])
- Project.update_cached_projects
- end
-end
-```
-
-NOTE: `benchmark` is a class method inside controllers.
-
-### View
-
-And in [views](http://api.rubyonrails.org/classes/ActiveSupport/Benchmarkable.html:)
-
-```erb
-<% benchmark("Showing projects partial") do %>
- <%= render @projects %>
-<% end %>
-```
-
-Request Logging
----------------
-
-Rails log files contain very useful information about the time taken to serve
-each request. Here's a typical log file entry:
-
-```bash
-Processing ItemsController#index (for 127.0.0.1 at 2009-01-08 03:06:39) [GET]
-Rendering template within layouts/items
-Rendering items/index
-Completed in 5ms (View: 2, DB: 0) | 200 OK [http://0.0.0.0/items]
-```
-
-For this section, we're only interested in the last line:
-
-```bash
-Completed in 5ms (View: 2, DB: 0) | 200 OK [http://0.0.0.0/items]
-```
-
-This data is fairly straightforward to understand. Rails uses millisecond(ms) as
-the metric to measure the time taken. The complete request spent 5 ms inside
-Rails, out of which 2 ms were spent rendering views and none was spent
-communication with the database. It's safe to assume that the remaining 3 ms
-were spent inside the controller.
-
-Michael Koziarski has an [interesting blog post](http://www.therailsway.com/2009/1/6/requests-per-second)
-explaining the importance of using milliseconds as the metric.
-
-Useful Links
-------------
-
-### Rails Plugins and Gems
-
-* [Rails Analyzer](http://rails-analyzer.rubyforge.org)
-* [Rails Footnotes](https://github.com/josevalim/rails-footnotes/tree/master)
-* [Query Reviewer](https://github.com/nesquena/query_reviewer)
-* [MiniProfiler](http://www.miniprofiler.com)
-
-### Generic Tools
-
-* [httperf](http://www.hpl.hp.com/research/linux/httperf/)
-* [ab](http://httpd.apache.org/docs/2.2/programs/ab.html)
-* [JMeter](http://jakarta.apache.org/jmeter/)
-* [kcachegrind](http://kcachegrind.sourceforge.net/html/Home.html)
-
-### Tutorials and Documentation
-
-* [ruby-prof API Documentation](http://ruby-prof.rubyforge.org)
-* [Request Profiling Railscast](http://railscasts.com/episodes/98-request-profiling) - Outdated, but useful for understanding call graphs.
-
-Commercial Products
--------------------
-
-Rails has been lucky to have a few companies dedicated to Rails-specific
-performance tools. A couple of those are:
-
-* [New Relic](http://www.newrelic.com)
-* [Scout](http://scoutapp.com)
diff --git a/guides/source/testing.md b/guides/source/testing.md
index 7747318d32..09d6d2d5ee 100644
--- a/guides/source/testing.md
+++ b/guides/source/testing.md
@@ -39,10 +39,10 @@ Rails creates a `test` folder for you as soon as you create a Rails project usin
```bash
$ ls -F test
-fixtures/ functional/ integration/ performance/ test_helper.rb unit/
+fixtures/ functional/ integration/ test_helper.rb unit/
```
-The `unit` directory is meant to hold tests for your models, the `functional` directory is meant to hold tests for your controllers, the `integration` directory is meant to hold tests that involve any number of controllers interacting, and the `performance` directory is meant for performance tests.
+The `unit` directory is meant to hold tests for your models, the `functional` directory is meant to hold tests for your controllers and the `integration` directory is meant to hold tests that involve any number of controllers interacting.
Fixtures are a way of organizing test data; they reside in the `fixtures` folder.
@@ -760,14 +760,12 @@ You don't need to set up and run your tests by hand on a test-by-test basis. Rai
| Tasks | Description |
| ------------------------------- | ----------- |
| `rake test` | Runs all unit, functional and integration tests. You can also simply run `rake` as the _test_ target is the default.|
-| `rake test:benchmark` | Benchmark the performance tests|
| `rake test:controllers` | Runs all the controller tests from `test/controllers`|
| `rake test:functionals` | Runs all the functional tests from `test/controllers`, `test/mailers`, and `test/functional`|
| `rake test:helpers` | Runs all the helper tests from `test/helpers`|
| `rake test:integration` | Runs all the integration tests from `test/integration`|
| `rake test:mailers` | Runs all the mailer tests from `test/mailers`|
| `rake test:models` | Runs all the model tests from `test/models`|
-| `rake test:profile` | Profile the performance tests|
| `rake test:recent` | Tests recent changes|
| `rake test:uncommitted` | Runs all the tests which are uncommitted. Supports Subversion and Git|
| `rake test:units` | Runs all the unit tests from `test/models`, `test/helpers`, and `test/unit`|
diff --git a/rails.gemspec b/rails.gemspec
index 5505ac45a8..34957c9455 100644
--- a/rails.gemspec
+++ b/rails.gemspec
@@ -26,6 +26,6 @@ Gem::Specification.new do |s|
s.add_dependency 'actionmailer', version
s.add_dependency 'railties', version
- s.add_dependency 'bundler', '>= 1.3.0.pre.4', '< 2.0'
+ s.add_dependency 'bundler', '>= 1.2.2', '< 2.0'
s.add_dependency 'sprockets-rails', '~> 2.0.0.rc1'
end
diff --git a/railties/lib/rails/commands.rb b/railties/lib/rails/commands.rb
index 9cb11f66c6..aacde52cfc 100644
--- a/railties/lib/rails/commands.rb
+++ b/railties/lib/rails/commands.rb
@@ -24,8 +24,6 @@ The most common rails commands are:
In addition to those, there are:
application Generate the Rails application code
destroy Undo code generated with "generate" (short-cut alias: "d")
- benchmarker See how fast a piece of code runs
- profiler Get profile information from a piece of code
plugin new Generates skeleton for developing a Rails plugin
runner Run a piece of code in the application environment (short-cut alias: "r")
@@ -51,11 +49,6 @@ when 'generate', 'destroy', 'plugin'
require "rails/commands/#{command}"
end
-when 'benchmarker', 'profiler'
- require APP_PATH
- Rails.application.require_environment!
- require "rails/commands/#{command}"
-
when 'console'
require 'rails/commands/console'
options = Rails::Console.parse_arguments(ARGV)
diff --git a/railties/lib/rails/commands/benchmarker.rb b/railties/lib/rails/commands/benchmarker.rb
deleted file mode 100644
index b745b45e17..0000000000
--- a/railties/lib/rails/commands/benchmarker.rb
+++ /dev/null
@@ -1,34 +0,0 @@
-require 'optparse'
-require 'rails/test_help'
-require 'rails/performance_test_help'
-
-ARGV.push('--benchmark') # HAX
-require 'active_support/testing/performance'
-ARGV.pop
-
-def options
- options = {}
- defaults = ActiveSupport::Testing::Performance::DEFAULTS
-
- OptionParser.new do |opt|
- opt.banner = "Usage: rails benchmarker 'Ruby.code' 'Ruby.more_code' ... [OPTS]"
- opt.on('-r', '--runs N', Numeric, 'Number of runs.', "Default: #{defaults[:runs]}") { |r| options[:runs] = r }
- opt.on('-o', '--output PATH', String, 'Directory to use when writing the results.', "Default: #{defaults[:output]}") { |o| options[:output] = o }
- opt.on('-m', '--metrics a,b,c', Array, 'Metrics to use.', "Default: #{defaults[:metrics].join(",")}") { |m| options[:metrics] = m.map(&:to_sym) }
- opt.parse!(ARGV)
- end
-
- options
-end
-
-class BenchmarkerTest < ActionDispatch::PerformanceTest #:nodoc:
- self.profile_options = options
-
- ARGV.each do |expression|
- eval <<-RUBY
- def test_#{expression.parameterize('_')}
- #{expression}
- end
- RUBY
- end
-end
diff --git a/railties/lib/rails/commands/profiler.rb b/railties/lib/rails/commands/profiler.rb
deleted file mode 100644
index 315bcccf61..0000000000
--- a/railties/lib/rails/commands/profiler.rb
+++ /dev/null
@@ -1,32 +0,0 @@
-require 'optparse'
-require 'rails/test_help'
-require 'rails/performance_test_help'
-require 'active_support/testing/performance'
-
-def options
- options = {}
- defaults = ActiveSupport::Testing::Performance::DEFAULTS
-
- OptionParser.new do |opt|
- opt.banner = "Usage: rails profiler 'Ruby.code' 'Ruby.more_code' ... [OPTS]"
- opt.on('-r', '--runs N', Numeric, 'Number of runs.', "Default: #{defaults[:runs]}") { |r| options[:runs] = r }
- opt.on('-o', '--output PATH', String, 'Directory to use when writing the results.', "Default: #{defaults[:output]}") { |o| options[:output] = o }
- opt.on('-m', '--metrics a,b,c', Array, 'Metrics to use.', "Default: #{defaults[:metrics].join(",")}") { |m| options[:metrics] = m.map(&:to_sym) }
- opt.on('-f', '--formats x,y,z', Array, 'Formats to output to.', "Default: #{defaults[:formats].join(",")}") { |m| options[:formats] = m.map(&:to_sym) }
- opt.parse!(ARGV)
- end
-
- options
-end
-
-class ProfilerTest < ActionDispatch::PerformanceTest #:nodoc:
- self.profile_options = options
-
- ARGV.each do |expression|
- eval <<-RUBY
- def test_#{expression.parameterize('_')}
- #{expression}
- end
- RUBY
- end
-end
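
Both removed commands share the same OptionParser setup; condensed into a self-contained sketch (the defaults hash below is a placeholder, not the removed DEFAULTS constant, and the banner text is illustrative):

    require 'optparse'

    # Placeholder defaults; the removed commands pulled these from
    # ActiveSupport::Testing::Performance::DEFAULTS instead.
    options = { runs: 4, output: 'tmp/performance', metrics: [:wall_time], formats: [:flat] }

    OptionParser.new do |opt|
      opt.banner = "Usage: profile 'Ruby.code' 'Ruby.more_code' ... [OPTS]"
      opt.on('-r', '--runs N', Integer, 'Number of runs.')        { |r| options[:runs]    = r }
      opt.on('-o', '--output PATH', String, 'Results directory.') { |o| options[:output]  = o }
      opt.on('-m', '--metrics a,b,c', Array, 'Metrics to use.')   { |m| options[:metrics] = m.map(&:to_sym) }
      opt.on('-f', '--formats x,y,z', Array, 'Output formats.')   { |f| options[:formats] = f.map(&:to_sym) }
    end.parse!(ARGV)
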
diff --git a/railties/lib/rails/generators.rb b/railties/lib/rails/generators.rb
index d9a91b74d1..4b767ea0c6 100644
--- a/railties/lib/rails/generators.rb
+++ b/railties/lib/rails/generators.rb
@@ -50,7 +50,6 @@ module Rails
javascripts: true,
javascript_engine: :js,
orm: false,
- performance_tool: nil,
resource_controller: :controller,
resource_route: true,
scaffold_controller: :scaffold_controller,
@@ -179,7 +178,6 @@ module Rails
"#{test}:model",
"#{test}:scaffold",
"#{test}:view",
- "#{test}:performance",
"#{template}:controller",
"#{template}:scaffold",
"#{template}:mailer",
diff --git a/railties/lib/rails/generators/rails/app/app_generator.rb b/railties/lib/rails/generators/rails/app/app_generator.rb
index 784c97aee0..e22be40381 100644
--- a/railties/lib/rails/generators/rails/app/app_generator.rb
+++ b/railties/lib/rails/generators/rails/app/app_generator.rb
@@ -114,7 +114,6 @@ module Rails
empty_directory_with_keep_file 'test/helpers'
empty_directory_with_keep_file 'test/integration'
- template 'test/performance/browsing_test.rb'
template 'test/test_helper.rb'
end
@@ -141,7 +140,7 @@ module Rails
# We need to store the RAILS_DEV_PATH in a constant, otherwise the path
# can change in Ruby 1.8.7 when we FileUtils.cd.
RAILS_DEV_PATH = File.expand_path("../../../../../..", File.dirname(__FILE__))
- RESERVED_NAMES = %w[application destroy benchmarker profiler plugin runner test]
+ RESERVED_NAMES = %w[application destroy plugin runner test]
class AppGenerator < AppBase # :nodoc:
add_shared_options_for "application"
diff --git a/railties/lib/rails/generators/rails/app/templates/Gemfile b/railties/lib/rails/generators/rails/app/templates/Gemfile
index c4846b2c11..2b80896c27 100644
--- a/railties/lib/rails/generators/rails/app/templates/Gemfile
+++ b/railties/lib/rails/generators/rails/app/templates/Gemfile
@@ -9,12 +9,12 @@ source 'https://rubygems.org'
<%= assets_gemfile_entry %>
<%= javascript_gemfile_entry -%>
+# Build JSON APIs with ease. Read more: https://github.com/rails/jbuilder
+gem 'jbuilder'
+
# To use ActiveModel has_secure_password
# gem 'bcrypt-ruby', '~> 3.0.0'
-# Build JSON APIs with ease. Read more: https://github.com/rails/jbuilder
-# gem 'jbuilder'
-
# Use unicorn as the app server
# gem 'unicorn'
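
With jbuilder enabled by default in the generated Gemfile and the scaffolded controller no longer rendering JSON inline (see the controller template change further down), JSON output would typically move into a view. A minimal, hypothetical jbuilder view as an illustration (the path and attribute names are made up, not generated by this commit):

    # app/views/users/show.json.jbuilder
    json.extract! @user, :id, :name, :created_at
    json.url user_url(@user, format: :json)
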
diff --git a/railties/lib/rails/generators/rails/app/templates/test/performance/browsing_test.rb b/railties/lib/rails/generators/rails/app/templates/test/performance/browsing_test.rb
deleted file mode 100644
index d09ce5ad34..0000000000
--- a/railties/lib/rails/generators/rails/app/templates/test/performance/browsing_test.rb
+++ /dev/null
@@ -1,12 +0,0 @@
-require 'test_helper'
-require 'rails/performance_test_help'
-
-class BrowsingTest < ActionDispatch::PerformanceTest
- # Refer to the documentation for all available options
- # self.profile_options = { runs: 5, metrics: [:wall_time, :memory],
- # output: 'tmp/performance', formats: [:flat] }
-
- test "homepage" do
- get '/'
- end
-end
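
New applications no longer get this performance-test stub; the same request can still be exercised as a plain integration test. A hypothetical replacement (file name and assertion are illustrative, not part of the commit):

    # test/integration/browsing_test.rb
    require 'test_helper'

    class BrowsingTest < ActionDispatch::IntegrationTest
      test "homepage responds successfully" do
        get '/'
        assert_response :success
      end
    end
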
diff --git a/railties/lib/rails/generators/rails/performance_test/USAGE b/railties/lib/rails/generators/rails/performance_test/USAGE
deleted file mode 100644
index 9dc799559c..0000000000
--- a/railties/lib/rails/generators/rails/performance_test/USAGE
+++ /dev/null
@@ -1,10 +0,0 @@
-Description:
- Stubs out a new performance test. Pass the name of the test, either
- CamelCased or under_scored, as an argument.
-
- This generator invokes the current performance tool, which defaults to
- TestUnit.
-
-Example:
- `rails generate performance_test GeneralStories` creates a GeneralStories
- performance test in test/performance/general_stories_test.rb
diff --git a/railties/lib/rails/generators/rails/performance_test/performance_test_generator.rb b/railties/lib/rails/generators/rails/performance_test/performance_test_generator.rb
deleted file mode 100644
index 56cd562f3d..0000000000
--- a/railties/lib/rails/generators/rails/performance_test/performance_test_generator.rb
+++ /dev/null
@@ -1,7 +0,0 @@
-module Rails
- module Generators
- class PerformanceTestGenerator < NamedBase # :nodoc:
- hook_for :performance_tool, as: :performance
- end
- end
-end
diff --git a/railties/lib/rails/generators/rails/scaffold/scaffold_generator.rb b/railties/lib/rails/generators/rails/scaffold/scaffold_generator.rb
index dd636ed3cf..9a7ea1f657 100644
--- a/railties/lib/rails/generators/rails/scaffold/scaffold_generator.rb
+++ b/railties/lib/rails/generators/rails/scaffold/scaffold_generator.rb
@@ -9,13 +9,8 @@ module Rails
class_option :stylesheets, type: :boolean, desc: "Generate Stylesheets"
class_option :stylesheet_engine, desc: "Engine for Stylesheets"
- class_option :html, type: :boolean, default: true,
- desc: "Generate a scaffold with HTML output"
-
def handle_skip
- if !options[:html] || !options[:stylesheets]
- @options = @options.merge(stylesheet_engine: false)
- end
+ @options = @options.merge(stylesheet_engine: false) if !options[:stylesheets]
end
hook_for :scaffold_controller, required: true
diff --git a/railties/lib/rails/generators/rails/scaffold_controller/scaffold_controller_generator.rb b/railties/lib/rails/generators/rails/scaffold_controller/scaffold_controller_generator.rb
index 32fa54a362..4f36b612ae 100644
--- a/railties/lib/rails/generators/rails/scaffold_controller/scaffold_controller_generator.rb
+++ b/railties/lib/rails/generators/rails/scaffold_controller/scaffold_controller_generator.rb
@@ -10,17 +10,8 @@ module Rails
class_option :orm, banner: "NAME", type: :string, required: true,
desc: "ORM to generate the controller for"
- class_option :html, type: :boolean, default: true,
- desc: "Generate a scaffold with HTML output"
-
argument :attributes, type: :array, default: [], banner: "field:type field:type"
- def handle_skip
- unless options[:html]
- @options = @options.merge(template_engine: false, helper: false)
- end
- end
-
def create_controller_files
template "controller.rb", File.join('app/controllers', class_path, "#{controller_file_name}_controller.rb")
end
diff --git a/railties/lib/rails/generators/rails/scaffold_controller/templates/controller.rb b/railties/lib/rails/generators/rails/scaffold_controller/templates/controller.rb
index e11d357314..6f7eb07229 100644
--- a/railties/lib/rails/generators/rails/scaffold_controller/templates/controller.rb
+++ b/railties/lib/rails/generators/rails/scaffold_controller/templates/controller.rb
@@ -7,95 +7,47 @@ class <%= controller_class_name %>Controller < ApplicationController
before_action :set_<%= singular_table_name %>, only: [:show, :edit, :update, :destroy]
# GET <%= route_url %>
- # GET <%= route_url %>.json
def index
@<%= plural_table_name %> = <%= orm_class.all(class_name) %>
-
- respond_to do |format|
- <%- if options[:html] -%>
- format.html # index.html.erb
- <%- end -%>
- format.json { render json: <%= "@#{plural_table_name}" %> }
- end
end
# GET <%= route_url %>/1
- # GET <%= route_url %>/1.json
def show
- respond_to do |format|
- <%- if options[:html] -%>
- format.html # show.html.erb
- <%- end -%>
- format.json { render json: <%= "@#{singular_table_name}" %> }
- end
end
- <%- if options[:html] -%>
# GET <%= route_url %>/new
- # GET <%= route_url %>/new.json
def new
@<%= singular_table_name %> = <%= orm_class.build(class_name) %>
-
- respond_to do |format|
- format.html # new.html.erb
- format.json { render json: <%= "@#{singular_table_name}" %> }
- end
end
# GET <%= route_url %>/1/edit
def edit
end
- <%- end -%>
# POST <%= route_url %>
- # POST <%= route_url %>.json
def create
@<%= singular_table_name %> = <%= orm_class.build(class_name, "#{singular_table_name}_params") %>
- respond_to do |format|
- if @<%= orm_instance.save %>
- <%- if options[:html] -%>
- format.html { redirect_to @<%= singular_table_name %>, notice: <%= "'#{human_name} was successfully created.'" %> }
- <%- end -%>
- format.json { render json: <%= "@#{singular_table_name}" %>, status: :created, location: <%= "@#{singular_table_name}" %> }
- else
- <%- if options[:html] -%>
- format.html { render action: "new" }
- <%- end -%>
- format.json { render json: <%= "@#{orm_instance.errors}" %>, status: :unprocessable_entity }
- end
+ if @<%= orm_instance.save %>
+ redirect_to @<%= singular_table_name %>, notice: <%= "'#{human_name} was successfully created.'" %>
+ else
+ render action: "new"
end
end
# PATCH/PUT <%= route_url %>/1
- # PATCH/PUT <%= route_url %>/1.json
def update
- respond_to do |format|
- if @<%= orm_instance.update("#{singular_table_name}_params") %>
- <%- if options[:html] -%>
- format.html { redirect_to @<%= singular_table_name %>, notice: <%= "'#{human_name} was successfully updated.'" %> }
- <%- end -%>
- format.json { head :no_content }
- else
- <%- if options[:html] -%>
- format.html { render action: "edit" }
- <%- end -%>
- format.json { render json: <%= "@#{orm_instance.errors}" %>, status: :unprocessable_entity }
- end
+ if @<%= orm_instance.update("#{singular_table_name}_params") %>
+ redirect_to @<%= singular_table_name %>, notice: <%= "'#{human_name} was successfully updated.'" %>
+ else
+ render action: "edit"
end
end
# DELETE <%= route_url %>/1
- # DELETE <%= route_url %>/1.json
def destroy
@<%= orm_instance.destroy %>
-
- respond_to do |format|
- <%- if options[:html] -%>
- format.html { redirect_to <%= index_helper %>_url }
- <%- end -%>
- format.json { head :no_content }
- end
+ redirect_to <%= index_helper %>_url
end
private
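
For orientation, roughly what the simplified template now generates for a hypothetical User resource (the resource name is illustrative; the set_user and user_params private methods and the remaining actions are omitted for brevity):

    class UsersController < ApplicationController
      before_action :set_user, only: [:show, :edit, :update, :destroy]

      # GET /users
      def index
        @users = User.all
      end

      # POST /users
      def create
        @user = User.new(user_params)
        if @user.save
          redirect_to @user, notice: 'User was successfully created.'
        else
          render action: "new"
        end
      end

      # show, new, edit, update and destroy follow the same HTML-only pattern
      # visible in the template above.
    end
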
diff --git a/railties/lib/rails/generators/test_unit/performance/performance_generator.rb b/railties/lib/rails/generators/test_unit/performance/performance_generator.rb
deleted file mode 100644
index 5552edeee4..0000000000
--- a/railties/lib/rails/generators/test_unit/performance/performance_generator.rb
+++ /dev/null
@@ -1,13 +0,0 @@
-require 'rails/generators/test_unit'
-
-module TestUnit # :nodoc:
- module Generators # :nodoc:
- class PerformanceGenerator < Base # :nodoc:
- check_class_collision suffix: "Test"
-
- def create_test_files
- template 'performance_test.rb', File.join('test/performance', class_path, "#{file_name}_test.rb")
- end
- end
- end
-end
diff --git a/railties/lib/rails/generators/test_unit/performance/templates/performance_test.rb b/railties/lib/rails/generators/test_unit/performance/templates/performance_test.rb
deleted file mode 100644
index 2bcb482d68..0000000000
--- a/railties/lib/rails/generators/test_unit/performance/templates/performance_test.rb
+++ /dev/null
@@ -1,12 +0,0 @@
-require 'test_helper'
-require 'rails/performance_test_help'
-
-class <%= class_name %>Test < ActionDispatch::PerformanceTest
- # Refer to the documentation for all available options
- # self.profile_options = { runs: 5, metrics: [:wall_time, :memory],
- # output: 'tmp/performance', formats: [:flat] }
-
- test "homepage" do
- get '/'
- end
-end
diff --git a/railties/lib/rails/performance_test_help.rb b/railties/lib/rails/performance_test_help.rb
deleted file mode 100644
index b1285efde2..0000000000
--- a/railties/lib/rails/performance_test_help.rb
+++ /dev/null
@@ -1,3 +0,0 @@
-ActionController::Base.perform_caching = true
-ActiveSupport::Dependencies.mechanism = :require
-Rails.logger.level = ActiveSupport::Logger::INFO
diff --git a/railties/lib/rails/test_unit/railtie.rb b/railties/lib/rails/test_unit/railtie.rb
index ed89ce4128..f52c4c44b7 100644
--- a/railties/lib/rails/test_unit/railtie.rb
+++ b/railties/lib/rails/test_unit/railtie.rb
@@ -5,7 +5,6 @@ module Rails
fixture_replacement: nil
c.integration_tool :test_unit
- c.performance_tool :test_unit
end
rake_tasks do
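
The block trimmed here is the railtie's generators-configuration hook. For context only, the application-level counterpart would be a snippet like the following in config/application.rb (hypothetical, not part of this commit):

    config.generators do |g|
      g.test_framework :test_unit, fixture: true
      g.integration_tool :test_unit
    end
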
diff --git a/railties/lib/rails/test_unit/testing.rake b/railties/lib/rails/test_unit/testing.rake
index 9ad3a4e6d6..f0d46fd959 100644
--- a/railties/lib/rails/test_unit/testing.rake
+++ b/railties/lib/rails/test_unit/testing.rake
@@ -45,7 +45,7 @@ end
task default: :test
-desc 'Runs test:units, test:functionals, test:integration together (also available: test:benchmark, test:profile)'
+desc 'Runs test:units, test:functionals, test:integration together'
task :test do
Rake::Task[ENV['TEST'] ? 'test:single' : 'test:run'].invoke
end
@@ -146,15 +146,4 @@ namespace :test do
t.libs << "test"
t.pattern = 'test/integration/**/*_test.rb'
end
-
- Rails::SubTestTask.new(benchmark: 'test:prepare') do |t|
- t.libs << 'test'
- t.pattern = 'test/performance/**/*_test.rb'
- t.options = '-- --benchmark'
- end
-
- Rails::SubTestTask.new(profile: 'test:prepare') do |t|
- t.libs << 'test'
- t.pattern = 'test/performance/**/*_test.rb'
- end
end
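
An application that still keeps *_test.rb files under test/performance could declare a comparable task itself. A hypothetical lib/tasks/performance.rake sketch mirroring the removed pattern with the standard Rake::TestTask (the task name is illustrative):

    require 'rake/testtask'

    namespace :test do
      # Runs everything under test/performance after test:prepare, much like
      # the removed test:benchmark and test:profile tasks did.
      Rake::TestTask.new(performance: 'test:prepare') do |t|
        t.libs << 'test'
        t.pattern = 'test/performance/**/*_test.rb'
      end
    end
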
diff --git a/railties/test/application/test_test.rb b/railties/test/application/test_test.rb
index f6bcaa466a..c7ad2fba8f 100644
--- a/railties/test/application/test_test.rb
+++ b/railties/test/application/test_test.rb
@@ -52,31 +52,6 @@ module ApplicationTests
run_test_file 'integration/posts_test.rb'
end
- test "performance test" do
- controller 'posts', <<-RUBY
- class PostsController < ActionController::Base
- end
- RUBY
-
- app_file 'app/views/posts/index.html.erb', <<-HTML
- Posts#index
- HTML
-
- app_file 'test/performance/posts_test.rb', <<-RUBY
- require 'test_helper'
- require 'rails/performance_test_help'
-
- class PostsTest < ActionDispatch::PerformanceTest
- def test_index
- get '/posts'
- assert_response :success
- end
- end
- RUBY
-
- run_test_file 'performance/posts_test.rb'
- end
-
private
def run_test_file(name)
result = ruby '-Itest', "#{app_path}/test/#{name}"
diff --git a/railties/test/generators/app_generator_test.rb b/railties/test/generators/app_generator_test.rb
index 5d5be689e6..ee93dc49cd 100644
--- a/railties/test/generators/app_generator_test.rb
+++ b/railties/test/generators/app_generator_test.rb
@@ -34,7 +34,6 @@ DEFAULT_APP_FILES = %w(
test/helpers
test/mailers
test/integration
- test/performance
vendor
vendor/assets
tmp/cache
@@ -218,7 +217,6 @@ class AppGeneratorTest < Rails::Generators::TestCase
assert_file "test/test_helper.rb" do |helper_content|
assert_no_match(/fixtures :all/, helper_content)
end
- assert_file "test/performance/browsing_test.rb"
end
def test_generator_if_skip_sprockets_is_given
@@ -241,7 +239,6 @@ class AppGeneratorTest < Rails::Generators::TestCase
assert_no_match(/config\.assets\.css_compressor = :sass/, content)
assert_no_match(/config\.assets\.version = '1\.0'/, content)
end
- assert_file "test/performance/browsing_test.rb"
end
def test_inclusion_of_javascript_runtime
diff --git a/railties/test/generators/performance_test_generator_test.rb b/railties/test/generators/performance_test_generator_test.rb
deleted file mode 100644
index 37f9857193..0000000000
--- a/railties/test/generators/performance_test_generator_test.rb
+++ /dev/null
@@ -1,12 +0,0 @@
-require 'generators/generators_test_helper'
-require 'rails/generators/rails/performance_test/performance_test_generator'
-
-class PerformanceTestGeneratorTest < Rails::Generators::TestCase
- include GeneratorsTestHelper
- arguments %w(performance)
-
- def test_performance_test_skeleton_is_created
- run_generator
- assert_file "test/performance/performance_test.rb", /class PerformanceTest < ActionDispatch::PerformanceTest/
- end
-end
diff --git a/railties/test/generators/scaffold_controller_generator_test.rb b/railties/test/generators/scaffold_controller_generator_test.rb
index ab00586a64..bd99e78644 100644
--- a/railties/test/generators/scaffold_controller_generator_test.rb
+++ b/railties/test/generators/scaffold_controller_generator_test.rb
@@ -31,12 +31,10 @@ class ScaffoldControllerGeneratorTest < Rails::Generators::TestCase
assert_instance_method :create, content do |m|
assert_match(/@user = User\.new\(user_params\)/, m)
assert_match(/@user\.save/, m)
- assert_match(/@user\.errors/, m)
end
assert_instance_method :update, content do |m|
assert_match(/@user\.update\(user_params\)/, m)
- assert_match(/@user\.errors/, m)
end
assert_instance_method :destroy, content do |m|
@@ -127,18 +125,6 @@ class ScaffoldControllerGeneratorTest < Rails::Generators::TestCase
assert_no_file "app/views/layouts/users.html.erb"
end
- def test_skip_html_if_required
- run_generator [ "User", "name:string", "age:integer", "--no-html" ]
- assert_no_file "app/helpers/users_helper.rb"
- assert_no_file "app/views/users"
-
- assert_file "app/controllers/users_controller.rb" do |content|
- assert_no_match(/format\.html/, content)
- assert_no_match(/def edit/, content)
- assert_no_match(/def new/, content)
- end
- end
-
def test_default_orm_is_used
run_generator ["User", "--orm=unknown"]
@@ -176,7 +162,7 @@ class ScaffoldControllerGeneratorTest < Rails::Generators::TestCase
def test_new_hash_style
run_generator
assert_file "app/controllers/users_controller.rb" do |content|
- assert_match(/\{ render action: "new" \}/, content)
+ assert_match(/render action: "new"/, content)
end
end
end
diff --git a/railties/test/generators/scaffold_generator_test.rb b/railties/test/generators/scaffold_generator_test.rb
index 431b23b014..357f472a3f 100644
--- a/railties/test/generators/scaffold_generator_test.rb
+++ b/railties/test/generators/scaffold_generator_test.rb
@@ -41,12 +41,10 @@ class ScaffoldGeneratorTest < Rails::Generators::TestCase
assert_instance_method :create, content do |m|
assert_match(/@product_line = ProductLine\.new\(product_line_params\)/, m)
assert_match(/@product_line\.save/, m)
- assert_match(/@product_line\.errors/, m)
end
assert_instance_method :update, content do |m|
assert_match(/@product_line\.update\(product_line_params\)/, m)
- assert_match(/@product_line\.errors/, m)
end
assert_instance_method :destroy, content do |m|
@@ -158,12 +156,10 @@ class ScaffoldGeneratorTest < Rails::Generators::TestCase
assert_instance_method :create, content do |m|
assert_match(/@admin_role = Admin::Role\.new\(admin_role_params\)/, m)
assert_match(/@admin_role\.save/, m)
- assert_match(/@admin_role\.errors/, m)
end
assert_instance_method :update, content do |m|
assert_match(/@admin_role\.update\(admin_role_params\)/, m)
- assert_match(/@admin_role\.errors/, m)
end
assert_instance_method :destroy, content do |m|
@@ -257,11 +253,6 @@ class ScaffoldGeneratorTest < Rails::Generators::TestCase
assert_no_file "app/assets/stylesheets/posts.css"
end
- def test_scaffold_generator_no_html
- run_generator [ "posts", "--no-html" ]
- assert_no_file "app/assets/stylesheets/scaffold.css"
- end
-
def test_scaffold_generator_no_javascripts
run_generator [ "posts", "--no-javascripts" ]
assert_file "app/assets/stylesheets/scaffold.css"