# activesupport/lib/active_support/testing/performance.rb
require 'rubygems'
gem 'ruby-prof', '>= 0.6.1'
require 'ruby-prof'

require 'fileutils'
require 'rails/version'

module ActiveSupport
  module Testing
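    # Mixed into a Test::Unit test case, this module replaces the normal test
    # run with a warmup pass followed by profile_options[:runs] measured runs
    # under ruby-prof, once per configured metric. Passing --benchmark on the
    # command line switches from full profiling to lightweight benchmarking,
    # appending results to a CSV file under profile_options[:output].
    #
    # A rough usage sketch (the test case, model, and option tweak below are
    # illustrative, not defined in this file):
    #
    #   class PostsPerformanceTest < ActiveSupport::TestCase
    #     include ActiveSupport::Testing::Performance
    #
    #     self.profile_options = profile_options.merge(:runs => 10)
    #
    #     def test_find_first_post
    #       100.times { Post.find(:first) }
    #     end
    #   end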
    module Performance
      benchmark = ARGV.include?('--benchmark')  # HACK: detect benchmark mode straight from ARGV so it works under rake test

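      # Default options. Benchmark mode is cheap per run, so it defaults to 20
      # runs versus 4 for full profiling. :metrics and :formats are resolved to
      # RubyProf constants by Util.measure_modes and Util.printer_classes,
      # silently skipping anything the installed ruby-prof doesn't support.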
      DEFAULTS = {
        :benchmark => benchmark,
        :runs => benchmark ? 20 : 4,
        :min_percent => 0.05,
        :metrics => [:process_time, :memory, :allocations],
        :formats => [:flat, :graph_html, :call_tree],
        :output => 'tmp/performance' }

      def self.included(base)
        base.extend ClassMethods
        base.class_inheritable_accessor :profile_options
        base.profile_options = DEFAULTS.dup
      end

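      # Replaces the standard Test::Unit run: performs a single timed warmup
      # run, then profiles the test once per configured metric, reporting and
      # recording the results for each.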
      def run(result)
        return if method_name =~ /^default_test$/

        yield(self.class::STARTED, name)
        @_result = result

        run_warmup

        self.class.measure_modes.each do |measure_mode|
          data = run_profile(measure_mode)
          self.class.report_profile_total(data, measure_mode)
          self.class.record_results(full_test_name, data, measure_mode)
          result.add_run
        end

        yield(self.class::FINISHED, name)
      end

      protected
        def full_test_name
          "#{self.class.name}##{@method_name}"
        end

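        # Wraps the test body with setup/teardown (including callbacks) and
        # converts assertion failures and errors into recorded test results, so
        # one failing run doesn't abort the remaining measured runs.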
        def run_test
          run_callbacks :setup
          setup
          yield
        rescue ::Test::Unit::AssertionFailedError => e
          add_failure(e.message, e.backtrace)
        rescue StandardError, ScriptError
          add_error($!)
        ensure
          begin
            teardown
            run_callbacks :teardown, :enumerator => :reverse_each
          rescue ::Test::Unit::AssertionFailedError => e
            add_failure(e.message, e.backtrace)
          rescue StandardError, ScriptError
            add_error($!)
          end
        end

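        # Runs the test once outside the profiler, printing the elapsed wall
        # time, so the measured runs aren't skewed by first-run costs such as
        # autoloading and cold caches.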
        def run_warmup
          puts
          print full_test_name

          run_test do
            bench = Benchmark.realtime do
              __send__(@method_name)
            end
            puts " (%.2fs warmup)" % bench
          end
        end

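        # Runs the test profile_options[:runs] times under ruby-prof with the
        # given measure mode, with GC disabled during each run. Returns the
        # result of RubyProf.stop: full profile data, or just the accumulated
        # measurement when RubyProf.benchmarking is on.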
        def run_profile(measure_mode)
          RubyProf.benchmarking = profile_options[:benchmark]
          RubyProf.measure_mode = measure_mode

          print '  '
          profile_options[:runs].times do |i|
            run_test do
              begin
                GC.disable
                RubyProf.resume { __send__(@method_name) }
                print '.'
                $stdout.flush
              ensure
                GC.enable
              end
            end
          end

          RubyProf.stop
        end

      module ClassMethods
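        # Dispatches to CSV benchmark recording or full profile report writing,
        # depending on whether ruby-prof is in benchmarking mode.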
        def record_results(test_name, data, measure_mode)
          if RubyProf.benchmarking?
            record_benchmark(test_name, data, measure_mode)
          else
            record_profile(test_name, data, measure_mode)
          end
        end

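        # Prints the total for this metric. In benchmark mode the data is
        # already a single number; otherwise the top-level total_time of each
        # profiled thread is summed.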
        def report_profile_total(data, measure_mode)
          total_time =
            if RubyProf.benchmarking?
              data
            else
              data.threads.values.sum(0) do |method_infos|
                method_infos.sort.last.total_time
              end
            end

          format =
            case measure_mode
              when RubyProf::PROCESS_TIME, RubyProf::WALL_TIME
                "%.2f seconds"
              when RubyProf::MEMORY
                "%.2f bytes"
              when RubyProf::ALLOCATIONS
                "%d allocations"
              else
                "%.2f #{measure_mode}"
            end

          total = format % total_time
          puts "\n  #{ActiveSupport::Testing::Performance::Util.metric_name(measure_mode)}: #{total}\n"
        end

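        # Resolve the configured :metrics and :formats to RubyProf measure-mode
        # constants and printer classes, via Util.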
        def measure_modes
          ActiveSupport::Testing::Performance::Util.measure_modes(profile_options[:metrics])
        end

        def printer_classes
          ActiveSupport::Testing::Performance::Util.printer_classes(profile_options[:formats])
        end

        private
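          # Appends one CSV row per metric to benchmarks.csv, creating the file
          # (and its header row) on first use.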
          def record_benchmark(test_name, data, measure_mode)
            bench_filename = "#{profile_options[:output]}/benchmarks.csv"

            new_file = !File.exist?(bench_filename)
            FileUtils.mkdir_p(File.dirname(bench_filename)) if new_file

            File.open(bench_filename, 'ab') do |file|
              if new_file
                file.puts 'test,metric,measurement,runs,average,created_at,rails_version,ruby_engine,ruby_version,ruby_patchlevel,ruby_platform'
              end

              file.puts [test_name,
                ActiveSupport::Testing::Performance::Util.metric_name(measure_mode),
                data, profile_options[:runs], data / profile_options[:runs],
                Time.now.utc.xmlschema,
                Rails::VERSION::STRING,
                defined?(RUBY_ENGINE) ? RUBY_ENGINE : 'ruby',
                RUBY_VERSION, RUBY_PATCHLEVEL, RUBY_PLATFORM].join(',')
            end
          end

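          # Writes one report file per configured printer class for this metric.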
          def record_profile(test_name, data, measure_mode)
            printer_classes.each do |printer_class|
              fname = output_filename(test_name, printer_class, measure_mode)

              FileUtils.mkdir_p(File.dirname(fname))
              File.open(fname, 'wb') do |file|
                printer_class.new(data).print(file, profile_printer_options)
              end
            end
          end

          # The report filename is test_name + measure_mode + report_type
          def output_filename(test_name, printer_class, measure_mode)
            suffix =
              case
                when printer_class == RubyProf::FlatPrinter;      'flat.txt'
                when printer_class == RubyProf::GraphPrinter;     'graph.txt'
                when printer_class == RubyProf::GraphHtmlPrinter; 'graph.html'
                when printer_class == RubyProf::CallTreePrinter;  'tree.txt'
                else printer_class.to_s.downcase
              end

            "#{profile_options[:output]}/#{test_name}_#{ActiveSupport::Testing::Performance::Util.metric_name(measure_mode)}_#{suffix}"
          end

          def profile_printer_options
            profile_options.slice(:min_percent)
          end
      end

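      # Mapping helpers between the symbols used in profile_options and
      # RubyProf's measure-mode constants, printer classes, and metric names,
      # skipping any constants the installed ruby-prof doesn't define.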
      module Util
        extend self

        def metric_name(measure_mode)
          case measure_mode
            when RubyProf::PROCESS_TIME; 'process_time'
            when RubyProf::WALL_TIME; 'wall_time'
            when RubyProf::MEMORY; 'memory'
            when RubyProf::ALLOCATIONS; 'allocations'
            else "measure#{measure_mode}"
          end
        end

        def measure_modes(metrics)
          ruby_prof_consts(metrics.map { |m| m.to_s.upcase })
        end

        def printer_classes(formats)
          ruby_prof_consts(formats.map { |f| "#{f.to_s.camelize}Printer" })
        end

        private
          def ruby_prof_consts(names)
            names.map { |name| RubyProf.const_get(name) rescue nil }.compact
          end
      end
    end
  end
end