|
  # Disable GC for the whole suite so collection pauses don't pollute
  # the benchmark timings; re-enabled after all examples run.
  before(:all) { GC.disable }
  after(:all) { GC.enable }
16 | 16 |
|
| 17 | + SERIALIZERS = { |
| 18 | + fast_jsonapi: { |
| 19 | + name: 'Fast Serializer', |
| 20 | + hash_method: :serializable_hash, |
| 21 | + json_method: :serialized_json |
| 22 | + }, |
| 23 | + ams: { |
| 24 | + name: 'AMS serializer', |
| 25 | + speed_factor: 25, |
| 26 | + hash_method: :as_json |
| 27 | + }, |
| 28 | + jsonapi: { |
| 29 | + name: 'jsonapi-rb serializer' |
| 30 | + }, |
| 31 | + jsonapis: { |
| 32 | + name: 'jsonapi-serializers' |
| 33 | + } |
| 34 | + } |
| 35 | + |
17 | 36 | context 'when testing performance of serialization' do |
18 | 37 | it 'should create a hash of 1000 records in less than 50 ms' do |
19 | 38 | movies = 1000.times.map { |_i| movie } |
|
44 | 63 | end |
45 | 64 | end |
46 | 65 |
|
47 | | - def print_stats(message, count, ams_time, jsonapi_time, jsonapis_time, our_time) |
48 | | - format = '%-15s %-10s %s' |
49 | | - puts '' |
| 66 | + def print_stats(message, count, data) |
| 67 | + puts |
50 | 68 | puts message |
51 | | - puts format(format, 'Serializer', 'Records', 'Time') |
52 | | - puts format(format, 'AMS serializer', count, ams_time.round(2).to_s + ' ms') |
53 | | - puts format(format, 'jsonapi-rb serializer', count, jsonapi_time.round(2).to_s + ' ms') |
54 | | - puts format(format, 'jsonapi-serializers', count, jsonapis_time.round(2).to_s + ' ms') |
55 | | - puts format(format, 'Fast serializer', count, our_time.round(2).to_s + ' ms') |
| 69 | + |
| 70 | + name_length = SERIALIZERS.collect { |s| s[1].fetch(:name, s[0]).length }.max |
| 71 | + |
| 72 | + puts format("%-#{name_length+1}s %-10s %-10s %s", 'Serializer', 'Records', 'Time', 'Speed Up') |
| 73 | + |
| 74 | + report_format = "%-#{name_length+1}s %-10s %-10s" |
| 75 | + fast_jsonapi_time = data[:fast_jsonapi][:time] |
| 76 | + puts format(report_format, 'Fast serializer', count, fast_jsonapi_time.round(2).to_s + ' ms') |
| 77 | + |
| 78 | + data.reject { |k,v| k == :fast_jsonapi }.each_pair do |k,v| |
| 79 | + t = v[:time] |
| 80 | + factor = t / fast_jsonapi_time |
| 81 | + |
| 82 | + speed_factor = SERIALIZERS[k].fetch(:speed_factor, 1) |
| 83 | + result = factor >= speed_factor ? '✔' : '✘' |
| 84 | + |
| 85 | + puts format("%-#{name_length+1}s %-10s %-10s %sx %s", SERIALIZERS[k][:name], count, t.round(2).to_s + ' ms', factor.round(2), result) |
| 86 | + end |
56 | 87 | end |
57 | 88 |
|
58 | | - def run_hash_benchmark(message, movie_count, our_serializer, ams_serializer, jsonapi_serializer, jsonapis_serializer) |
59 | | - our_time = Benchmark.measure { our_serializer.serializable_hash }.real * 1000 |
60 | | - ams_time = Benchmark.measure { ams_serializer.as_json }.real * 1000 |
61 | | - jsonapi_time = Benchmark.measure { jsonapi_serializer.to_hash }.real * 1000 |
62 | | - jsonapis_time = Benchmark.measure { jsonapis_serializer.to_hash }.real * 1000 |
| 89 | + def run_hash_benchmark(message, movie_count, serializers) |
| 90 | + data = Hash[serializers.keys.collect { |k| [ k, { hash: nil, time: nil, speed_factor: nil }] }] |
| 91 | + |
| 92 | + serializers.each_pair do |k,v| |
| 93 | + hash_method = SERIALIZERS[k].key?(:hash_method) ? SERIALIZERS[k][:hash_method] : :to_hash |
| 94 | + data[k][:time] = Benchmark.measure { data[k][:hash] = v.send(hash_method) }.real * 1000 |
| 95 | + end |
| 96 | + |
| 97 | + print_stats(message, movie_count, data) |
63 | 98 |
|
64 | | - print_stats(message, movie_count, ams_time, jsonapi_time, jsonapis_time, our_time) |
| 99 | + data |
65 | 100 | end |
66 | 101 |
|
67 | | - def run_json_benchmark(message, movie_count, our_serializer, ams_serializer, jsonapi_serializer, jsonapis_serializer) |
68 | | - our_json = nil |
69 | | - ams_json = nil |
70 | | - jsonapi_json = nil |
71 | | - jsonapis_json = nil |
72 | | - our_time = Benchmark.measure { our_json = our_serializer.serialized_json }.real * 1000 |
73 | | - ams_time = Benchmark.measure { ams_json = ams_serializer.to_json }.real * 1000 |
74 | | - jsonapi_time = Benchmark.measure { jsonapi_json = jsonapi_serializer.to_json }.real * 1000 |
75 | | - jsonapis_time = Benchmark.measure { jsonapis_json = jsonapis_serializer.to_json }.real * 1000 |
76 | | - |
77 | | - print_stats(message, movie_count, ams_time, jsonapi_time, jsonapis_time, our_time) |
78 | | - return our_json, ams_json, jsonapi_json, jsonapis_json |
| 102 | + def run_json_benchmark(message, movie_count, serializers) |
| 103 | + data = Hash[serializers.keys.collect { |k| [ k, { json: nil, time: nil, speed_factor: nil }] }] |
| 104 | + |
| 105 | + serializers.each_pair do |k,v| |
| 106 | + json_method = SERIALIZERS[k].key?(:json_method) ? SERIALIZERS[k][:json_method] : :to_json |
| 107 | + data[k][:time] = Benchmark.measure { data[k][:json] = v.send(json_method) }.real * 1000 |
| 108 | + end |
| 109 | + |
| 110 | + print_stats(message, movie_count, data) |
| 111 | + |
| 112 | + data |
79 | 113 | end |
80 | 114 |
|
  # Head-to-head comparison against AMS (plus jsonapi-rb and
  # jsonapi-serializers, which are benchmarked and printed but not asserted
  # on). NOTE(review): 'atleast' in the example name is a typo in a runtime
  # string, left as-is here.
  context 'when comparing with AMS 0.10.x' do
    [1, 25, 250, 1000].each do |movie_count|
      it "should serialize #{movie_count} records atleast #{SERIALIZERS[:ams][:speed_factor]} times faster than AMS" do
        # Build equivalent fixture sets for each library under test.
        ams_movies = build_ams_movies(movie_count)
        movies = build_movies(movie_count)
        jsonapi_movies = build_jsonapi_movies(movie_count)
        jsonapis_movies = build_js_movies(movie_count)

        # Keys must match the SERIALIZERS registry.
        serializers = {
          fast_jsonapi: MovieSerializer.new(movies),
          ams: ActiveModelSerializers::SerializableResource.new(ams_movies),
          jsonapi: JSONAPISerializer.new(jsonapi_movies),
          jsonapis: JSONAPISSerializer.new(jsonapis_movies)
        }

        message = "Serialize to JSON string #{movie_count} records"
        json_benchmarks = run_json_benchmark(message, movie_count, serializers)

        message = "Serialize to Ruby Hash #{movie_count} records"
        hash_benchmarks = run_hash_benchmark(message, movie_count, serializers)

        # json: sanity-check output size parity with AMS, then assert the
        # measured speed-up meets the registry's :speed_factor threshold.
        expect(json_benchmarks[:fast_jsonapi][:json].length).to eq json_benchmarks[:ams][:json].length
        json_speed_up = json_benchmarks[:ams][:time] / json_benchmarks[:fast_jsonapi][:time]
        expect(json_speed_up).to be >= SERIALIZERS[:ams][:speed_factor]

        # hash: same speed-up assertion for hash serialization.
        hash_speed_up = hash_benchmarks[:ams][:time] / hash_benchmarks[:fast_jsonapi][:time]
        expect(hash_speed_up).to be >= SERIALIZERS[:ams][:speed_factor]
      end
    end
  end
106 | 147 |
|
  # Same AMS comparison as above, but with compound documents: included
  # :actors and :movie_type relationships plus a :meta payload.
  # NOTE(review): 'atleast' in the example name is a typo in a runtime
  # string, left as-is here.
  context 'when comparing with AMS 0.10.x and with includes and meta' do
    [1, 25, 250, 1000].each do |movie_count|
      it "should serialize #{movie_count} records atleast #{SERIALIZERS[:ams][:speed_factor]} times faster than AMS" do
        # Build equivalent fixture sets for each library under test.
        ams_movies = build_ams_movies(movie_count)
        movies = build_movies(movie_count)
        jsonapi_movies = build_jsonapi_movies(movie_count)
        jsonapis_movies = build_js_movies(movie_count)

        options = {}
        options[:meta] = { total: movie_count }
        options[:include] = [:actors, :movie_type]

        # Keys must match the SERIALIZERS registry. jsonapi-serializers
        # expects dasherized include paths, hence the map.
        serializers = {
          fast_jsonapi: MovieSerializer.new(movies, options),
          ams: ActiveModelSerializers::SerializableResource.new(ams_movies, include: options[:include], meta: options[:meta]),
          jsonapi: JSONAPISerializer.new(jsonapi_movies, include: options[:include], meta: options[:meta]),
          jsonapis: JSONAPISSerializer.new(jsonapis_movies, include: options[:include].map { |i| i.to_s.dasherize }, meta: options[:meta])
        }

        message = "Serialize to JSON string #{movie_count} with includes and meta"
        json_benchmarks = run_json_benchmark(message, movie_count, serializers)

        message = "Serialize to Ruby Hash #{movie_count} with includes and meta"
        hash_benchmarks = run_hash_benchmark(message, movie_count, serializers)

        # json: sanity-check output size parity with AMS, then assert the
        # measured speed-up meets the registry's :speed_factor threshold.
        expect(json_benchmarks[:fast_jsonapi][:json].length).to eq json_benchmarks[:ams][:json].length
        json_speed_up = json_benchmarks[:ams][:time] / json_benchmarks[:fast_jsonapi][:time]
        expect(json_speed_up).to be >= SERIALIZERS[:ams][:speed_factor]

        # hash: same speed-up assertion for hash serialization.
        hash_speed_up = hash_benchmarks[:ams][:time] / hash_benchmarks[:fast_jsonapi][:time]
        expect(hash_speed_up).to be >= SERIALIZERS[:ams][:speed_factor]
      end
    end
  end
135 | 184 |
|
  # Same AMS comparison, exercising polymorphic has_many relationships via
  # Group fixtures and the B-variant serializer classes.
  context 'when comparing with AMS 0.10.x and with polymorphic has_many' do
    [1, 25, 250, 1000].each do |group_count|
      it "should serialize #{group_count} records at least #{SERIALIZERS[:ams][:speed_factor]} times faster than AMS" do
        # Build equivalent fixture sets for each library under test.
        ams_groups = build_ams_groups(group_count)
        groups = build_groups(group_count)
        jsonapi_groups = build_jsonapi_groups(group_count)
        jsonapis_groups = build_jsonapis_groups(group_count)

        options = {}

        # Keys must match the SERIALIZERS registry.
        serializers = {
          fast_jsonapi: GroupSerializer.new(groups, options),
          ams: ActiveModelSerializers::SerializableResource.new(ams_groups),
          jsonapi: JSONAPISerializerB.new(jsonapi_groups),
          jsonapis: JSONAPISSerializerB.new(jsonapis_groups)
        }

        message = "Serialize to JSON string #{group_count} with polymorphic has_many"
        json_benchmarks = run_json_benchmark(message, group_count, serializers)

        message = "Serialize to Ruby Hash #{group_count} with polymorphic has_many"
        hash_benchmarks = run_hash_benchmark(message, group_count, serializers)

        # json: sanity-check output size parity with AMS, then assert the
        # measured speed-up meets the registry's :speed_factor threshold.
        expect(json_benchmarks[:fast_jsonapi][:json].length).to eq json_benchmarks[:ams][:json].length
        json_speed_up = json_benchmarks[:ams][:time] / json_benchmarks[:fast_jsonapi][:time]
        expect(json_speed_up).to be >= SERIALIZERS[:ams][:speed_factor]

        # hash: same speed-up assertion for hash serialization.
        hash_speed_up = hash_benchmarks[:ams][:time] / hash_benchmarks[:fast_jsonapi][:time]
        expect(hash_speed_up).to be >= SERIALIZERS[:ams][:speed_factor]
      end
    end
  end
|
0 commit comments