forked from mongodb/mongo-ruby-driver
-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathRakefile
286 lines (236 loc) · 8.71 KB
/
Rakefile
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
# -*- mode: ruby -*-
# Rakefile for this mongo-ruby-driver fork: wires up test, release,
# documentation and benchmark tasks.
require 'bundler'
require 'bundler/gem_tasks'
require 'rspec/core/rake_task'
# TODO move the mongo require into the individual tasks that actually need it
require 'mongo'
# Absolute path of the project root (the directory containing this Rakefile).
ROOT = File.expand_path(File.join(File.dirname(__FILE__)))
# Make the shared test tooling (mrss) loadable from the vendored spec/shared tree.
$: << File.join(ROOT, 'spec/shared/lib')
require 'mrss/spec_organizer'
# Maps spec file paths to the bucket category used by Mrss::SpecOrganizer.
# Entries are matched in order and the FIRST matching regexp wins, so the
# more specific patterns (e.g. mongo/server, spec_tests/sdam_integration)
# must come before the general catch-alls (mongo, spec_tests).
# Frozen: this table is read-only configuration.
CLASSIFIERS = [
  [%r,^mongo/server,, :unit_server],
  [%r,^mongo,, :unit],
  [%r,^kerberos,, :unit],
  [%r,^integration/sdam_error_handling,, :sdam_integration],
  [%r,^integration/cursor_reaping,, :cursor_reaping],
  [%r,^integration/query_cache,, :query_cache],
  [%r,^integration/transactions_examples,, :tx_examples],
  [%r,^(atlas|integration),, :integration],
  [%r,^spec_tests/sdam_integration,, :spec_sdam_integration],
  [%r,^spec_tests,, :spec],
].freeze
# Order in which Mrss::SpecOrganizer runs the buckets defined by CLASSIFIERS:
# quick transaction examples first, then unit tests, then the slower
# integration and spec-test suites. Frozen: read-only configuration.
RUN_PRIORITY = %i(
  tx_examples
  unit unit_server
  integration sdam_integration cursor_reaping query_cache
  spec spec_sdam_integration
).freeze
# Rename Bundler's stock `release` task to `release:do` by editing Rake's
# internal task table directly, so that a guarded `release` task (defined
# further down in this file) can run a private-key check first and then
# delegate to the original implementation.
tasks = Rake.application.instance_variable_get('@tasks')
tasks['release:do'] = tasks.delete('release')
RSpec::Core::RakeTask.new(:spec) do |t|
#t.rspec_opts = "--profile 5" if ENV['CI']
end
# Default `rake` invocation: create the test users, then run the spec suite.
task :default => ['spec:prepare', :spec]
# Tasks that prepare and inspect the test environment. Each task adds the
# spec/ directory to the load path and requires its support files lazily,
# so `rake -T` stays fast and does not need a running deployment.
namespace :spec do
desc 'Creates necessary user accounts in the cluster'
task :prepare do
$: << File.join(File.dirname(__FILE__), 'spec')
require 'support/utils'
require 'support/spec_setup'
SpecSetup.new.run
end
desc 'Waits for sessions to be available in the deployment'
task :wait_for_sessions do
$: << File.join(File.dirname(__FILE__), 'spec')
require 'support/utils'
require 'support/spec_config'
require 'support/client_registry'
client = ClientRegistry.instance.global_client('authorized')
client.database.command(ping: 1)
# Poll the cluster until it reports session support, rescanning on each
# failure, for at most 300 seconds.
deadline = Time.now + 300
loop do
begin
client.cluster.validate_session_support!
break
rescue Mongo::Error::SessionsNotSupported
if Time.now >= deadline
raise "Sessions did not become supported in 300 seconds"
end
client.cluster.scan!
end
end
end
desc 'Prints configuration used by the test suite'
task :config do
$: << File.join(File.dirname(__FILE__), 'spec')
# Since this task is usually used for troubleshooting of test suite
# configuration, leave driver log level at the default of debug to
# have connection diagnostics printed during handshakes and such.
require 'support/utils'
require 'support/spec_config'
require 'support/client_registry'
SpecConfig.instance.print_summary
end
# Builds the organizer that buckets spec files (per CLASSIFIERS) and runs
# the buckets in RUN_PRIORITY order. Defined as a method (not a constant)
# so it is constructed lazily, only when a task below needs it.
def spec_organizer
Mrss::SpecOrganizer.new(
root: ROOT,
classifiers: CLASSIFIERS,
priority_order: RUN_PRIORITY,
)
end
# CI entry point: prepare test users, then run all buckets.
task :ci => ['spec:prepare'] do
spec_organizer.run
end
desc 'Show test buckets'
task :buckets do
spec_organizer.ordered_buckets.each do |category, paths|
puts "#{category || 'remaining'}: #{paths&.join(' ') || '<none>'}"
end
end
end
# Release safety check: refuse to release unless the gem signing key is
# present in the working directory.
namespace :release do
  task :check_private_key do
    raise "No private key present, cannot release" unless File.exist?('gem-private_key.pem')
  end
end

# Recreate the `release` task so it verifies the signing key before
# delegating to Bundler's original release implementation (which this
# Rakefile renames to release:do near the top of the file).
task :release => ['release:check_private_key', 'release:do']
desc "Generate all documentation"
task :docs => 'docs:yard'

namespace :docs do
  # Fixed typo in the task description ("documention" -> "documentation").
  desc "Generate yard documentation"
  task :yard do
    # Write YARD output into a per-version directory, removing any stale
    # output from a previous run of the same version first.
    out = File.join('yard-docs', Mongo::VERSION)
    FileUtils.rm_rf(out)
    system "yardoc -o #{out} --title mongo-#{Mongo::VERSION}"
  end
end
require_relative "profile/benchmarking"
# Some benchmark tasks below require data files, available from the MongoDB
# drivers team. See the comments above each task for details.
# Standard cross-driver benchmark suite. The task bodies only run when
# invoked, so defining them does not require the data files to exist.
namespace :benchmark do
  desc "Run the driver benchmark tests."

  namespace :micro do
    desc "Run the common driver micro benchmarking tests"

    namespace :flat do
      desc "Benchmarking for flat bson documents."
      # Requirement: A file in Mongo::Benchmarking::DATA_PATH, called flat_bson.json.
      task :encode do
        puts "MICRO BENCHMARK:: FLAT:: ENCODE"
        Mongo::Benchmarking::Micro.run(:flat, :encode)
      end

      # Requirement: A file in Mongo::Benchmarking::DATA_PATH, called flat_bson.json.
      task :decode do
        puts "MICRO BENCHMARK:: FLAT:: DECODE"
        Mongo::Benchmarking::Micro.run(:flat, :decode)
      end
    end

    namespace :deep do
      desc "Benchmarking for deep bson documents."
      # Requirement: A file in Mongo::Benchmarking::DATA_PATH, called deep_bson.json.
      task :encode do
        puts "MICRO BENCHMARK:: DEEP:: ENCODE"
        Mongo::Benchmarking::Micro.run(:deep, :encode)
      end

      # Requirement: A file in Mongo::Benchmarking::DATA_PATH, called deep_bson.json.
      task :decode do
        puts "MICRO BENCHMARK:: DEEP:: DECODE"
        Mongo::Benchmarking::Micro.run(:deep, :decode)
      end
    end

    namespace :full do
      desc "Benchmarking for full bson documents."
      # Requirement: A file in Mongo::Benchmarking::DATA_PATH, called full_bson.json.
      task :encode do
        puts "MICRO BENCHMARK:: FULL:: ENCODE"
        Mongo::Benchmarking::Micro.run(:full, :encode)
      end

      # Requirement: A file in Mongo::Benchmarking::DATA_PATH, called full_bson.json.
      task :decode do
        puts "MICRO BENCHMARK:: FULL:: DECODE"
        Mongo::Benchmarking::Micro.run(:full, :decode)
      end
    end
  end

  namespace :single_doc do
    desc "Run the common driver single-document benchmarking tests"
    task :command do
      puts "SINGLE DOC BENCHMARK:: COMMAND"
      Mongo::Benchmarking::SingleDoc.run(:command)
    end

    # Requirement: A file in Mongo::Benchmarking::DATA_PATH, called TWEET.json.
    task :find_one do
      puts "SINGLE DOC BENCHMARK:: FIND ONE BY ID"
      Mongo::Benchmarking::SingleDoc.run(:find_one)
    end

    # Requirement: A file in Mongo::Benchmarking::DATA_PATH, called SMALL_DOC.json.
    task :insert_one_small do
      puts "SINGLE DOC BENCHMARK:: INSERT ONE SMALL DOCUMENT"
      Mongo::Benchmarking::SingleDoc.run(:insert_one_small)
    end

    # Requirement: A file in Mongo::Benchmarking::DATA_PATH, called LARGE_DOC.json.
    task :insert_one_large do
      puts "SINGLE DOC BENCHMARK:: INSERT ONE LARGE DOCUMENT"
      Mongo::Benchmarking::SingleDoc.run(:insert_one_large)
    end
  end

  namespace :multi_doc do
    desc "Run the common driver multi-document benchmarking tests"
    # Requirement: A file in Mongo::Benchmarking::DATA_PATH, called TWEET.json.
    task :find_many do
      puts "MULTI DOCUMENT BENCHMARK:: FIND MANY"
      Mongo::Benchmarking::MultiDoc.run(:find_many)
    end

    # Requirement: A file in Mongo::Benchmarking::DATA_PATH, called SMALL_DOC.json.
    task :bulk_insert_small do
      puts "MULTI DOCUMENT BENCHMARK:: BULK INSERT SMALL"
      Mongo::Benchmarking::MultiDoc.run(:bulk_insert_small)
    end

    # Requirement: A file in Mongo::Benchmarking::DATA_PATH, called LARGE_DOC.json.
    task :bulk_insert_large do
      puts "MULTI DOCUMENT BENCHMARK:: BULK INSERT LARGE"
      Mongo::Benchmarking::MultiDoc.run(:bulk_insert_large)
    end

    # Requirement: A file in Mongo::Benchmarking::DATA_PATH, called GRIDFS_LARGE.
    task :gridfs_upload do
      puts "MULTI DOCUMENT BENCHMARK:: GRIDFS UPLOAD"
      Mongo::Benchmarking::MultiDoc.run(:gridfs_upload)
    end

    # Requirement: A file in Mongo::Benchmarking::DATA_PATH, called GRIDFS_LARGE.
    task :gridfs_download do
      puts "MULTI DOCUMENT BENCHMARK:: GRIDFS DOWNLOAD"
      Mongo::Benchmarking::MultiDoc.run(:gridfs_download)
    end
  end

  namespace :parallel do
    # Fixed typo in the task description ("paralell" -> "parallel").
    desc "Run the common driver parallel ETL benchmarking tests"
    # Requirement: A directory in Mongo::Benchmarking::DATA_PATH, called LDJSON_MULTI,
    # with the files used in this task.
    task :import do
      puts "PARALLEL ETL BENCHMARK:: IMPORT"
      Mongo::Benchmarking::Parallel.run(:import)
    end

    # Requirement: A directory in Mongo::Benchmarking::DATA_PATH, called LDJSON_MULTI,
    # with the files used in this task.
    # Requirement: Another directory in "#{Mongo::Benchmarking::DATA_PATH}/LDJSON_MULTI"
    # called 'output'.
    task :export do
      puts "PARALLEL ETL BENCHMARK:: EXPORT"
      Mongo::Benchmarking::Parallel.run(:export)
    end

    # Requirement: A directory in Mongo::Benchmarking::DATA_PATH, called GRIDFS_MULTI,
    # with the files used in this task.
    task :gridfs_upload do
      puts "PARALLEL ETL BENCHMARK:: GRIDFS UPLOAD"
      Mongo::Benchmarking::Parallel.run(:gridfs_upload)
    end

    # Requirement: A directory in Mongo::Benchmarking::DATA_PATH, called GRIDFS_MULTI,
    # with the files used in this task.
    # Requirement: Another directory in "#{Mongo::Benchmarking::DATA_PATH}/GRIDFS_MULTI"
    # called 'output'.
    task :gridfs_download do
      puts "PARALLEL ETL BENCHMARK:: GRIDFS DOWNLOAD"
      Mongo::Benchmarking::Parallel.run(:gridfs_download)
    end
  end
end