@@ -115,16 +115,10 @@ def test_insert_batch_time_normal():
     batch_size_automator.insert_batch_time(duration)
 
     assert batch_size_automator.batch_times["best"]["avg_time"] == duration
-    assert (
-        batch_size_automator.batch_times["best"]["batch_per_second"]
-        == initial_batch_size / duration
-    )
+    assert batch_size_automator.batch_times["best"]["batch_per_second"] == initial_batch_size / duration
     # because for the next iteration batch_size is increased by 500
     initial_step_size = 500
-    assert (
-        batch_size_automator.get_next_batch_size()
-        == initial_batch_size + initial_step_size
-    )
+    assert batch_size_automator.get_next_batch_size() == initial_batch_size + initial_step_size
 
     # Test Case 2:
     # five will be the first best time making 3000 the best batchsize
@@ -135,10 +129,7 @@ def test_insert_batch_time_normal():
     current_batch_per_second = current_batch_size / duration
 
     assert batch_size_automator.batch_times["best"]["avg_time"] == duration
-    assert (
-        batch_size_automator.batch_times["best"]["batch_per_second"]
-        == current_batch_per_second
-    )
+    assert batch_size_automator.batch_times["best"]["batch_per_second"] == current_batch_per_second
     batch_size = 3000 + batch_size_automator.step_size * batch_size_automator.alpha
     assert batch_size_automator.get_next_batch_size() == batch_size
 
@@ -149,14 +140,10 @@ def test_insert_batch_time_normal():
     batch_size_automator.insert_batch_time(duration)
 
     assert batch_size_automator.batch_times["best"]["avg_time"] == 5  # last duration
-    assert (
-        batch_size_automator.batch_times["best"]["batch_per_second"]
-        == current_batch_per_second
-    )
+    assert batch_size_automator.batch_times["best"]["batch_per_second"] == current_batch_per_second
     # batch_size is decreased this time because no better value was found and calculated based on best_size
     batch_size = (
-        batch_size_automator.batch_times["best"]["size"]
-        - batch_size_automator.step_size * batch_size_automator.alpha
+        batch_size_automator.batch_times["best"]["size"] - batch_size_automator.step_size * batch_size_automator.alpha
     )
 
     assert batch_size_automator.get_next_batch_size() == batch_size
@@ -172,9 +159,7 @@ def test_insert_batch_time_normal():
         == batch_size_automator.batch_times["best"]["size"] / duration
     )
     # batch_size is further decreased
-    batch_size = (
-        batch_size - batch_size_automator.step_size * batch_size_automator.alpha
-    )
+    batch_size = batch_size - batch_size_automator.step_size * batch_size_automator.alpha
     assert batch_size_automator.get_next_batch_size() == batch_size
 
     # Test Case 5:
@@ -233,16 +218,12 @@ def test_insert_batch_time_smallest_batch():
     batch_size_automator.insert_batch_time(worse_duration)
 
     # now we get better each time until we reach batch_size 1
-    duration = (
-        batch_size_automator.batch_size
-        / batch_size_automator.batch_times["best"]["batch_per_second"]
-    ) - 10
+    duration = (batch_size_automator.batch_size / batch_size_automator.batch_times["best"]["batch_per_second"]) - 10
     batch_size = batch_size_automator.batch_size
     while batch_size_automator.batch_size != 1:
         if batch_size != batch_size_automator.batch_size:
             duration = (
-                batch_size_automator.batch_size
-                / batch_size_automator.batch_times["best"]["batch_per_second"]
+                batch_size_automator.batch_size / batch_size_automator.batch_times["best"]["batch_per_second"]
             ) - 10
             batch_size = batch_size_automator.batch_size
         batch_size_automator.insert_batch_time(duration)
@@ -283,55 +264,39 @@ def test_step_size_is_default():
 def test_step_size_is_data_batch_size():
     default_step_size = 500
     data_batch_size = 1000
-    batch_size_automator = BatchSizeAutomator(
-        0, data_batch_size=data_batch_size, step_size=default_step_size
-    )
+    batch_size_automator = BatchSizeAutomator(0, data_batch_size=data_batch_size, step_size=default_step_size)
 
     assert batch_size_automator.step_size == data_batch_size
 
 
 def test_batch_size_is_multitude_of_data_batch_size():
     data_batch_size = 500
     batch_size = 700
-    batch_size_automator = BatchSizeAutomator(
-        batch_size, data_batch_size=data_batch_size
-    )
+    batch_size_automator = BatchSizeAutomator(batch_size, data_batch_size=data_batch_size)
 
     assert batch_size_automator.batch_size == 500
 
     data_batch_size = 500
     batch_size = 800
-    batch_size_automator = BatchSizeAutomator(
-        batch_size, data_batch_size=data_batch_size
-    )
+    batch_size_automator = BatchSizeAutomator(batch_size, data_batch_size=data_batch_size)
 
     assert batch_size_automator.batch_size == 1000
 
     data_batch_size = 500
     batch_size = 400
-    batch_size_automator = BatchSizeAutomator(
-        batch_size, data_batch_size=data_batch_size
-    )
+    batch_size_automator = BatchSizeAutomator(batch_size, data_batch_size=data_batch_size)
 
     assert batch_size_automator.batch_size == 500
 
 
 def test_batch_size_change_is_at_least_data_batch_size():
     data_batch_size = 500
     test_size = 1
-    batch_size_automator = BatchSizeAutomator(
-        0, data_batch_size=data_batch_size, test_size=test_size
-    )
+    batch_size_automator = BatchSizeAutomator(0, data_batch_size=data_batch_size, test_size=test_size)
 
     # initial test cycle:
     batch_size_automator.insert_batch_time(1)
-    assert (
-        batch_size_automator.get_next_batch_size() == 3000
-    )  # initially bath_size will go up
+    assert batch_size_automator.get_next_batch_size() == 3000  # initially bath_size will go up
     # second test cycle:
-    batch_size_automator.insert_batch_time(
-        2
-    )  # worse batch performance reduces alpha and leads to smaller step_size
-    assert (
-        batch_size_automator.get_next_batch_size() == 2000
-    )  # batch_size is changed by at least data_batch_size
+    batch_size_automator.insert_batch_time(2)  # worse batch performance reduces alpha and leads to smaller step_size
+    assert batch_size_automator.get_next_batch_size() == 2000  # batch_size is changed by at least data_batch_size
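
For orientation (not part of the diff): a minimal usage sketch of the feedback loop these tests exercise, assuming only the names that appear in the hunks above (BatchSizeAutomator, insert_batch_time, get_next_batch_size, data_batch_size). The import path and the write_batch helper are placeholders, not the project's actual API.

import time

from batch_size_automator import BatchSizeAutomator  # assumed import path


def write_batch(rows):
    """Placeholder for the real insert operation whose batch size is being tuned."""
    time.sleep(len(rows) / 1_000_000)


automator = BatchSizeAutomator(2500, data_batch_size=500)
data = list(range(100_000))
position = 0
while position < len(data):
    batch_size = automator.get_next_batch_size()
    start = time.monotonic()
    write_batch(data[position:position + batch_size])
    # feed the measured duration back so the automator can adjust the batch size
    automator.insert_batch_time(time.monotonic() - start)
    position += batch_size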