-module(rabbit_definitions).
-include_lib("rabbit_common/include/rabbit.hrl").

+ -export([boot/0]).
%% automatic import on boot
-export([maybe_load_definitions/0, maybe_load_definitions_from/2]).
%% import

-export_type([definition_object/0, definition_list/0, definition_category/0, definitions/0]).

+ -define(IMPORT_WORK_POOL, definition_import_pool).
+
+ boot() ->
+     PoolSize = application:get_env(rabbit, definition_import_work_pool_size, rabbit_runtime:guess_number_of_cpu_cores()),
+     rabbit_sup:start_supervisor_child(definition_import_pool_sup, worker_pool_sup, [PoolSize, ?IMPORT_WORK_POOL]).
+
maybe_load_definitions() ->
    %% this feature was a part of rabbitmq-management for a long time,
    %% so we check rabbit_management.load_definitions for backward compatibility.
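The work pool sized in boot/0 defaults to the node's CPU core count via rabbit_runtime:guess_number_of_cpu_cores/0 and can be overridden through the definition_import_work_pool_size key read by the application:get_env/3 call above. An illustrative advanced.config snippet (not part of this commit; only the key name comes from boot/0):

%% advanced.config (illustrative)
[
  {rabbit, [
    {definition_import_work_pool_size, 8}
  ]}
].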
@@ -224,20 +231,22 @@ apply_defs(Map, ActingUser, VHost) when is_binary(VHost) ->
apply_defs(Map, ActingUser, SuccessFun) when is_function(SuccessFun) ->
    Version = maps:get(rabbitmq_version, Map, maps:get(rabbit_version, Map, undefined)),
    try
-         for_all(users, ActingUser, Map,
+         concurrent_for_all(users, ActingUser, Map,
                fun(User, _Username) ->
                    rabbit_auth_backend_internal:put_user(User, Version, ActingUser)
                end),
-         for_all(vhosts, ActingUser, Map, fun add_vhost/2),
+         concurrent_for_all(vhosts, ActingUser, Map, fun add_vhost/2),
        validate_limits(Map),
-         for_all(permissions, ActingUser, Map, fun add_permission/2),
-         for_all(topic_permissions, ActingUser, Map, fun add_topic_permission/2),
-         for_all(parameters, ActingUser, Map, fun add_parameter/2),
-         for_all(global_parameters, ActingUser, Map, fun add_global_parameter/2),
-         for_all(policies, ActingUser, Map, fun add_policy/2),
-         for_all(queues, ActingUser, Map, fun add_queue/2),
-         for_all(exchanges, ActingUser, Map, fun add_exchange/2),
-         for_all(bindings, ActingUser, Map, fun add_binding/2),
+         concurrent_for_all(permissions, ActingUser, Map, fun add_permission/2),
+         concurrent_for_all(topic_permissions, ActingUser, Map, fun add_topic_permission/2),
+         sequential_for_all(parameters, ActingUser, Map, fun add_parameter/2),
+         sequential_for_all(global_parameters, ActingUser, Map, fun add_global_parameter/2),
+         %% importing policies concurrently can be unsafe as queues will be getting
+         %% potentially out of order notifications of applicable policy changes
+         sequential_for_all(policies, ActingUser, Map, fun add_policy/2),
+         concurrent_for_all(queues, ActingUser, Map, fun add_queue/2),
+         concurrent_for_all(exchanges, ActingUser, Map, fun add_exchange/2),
+         concurrent_for_all(bindings, ActingUser, Map, fun add_binding/2),
        SuccessFun(),
        ok
    catch {error, E} -> {error, E};
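For reference, a minimal invocation of this apply_defs/3 clause might look as follows; the definitions map shape and the acting username are illustrative and not part of this commit, with key names mirroring the categories handled above:

%% Illustrative call (assumed input shape)
Defs = #{vhosts => [#{name => <<"/">>}],
         queues => [#{name => <<"q1">>, vhost => <<"/">>, durable => true,
                      auto_delete => false, arguments => #{}}]},
ok = rabbit_definitions:apply_defs(Defs, <<"admin">>, fun() -> ok end).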
@@ -254,11 +263,13 @@ apply_defs(Map, ActingUser, SuccessFun, VHost) when is_binary(VHost) ->
        [VHost, ActingUser]),
    try
        validate_limits(Map, VHost),
-         for_all(parameters, ActingUser, Map, VHost, fun add_parameter/3),
-         for_all(policies, ActingUser, Map, VHost, fun add_policy/3),
-         for_all(queues, ActingUser, Map, VHost, fun add_queue/3),
-         for_all(exchanges, ActingUser, Map, VHost, fun add_exchange/3),
-         for_all(bindings, ActingUser, Map, VHost, fun add_binding/3),
+         sequential_for_all(parameters, ActingUser, Map, VHost, fun add_parameter/3),
+         %% importing policies concurrently can be unsafe as queues will be getting
+         %% potentially out of order notifications of applicable policy changes
+         sequential_for_all(policies, ActingUser, Map, VHost, fun add_policy/3),
+         concurrent_for_all(queues, ActingUser, Map, VHost, fun add_queue/3),
+         concurrent_for_all(exchanges, ActingUser, Map, VHost, fun add_exchange/3),
+         concurrent_for_all(bindings, ActingUser, Map, VHost, fun add_binding/3),
        SuccessFun()
    catch {error, E} -> {error, format(E)};
          exit:E     -> {error, format(E)}
@@ -275,17 +286,19 @@ apply_defs(Map, ActingUser, SuccessFun, ErrorFun, VHost) ->
        [VHost, ActingUser]),
    try
        validate_limits(Map, VHost),
-         for_all(parameters, ActingUser, Map, VHost, fun add_parameter/3),
-         for_all(policies, ActingUser, Map, VHost, fun add_policy/3),
-         for_all(queues, ActingUser, Map, VHost, fun add_queue/3),
-         for_all(exchanges, ActingUser, Map, VHost, fun add_exchange/3),
-         for_all(bindings, ActingUser, Map, VHost, fun add_binding/3),
+         sequential_for_all(parameters, ActingUser, Map, VHost, fun add_parameter/3),
+         %% importing policies concurrently can be unsafe as queues will be getting
+         %% potentially out of order notifications of applicable policy changes
+         sequential_for_all(policies, ActingUser, Map, VHost, fun add_policy/3),
+         concurrent_for_all(queues, ActingUser, Map, VHost, fun add_queue/3),
+         concurrent_for_all(exchanges, ActingUser, Map, VHost, fun add_exchange/3),
+         concurrent_for_all(bindings, ActingUser, Map, VHost, fun add_binding/3),
        SuccessFun()
    catch {error, E} -> ErrorFun(format(E));
          exit:E     -> ErrorFun(format(E))
    end.

- for_all(Category, ActingUser, Definitions, Fun) ->
+ sequential_for_all(Category, ActingUser, Definitions, Fun) ->
    case maps:get(rabbit_data_coercion:to_atom(Category), Definitions, undefined) of
        undefined -> ok;
        List      ->
@@ -295,14 +308,12 @@ for_all(Category, ActingUser, Definitions, Fun) ->
            end,
            [begin
                 %% keys are expected to be atoms
-                  Atomized = maps:fold(fun(K, V, Acc) ->
-                                           maps:put(rabbit_data_coercion:to_atom(K), V, Acc)
-                                       end, #{}, M),
+                  Atomized = atomize_keys(M),
                 Fun(Atomized, ActingUser)
             end || M <- List, is_map(M)]
    end.

- for_all(Name, ActingUser, Definitions, VHost, Fun) ->
+ sequential_for_all(Name, ActingUser, Definitions, VHost, Fun) ->

    case maps:get(rabbit_data_coercion:to_atom(Name), Definitions, undefined) of
        undefined -> ok;
@@ -311,6 +322,57 @@ for_all(Name, ActingUser, Definitions, VHost, Fun) ->
             M <- List, is_map(M)]
    end.

+ concurrent_for_all(Category, ActingUser, Definitions, Fun) ->
+     case maps:get(rabbit_data_coercion:to_atom(Category), Definitions, undefined) of
+         undefined -> ok;
+         List      ->
+             case length(List) of
+                 0 -> ok;
+                 N -> rabbit_log:info("Importing ~p ~s...", [N, human_readable_category_name(Category)])
+             end,
+             {ok, Gatherer} = gatherer:start_link(),
+             [begin
+                  %% keys are expected to be atoms
+                  Atomized = atomize_keys(M),
+                  ok = gatherer:fork(Gatherer),
+                  worker_pool:submit_async(
+                    ?IMPORT_WORK_POOL,
+                    fun() ->
+                            Fun(Atomized, ActingUser),
+                            gatherer:finish(Gatherer)
+                    end)
+              end || M <- List, is_map(M)],
+             gatherer:out(Gatherer),
+             gatherer:stop(Gatherer)
+     end.
+
+ concurrent_for_all(Name, ActingUser, Definitions, VHost, Fun) ->
+     case maps:get(rabbit_data_coercion:to_atom(Name), Definitions, undefined) of
+         undefined -> ok;
+         List      ->
+             {ok, Gatherer} = gatherer:start_link(),
+             [begin
+                  %% keys are expected to be atoms
+                  Atomized = atomize_keys(M),
+                  ok = gatherer:fork(Gatherer),
+                  worker_pool:submit_async(
+                    ?IMPORT_WORK_POOL,
+                    fun() ->
+                            Fun(VHost, Atomized, ActingUser),
+                            gatherer:finish(Gatherer)
+                    end)
+              end || M <- List, is_map(M)],
+             gatherer:out(Gatherer),
+             gatherer:stop(Gatherer)
+     end.
+
+ -spec atomize_keys(#{any() => any()}) -> #{atom() => any()}.
+
+ atomize_keys(M) ->
+     maps:fold(fun(K, V, Acc) ->
+                   maps:put(rabbit_data_coercion:to_atom(K), V, Acc)
+               end, #{}, M).
+

-spec human_readable_category_name(definition_category()) -> string().

human_readable_category_name(topic_permissions) -> "topic permissions";
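Both concurrent_for_all clauses above follow the same coordination pattern: fork the gatherer once per item, hand each item to the shared worker pool, and block in gatherer:out/1 until every task has reported via gatherer:finish/1. A stripped-down sketch of that pattern as a hypothetical helper (not part of this commit; the pool name is passed in so the snippet is self-contained):

%% Hypothetical helper illustrating the gatherer + worker_pool pattern
%% used by concurrent_for_all/4,5 above.
parallel_each(PoolName, Fun, Items) ->
    {ok, Gatherer} = gatherer:start_link(),
    [begin
         %% register one pending unit of work per item
         ok = gatherer:fork(Gatherer),
         worker_pool:submit_async(
           PoolName,
           fun() ->
                   Fun(Item),
                   %% mark this unit of work as done
                   gatherer:finish(Gatherer)
           end)
     end || Item <- Items],
    %% blocks until every forked unit has finished
    gatherer:out(Gatherer),
    gatherer:stop(Gatherer).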
@@ -390,6 +452,8 @@ add_policy(VHost, Param, Username) ->
        exit(rabbit_data_coercion:to_binary(rabbit_misc:escape_html_tags(E ++ S)))
    end.

+ -spec add_vhost(map(), rabbit_types:username()) -> ok.
+
add_vhost(VHost, ActingUser) ->
    VHostName = maps:get(name, VHost, undefined),
    VHostTrace = maps:get(tracing, VHost, undefined),
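For context, the vhost objects handled here are maps whose keys have already been atomized by atomize_keys/1 before add_vhost/2 runs; a minimal illustrative object, limited to the keys read above:

%% Illustrative vhost definition object as seen by add_vhost/2
#{name => <<"test-vhost">>, tracing => false}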