preload<T> method
Future<Map<String, PreloadOperation>> preload<T>({
  required Map<String, Future<T> Function()> dataProviders,
  CachePolicy? policy,
  Set<String>? tags,
  int parallelism = 5,
  void onProgress(
    String key,
    PreloadStatus status,
    double progress
  )?,
})
Preloads data into the cache.

The dataProviders parameter is a map where the keys are the cache keys and the values are functions that return the data to be cached. The policy parameter can be used to apply a cache policy to all of the preloaded entries. The tags parameter can be used to associate tags with all of the preloaded entries. The parallelism parameter determines how many items are preloaded in parallel; items are processed in batches of this size. The onProgress parameter is an optional callback invoked to report progress as items are preloaded; it receives the item's key, its PreloadStatus, and the overall progress as a fraction of the total number of items.

Returns a map where the keys are the cache keys and the values are the corresponding PreloadOperation objects.
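For illustration only, here is a minimal sketch of calling this method. The cache variable stands in for an instance of the class that declares preload<T> (its name is not shown on this page), the loader bodies are placeholders, and the optional policy is omitted so as not to guess at CachePolicy's constructor:

// Hypothetical usage; `cache` is a placeholder for an instance of the
// class that declares preload<T>.
final operations = await cache.preload<String>(
  dataProviders: {
    'config': () async => 'config payload',   // stand-in loader
    'profile': () async => 'profile payload', // stand-in loader
  },
  tags: {'startup'},
  parallelism: 2,
  onProgress: (key, status, progress) {
    print('$key -> $status (${(progress * 100).toStringAsFixed(0)}%)');
  },
);

// One PreloadOperation per cache key is returned.
print(operations.keys); // (config, profile)

Because parallelism is 2 here, both loaders run in the same batch; a third provider would only start once the first batch has completed.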
Implementation
Future<Map<String, PreloadOperation>> preload<T>({
  required Map<String, Future<T> Function()> dataProviders,
  CachePolicy? policy,
  Set<String>? tags,
  int parallelism = 5,
  void Function(String key, PreloadStatus status, double progress)?
      onProgress,
}) async {
  if (dataProviders.isEmpty) {
    _log.warning('No data providers specified for preloading');
    return {};
  }

  _log.info('Preloading ${dataProviders.length} items');

  // Create operations for each key
  final operations = <String, PreloadOperation>{};
  for (final key in dataProviders.keys) {
    final operation = PreloadOperation(key: key);
    operations[key] = operation;
    _operations[key] = operation;
    _preloadController.add(operation);
  }

  // Process in batches based on parallelism
  final keys = dataProviders.keys.toList();
  final totalItems = keys.length;
  var completedItems = 0;

  for (var i = 0; i < keys.length; i += parallelism) {
    final end =
        (i + parallelism < keys.length) ? i + parallelism : keys.length;
    final batch = keys.sublist(i, end);

    // Process batch in parallel
    final futures = <Future<void>>[];
    for (final key in batch) {
      futures.add(_preloadItem(
        key,
        dataProviders[key]!,
        operations[key]!,
        policy: policy,
        tags: tags,
        onProgress: (status) {
          completedItems++;
          final progress = completedItems / totalItems;
          onProgress?.call(key, status, progress);
        },
      ));
    }

    // Wait for all items in the batch to complete
    await Future.wait(futures);
  }

  _log.info('Preloading completed for ${dataProviders.length} items');
  return operations;
}
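The _preloadItem helper called above is not shown on this page. Purely as an illustration of how each entry might be loaded and reported back, here is a minimal sketch; the cache write call, the PreloadStatus values, and how the PreloadOperation is updated are assumptions, not the library's confirmed API.

// Hypothetical sketch only. The `set` call and the PreloadStatus values
// below are assumptions, not confirmed API from this page.
Future<void> _preloadItem<T>(
  String key,
  Future<T> Function() provider,
  PreloadOperation operation, {
  CachePolicy? policy,
  Set<String>? tags,
  void Function(PreloadStatus status)? onProgress,
}) async {
  try {
    final value = await provider();
    // Assumed cache write; the real method name and parameters may differ.
    await set(key, value, policy: policy, tags: tags);
    // The PreloadOperation would also be marked as finished here; its API is
    // not shown on this page.
    // Report exactly once so the caller's completedItems count stays accurate.
    onProgress?.call(PreloadStatus.completed); // assumed enum value
  } catch (error) {
    _log.warning('Preloading failed for $key: $error');
    onProgress?.call(PreloadStatus.failed); // assumed enum value
  }
}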