import_backup: do not load all blobs into memory at once

Author:       Alexander Krotov
Date:         2020-07-15 23:27:01 +03:00
Committed by: holger krekel
Parent:       40dc72b2b1
Commit:       f461e2a2fd


@@ -449,27 +449,33 @@ async fn import_backup(context: &Context, backup_to_import: impl AsRef<Path>) ->
"***IMPORT-in-progress: total_files_cnt={:?}", total_files_cnt, "***IMPORT-in-progress: total_files_cnt={:?}", total_files_cnt,
); );
let files = context // Load IDs only for now, without the file contents, to avoid
// consuming too much memory.
let file_ids = context
.sql .sql
.query_map( .query_map(
"SELECT file_name, file_content FROM backup_blobs ORDER BY id;", "SELECT id FROM backup_blobs ORDER BY id",
paramsv![], paramsv![],
|row| { |row| row.get(0),
let name: String = row.get(0)?; |ids| {
let blob: Vec<u8> = row.get(1)?; ids.collect::<std::result::Result<Vec<i64>, _>>()
Ok((name, blob))
},
|files| {
files
.collect::<std::result::Result<Vec<_>, _>>()
.map_err(Into::into) .map_err(Into::into)
}, },
) )
.await?; .await?;
let mut all_files_extracted = true; let mut all_files_extracted = true;
for (processed_files_cnt, (file_name, file_blob)) in files.into_iter().enumerate() { for (processed_files_cnt, file_id) in file_ids.into_iter().enumerate() {
// Load a single blob into memory
let (file_name, file_blob) = context
.sql
.query_row(
"SELECT file_name, file_content FROM backup_blobs WHERE id = ?",
paramsv![file_id],
|row| Ok((row.get::<_, String>(0)?, row.get::<_, Vec<u8>>(1)?)),
)
.await?;
if context.shall_stop_ongoing().await { if context.shall_stop_ongoing().await {
all_files_extracted = false; all_files_extracted = false;
break; break;
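
For reference, the same two-phase pattern as a minimal standalone sketch. It calls the rusqlite crate directly rather than going through Delta Chat's async Sql wrapper, and the function name extract_blobs, the db_path parameter, and the plain std::fs::write step are illustrative assumptions, not the actual import code:

use rusqlite::Connection;
use std::error::Error;

fn extract_blobs(db_path: &str) -> Result<(), Box<dyn Error>> {
    let conn = Connection::open(db_path)?;

    // Phase 1: fetch only the row IDs. Memory use is proportional to
    // the number of rows, not to the total size of the stored blobs.
    let mut stmt = conn.prepare("SELECT id FROM backup_blobs ORDER BY id")?;
    let ids: Vec<i64> = stmt
        .query_map([], |row| row.get(0))?
        .collect::<rusqlite::Result<Vec<i64>>>()?;

    // Phase 2: fetch one blob per query, so at most one file's
    // contents are resident in memory at any time.
    for id in ids {
        let (file_name, file_blob): (String, Vec<u8>) = conn.query_row(
            "SELECT file_name, file_content FROM backup_blobs WHERE id = ?",
            [id],
            |row| Ok((row.get(0)?, row.get(1)?)),
        )?;
        // Hypothetical extraction step; the real import resolves the
        // target path inside the blob directory, which is elided here.
        std::fs::write(&file_name, &file_blob)?;
    }
    Ok(())
}

The trade-off is one extra query per blob instead of a single bulk SELECT, which is cheap against a local SQLite file; in exchange, peak memory is bounded by the largest single blob rather than the sum of all of them, and ORDER BY id keeps the extraction order deterministic.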