cargo fmt

This commit is contained in:
holger krekel
2019-10-06 23:09:57 +02:00
parent b3cd80ba6d
commit 32ef0d4dc3

View File

@@ -1,6 +1,6 @@
use core::cmp::{min,max}; use core::cmp::{max, min};
use std::ffi::CString; use std::ffi::CString;
use std::path::{Path,PathBuf}; use std::path::{Path, PathBuf};
use std::ptr; use std::ptr;
use num_traits::FromPrimitive; use num_traits::FromPrimitive;
@@ -596,7 +596,9 @@ fn export_backup(context: &Context, dir: impl AsRef<Path>) -> Result<()> {
Err(err) Err(err)
} }
Ok(()) => { Ok(()) => {
context.sql.set_raw_config_int(context, "backup_time", now as i32)?; context
.sql
.set_raw_config_int(context, "backup_time", now as i32)?;
context.call_cb(Event::ImexFileWritten(dest_path_filename.clone())); context.call_cb(Event::ImexFileWritten(dest_path_filename.clone()));
Ok(()) Ok(())
} }
@@ -607,67 +609,65 @@ fn add_files_to_export(context: &Context, dest_path_filename: &PathBuf) -> Resul
// add all files as blobs to the database copy (this does not require // add all files as blobs to the database copy (this does not require
// the source to be locked, neither the destination as it is used only here) // the source to be locked, neither the destination as it is used only here)
let sql = Sql::new(); let sql = Sql::new();
ensure!(sql.open(context, &dest_path_filename, 0), "could not open db"); ensure!(
if !sql.table_exists("backup_blobs") { sql.open(context, &dest_path_filename, 0),
sql::execute( "could not open db"
context, );
&sql, if !sql.table_exists("backup_blobs") {
"CREATE TABLE backup_blobs (id INTEGER PRIMARY KEY, file_name, file_content);", sql::execute(
params![], context,
)? &sql,
} "CREATE TABLE backup_blobs (id INTEGER PRIMARY KEY, file_name, file_content);",
// copy all files from BLOBDIR into backup-db params![],
let mut total_files_cnt = 0; )?
let dir = context.get_blobdir(); }
let dir_handle = std::fs::read_dir(&dir)?; // copy all files from BLOBDIR into backup-db
total_files_cnt += dir_handle.filter(|r| r.is_ok()).count(); let mut total_files_cnt = 0;
let dir = context.get_blobdir();
let dir_handle = std::fs::read_dir(&dir)?;
total_files_cnt += dir_handle.filter(|r| r.is_ok()).count();
info!(context, "EXPORT: total_files_cnt={}", total_files_cnt); info!(context, "EXPORT: total_files_cnt={}", total_files_cnt);
// scan directory, pass 2: copy files // scan directory, pass 2: copy files
let dir_handle = std::fs::read_dir(&dir)?; let dir_handle = std::fs::read_dir(&dir)?;
sql.prepare( sql.prepare(
"INSERT INTO backup_blobs (file_name, file_content) VALUES (?, ?);", "INSERT INTO backup_blobs (file_name, file_content) VALUES (?, ?);",
|mut stmt, _| { |mut stmt, _| {
let mut processed_files_cnt = 0; let mut processed_files_cnt = 0;
for entry in dir_handle { for entry in dir_handle {
let entry = entry?; let entry = entry?;
if context if context
.running_state .running_state
.clone() .clone()
.read() .read()
.unwrap() .unwrap()
.shall_stop_ongoing .shall_stop_ongoing
{ {
bail!("canceled during export-files"); bail!("canceled during export-files");
} }
processed_files_cnt += 1; processed_files_cnt += 1;
let permille = max(min( let permille = max(min(processed_files_cnt * 1000 / total_files_cnt, 990), 10);
processed_files_cnt * 1000 / total_files_cnt, context.call_cb(Event::ImexProgress(permille));
990), 10);
context.call_cb(Event::ImexProgress(permille));
let name_f = entry.file_name(); let name_f = entry.file_name();
let name = name_f.to_string_lossy(); let name = name_f.to_string_lossy();
if name.starts_with("delta-chat") && name.ends_with(".bak") if name.starts_with("delta-chat") && name.ends_with(".bak") {
{ continue;
continue; }
} info!(context, "EXPORT: copying filename={}", name);
info!(context, "EXPORT: copying filename={}", name); let curr_path_filename = context.get_blobdir().join(entry.file_name());
let curr_path_filename = context.get_blobdir().join(entry.file_name()); if let Ok(buf) = dc_read_file(context, &curr_path_filename) {
if let Ok(buf) = if buf.is_empty() {
dc_read_file(context, &curr_path_filename) continue;
{ }
if buf.is_empty() { // bail out if we can't insert
continue; stmt.execute(params![name, buf])?;
} }
// bail out if we can't insert }
stmt.execute(params![name, buf])?; Ok(())
} },
} )?;
Ok(()) Ok(())
}
)?;
Ok(())
} }
/******************************************************************************* /*******************************************************************************
@@ -686,67 +686,67 @@ fn import_self_keys(context: &Context, dir: impl AsRef<Path>) -> Result<()> {
let dir_name = dir.as_ref().to_string_lossy(); let dir_name = dir.as_ref().to_string_lossy();
let dir_handle = std::fs::read_dir(&dir)?; let dir_handle = std::fs::read_dir(&dir)?;
for entry in dir_handle { for entry in dir_handle {
let entry_fn = entry?.file_name(); let entry_fn = entry?.file_name();
let name_f = entry_fn.to_string_lossy(); let name_f = entry_fn.to_string_lossy();
let path_plus_name = dir.as_ref().join(&entry_fn); let path_plus_name = dir.as_ref().join(&entry_fn);
match dc_get_filesuffix_lc(&name_f) { match dc_get_filesuffix_lc(&name_f) {
Some(suffix) => { Some(suffix) => {
if suffix != "asc" { if suffix != "asc" {
continue;
}
set_default = if name_f.contains("legacy") {
info!(context, "found legacy key '{}'", path_plus_name.display());
false
} else {
true
}
}
None => {
continue; continue;
} }
set_default = if name_f.contains("legacy") {
info!(context, "found legacy key '{}'", path_plus_name.display());
false
} else {
true
}
} }
let ccontent = if let Ok(content) = dc_read_file(context, &path_plus_name) { None => {
key = String::from_utf8_lossy(&content).to_string();
CString::new(content).unwrap_or_default()
} else {
continue;
};
/* only import if we have a private key */
let mut buf2_headerline = String::default();
let split_res: bool;
unsafe {
let buf2 = dc_strdup(ccontent.as_ptr());
split_res = dc_split_armored_data(
buf2,
&mut buf2_headerline,
ptr::null_mut(),
ptr::null_mut(),
ptr::null_mut(),
);
libc::free(buf2 as *mut libc::c_void);
}
if split_res
&& buf2_headerline.contains("-----BEGIN PGP PUBLIC KEY BLOCK-----")
&& !key.contains("-----BEGIN PGP PRIVATE KEY BLOCK")
{
info!(context, "ignoring public key file '{}", name_f);
// it's fine: DC exports public with private
continue; continue;
} }
if let Err(err) = set_self_key(context, &key, set_default, false) {
error!(context, "set_self_key: {}", err);
continue;
}
imported_cnt += 1
} }
ensure!( let ccontent = if let Ok(content) = dc_read_file(context, &path_plus_name) {
imported_cnt > 0, key = String::from_utf8_lossy(&content).to_string();
"No private keys found in \"{}\".", CString::new(content).unwrap_or_default()
dir_name } else {
); continue;
Ok(()) };
/* only import if we have a private key */
let mut buf2_headerline = String::default();
let split_res: bool;
unsafe {
let buf2 = dc_strdup(ccontent.as_ptr());
split_res = dc_split_armored_data(
buf2,
&mut buf2_headerline,
ptr::null_mut(),
ptr::null_mut(),
ptr::null_mut(),
);
libc::free(buf2 as *mut libc::c_void);
}
if split_res
&& buf2_headerline.contains("-----BEGIN PGP PUBLIC KEY BLOCK-----")
&& !key.contains("-----BEGIN PGP PRIVATE KEY BLOCK")
{
info!(context, "ignoring public key file '{}", name_f);
// it's fine: DC exports public with private
continue;
}
if let Err(err) = set_self_key(context, &key, set_default, false) {
error!(context, "set_self_key: {}", err);
continue;
}
imported_cnt += 1
}
ensure!(
imported_cnt > 0,
"No private keys found in \"{}\".",
dir_name
);
Ok(())
} }
fn export_self_keys(context: &Context, dir: impl AsRef<Path>) -> Result<()> { fn export_self_keys(context: &Context, dir: impl AsRef<Path>) -> Result<()> {