Compare commits

..

1 Commits

Author SHA1 Message Date
link2xt
959ca06691 python: fail fast on the tests
Do not waste CI time running the rest of the tests
if CI is not going to be green anyway.
2023-03-22 12:35:27 +00:00
91 changed files with 2025 additions and 3437 deletions

View File

@@ -77,9 +77,6 @@ jobs:
- os: windows-latest
rust: 1.68.0
python: false # Python bindings compilation on Windows is not supported.
- os: macos-latest
rust: 1.68.0
python: 3.9
# Minimum Supported Rust Version = 1.64.0
#

View File

@@ -84,15 +84,35 @@ jobs:
contents: write
runs-on: "ubuntu-latest"
steps:
- name: Download built binaries
- name: Download deltachat-rpc-server-x86_64
uses: "actions/download-artifact@v3"
with:
name: "deltachat-rpc-server-x86_64"
path: "dist/deltachat-rpc-server-x86_64"
- name: Compose dist/ directory
run: |
mkdir dist
for x in x86_64 aarch64 armv7 win32.exe win64.exe; do
mv "deltachat-rpc-server-$x"/* "dist/deltachat-rpc-server-$x"
done
- name: Download deltachat-rpc-server-aarch64
uses: "actions/download-artifact@v3"
with:
name: "deltachat-rpc-server-aarch64"
path: "dist/deltachat-rpc-server-aarch64"
- name: Download deltachat-rpc-server-armv7
uses: "actions/download-artifact@v3"
with:
name: "deltachat-rpc-server-armv7"
path: "dist/deltachat-rpc-server-armv7"
- name: Download deltachat-rpc-server-win32.exe
uses: "actions/download-artifact@v3"
with:
name: "deltachat-rpc-server-win32.exe"
path: "dist/deltachat-rpc-server-win32.exe"
- name: Download deltachat-rpc-server-win64.exe
uses: "actions/download-artifact@v3"
with:
name: "deltachat-rpc-server-win64.exe"
path: "dist/deltachat-rpc-server-win64.exe"
- name: List downloaded artifacts
run: ls -l dist/
@@ -103,4 +123,4 @@ jobs:
run: |
gh release upload ${{ github.ref_name }} \
--repo ${{ github.repository }} \
dist/*
dist/deltachat-rpc-server-*

View File

@@ -3,26 +3,16 @@
## [Unreleased]
### Changes
- Add support for `--version` argument to `deltachat-rpc-server`. #4224
It can be used to check the installed version without starting the server.
### Fixes
- deltachat-rpc-client: fix bug in `Chat.send_message()`: invalid `MessageData` field `quotedMsg` instead of `quotedMsgId`
## [1.112.0] - 2023-03-23
### Changes
- "full message view" not needed because of footers that go to contact status #4151
- Pick up system's light/dark mode in generated message HTML #4150
- Support non-persistent configuration with DELTACHAT_* env
- Print deltachat-repl errors with causes. #4166
- Increase MSRV to 1.64. #4167
- Core takes care of stopping and re-starting IO itself where needed,
e.g. during backup creation.
It is no longer needed to call `dc_stop_io()`.
`dc_start_io()` can now be called at any time without harm. #4138
- Pick up system's light/dark mode in generated message HTML. #4150
- More accurate `maybe_add_bcc_self` device message text. #4175
- "Full message view" not needed because of footers that go to contact status. #4151
- Support non-persistent configuration with `DELTACHAT_*` env. #4154
- Print deltachat-repl errors with causes. #4166
e.g. during backup creation. It is no longer needed to call
dc_stop_io(). dc_start_io() can now be called at any time without
harm. #4138
- More accurate maybe_add_bcc_self device message text #4175
### Fixes
- Fix segmentation fault if `dc_context_unref()` is called during
@@ -33,13 +23,6 @@
- Do not emit "Failed to run incremental vacuum" warnings on success. #4160
- Ability to send backup over network and QR code to setup second device #4007
- Disable buffering during STARTTLS setup. #4190
- Add `DC_EVENT_IMAP_INBOX_IDLE` event to wait until the account
is ready for testing.
It is used to fix race condition between fetching
existing messages and starting the test. #4208
### API-Changes
- Use `tracing` crate for logging. #3960
## [1.111.0] - 2023-03-05
@@ -2331,6 +2314,5 @@ For a full list of changes, please see our closed Pull Requests:
https://github.com/deltachat/deltachat-core-rust/pulls?q=is%3Apr+is%3Aclosed
[unreleased]: https://github.com/deltachat/deltachat-core-rust/compare/v1.112.0...HEAD
[unreleased]: https://github.com/deltachat/deltachat-core-rust/compare/v1.111.0...HEAD
[1.111.0]: https://github.com/deltachat/deltachat-core-rust/compare/v1.110.0...v1.111.0
[1.112.0]: https://github.com/deltachat/deltachat-core-rust/compare/v1.111.0...v1.112.0

59
Cargo.lock generated
View File

@@ -271,15 +271,24 @@ dependencies = [
]
[[package]]
name = "async_zip"
version = "0.0.11"
name = "async_io_utilities"
version = "0.1.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c50d29ab7e2f9e808cca1a69ea56a36f4ff216f54a41a23aae1fd4afc05cc020"
checksum = "9b20cffc5590f4bf33f05f97a3ea587feba9c50d20325b401daa096b92ff7da0"
dependencies = [
"tokio",
]
[[package]]
name = "async_zip"
version = "0.0.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6a36d43bdefc7215b2b3a97edd03b1553b7969ad76551025eedd3b913c645f6e"
dependencies = [
"async-compression",
"async_io_utilities",
"chrono",
"crc32fast",
"log",
"pin-project",
"thiserror",
"tokio",
]
@@ -1121,7 +1130,7 @@ dependencies = [
[[package]]
name = "default-net"
version = "0.13.1"
source = "git+https://github.com/dignifiedquire/default-net.git?rev=7a257095bac009c4be0b93c2979801624fdd337b#7a257095bac009c4be0b93c2979801624fdd337b"
source = "git+https://github.com/dignifiedquire/default-net.git?branch=feat-android#7a257095bac009c4be0b93c2979801624fdd337b"
dependencies = [
"dlopen",
"libc",
@@ -1136,7 +1145,7 @@ dependencies = [
[[package]]
name = "deltachat"
version = "1.112.0"
version = "1.111.0"
dependencies = [
"ansi_term",
"anyhow",
@@ -1165,6 +1174,7 @@ dependencies = [
"kamadak-exif",
"lettre_email",
"libc",
"log",
"mailparse",
"num-derive",
"num-traits",
@@ -1173,7 +1183,6 @@ dependencies = [
"parking_lot",
"percent-encoding",
"pgp",
"pin-project-lite",
"pretty_env_logger",
"proptest",
"qrcodegen",
@@ -1202,9 +1211,6 @@ dependencies = [
"tokio-stream",
"tokio-tar",
"toml",
"tracing",
"tracing-futures",
"tracing-subscriber",
"trust-dns-resolver",
"url",
"uuid",
@@ -1212,7 +1218,7 @@ dependencies = [
[[package]]
name = "deltachat-jsonrpc"
version = "1.112.0"
version = "1.111.0"
dependencies = [
"anyhow",
"async-channel",
@@ -1235,7 +1241,7 @@ dependencies = [
[[package]]
name = "deltachat-repl"
version = "1.112.0"
version = "1.111.0"
dependencies = [
"ansi_term",
"anyhow",
@@ -1246,24 +1252,20 @@ dependencies = [
"rusqlite",
"rustyline",
"tokio",
"tracing",
"tracing-log",
"tracing-subscriber",
]
[[package]]
name = "deltachat-rpc-server"
version = "1.112.0"
version = "1.111.0"
dependencies = [
"anyhow",
"deltachat",
"deltachat-jsonrpc",
"env_logger 0.10.0",
"futures-lite",
"log",
"serde",
"serde_json",
"tokio",
"tracing",
"tracing-subscriber",
"yerpc",
]
@@ -1277,7 +1279,7 @@ dependencies = [
[[package]]
name = "deltachat_ffi"
version = "1.112.0"
version = "1.111.0"
dependencies = [
"anyhow",
"deltachat",
@@ -1286,12 +1288,10 @@ dependencies = [
"libc",
"num-traits",
"once_cell",
"rand 0.8.5",
"rand 0.7.3",
"serde_json",
"thiserror",
"tokio",
"tracing",
"tracing-subscriber",
]
[[package]]
@@ -2427,7 +2427,7 @@ checksum = "30e22bd8629359895450b59ea7a776c850561b96a3b1d31321c1949d9e6c9146"
[[package]]
name = "iroh"
version = "0.3.0"
source = "git+https://github.com/n0-computer/iroh?branch=main#9ac4cf6e770879c8b2ec0dc6666fe531469e68e3"
source = "git+https://github.com/n0-computer/iroh?branch=flub/ticket-multiple-addrs#aacd1d84e11b218e55e9622c4500973224904fe9"
dependencies = [
"abao",
"anyhow",
@@ -2465,7 +2465,6 @@ dependencies = [
"tracing",
"tracing-futures",
"tracing-subscriber",
"walkdir",
"webpki",
"x509-parser",
"zeroize",
@@ -3027,9 +3026,9 @@ checksum = "624a8340c38c1b80fd549087862da4ba43e08858af025b236e509b6649fc13d5"
[[package]]
name = "openssl"
version = "0.10.48"
version = "0.10.45"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "518915b97df115dd36109bfa429a48b8f737bd05508cf9588977b599648926d2"
checksum = "b102428fd03bc5edf97f62620f7298614c45cedf287c271e7ed450bbaf83f2e1"
dependencies = [
"bitflags",
"cfg-if",
@@ -3068,9 +3067,9 @@ dependencies = [
[[package]]
name = "openssl-sys"
version = "0.9.83"
version = "0.9.80"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "666416d899cf077260dac8698d60a60b435a46d57e82acb1be3d0dad87284e5b"
checksum = "23bbbf7854cd45b83958ebe919f0e8e516793727652e27fda10a8384cfc790b7"
dependencies = [
"autocfg",
"cc",

View File

@@ -1,6 +1,6 @@
[package]
name = "deltachat"
version = "1.112.0"
version = "1.111.0"
edition = "2021"
license = "MPL-2.0"
rust-version = "1.64"
@@ -25,7 +25,7 @@ panic = 'abort'
opt-level = "z"
[patch.crates-io]
default-net = { git = "https://github.com/dignifiedquire/default-net.git", rev="7a257095bac009c4be0b93c2979801624fdd337b" }
default-net = { git = "https://github.com/dignifiedquire/default-net.git", branch="feat-android" }
quinn-udp = { git = "https://github.com/quinn-rs/quinn", branch="main" }
quinn-proto = { git = "https://github.com/quinn-rs/quinn", branch="main" }
@@ -39,7 +39,7 @@ async-channel = "1.8.0"
async-imap = { git = "https://github.com/async-email/async-imap", branch = "master", default-features = false, features = ["runtime-tokio"] }
async-native-tls = { version = "0.5", default-features = false, features = ["runtime-tokio"] }
async-smtp = { version = "0.9", default-features = false, features = ["runtime-tokio"] }
async_zip = { version = "0.0.11", default-features = false, features = ["deflate", "fs"] }
async_zip = { version = "0.0.9", default-features = false, features = ["deflate"] }
backtrace = "0.3"
base64 = "0.21"
bitflags = "1.3"
@@ -54,7 +54,7 @@ hex = "0.4.0"
humansize = "2"
image = { version = "0.24.5", default-features=false, features = ["gif", "jpeg", "ico", "png", "pnm", "webp", "bmp"] }
# iroh = { version = "0.3.0", default-features = false }
iroh = { git = 'https://github.com/n0-computer/iroh', branch = "main" }
iroh = { git = 'https://github.com/n0-computer/iroh', branch = "flub/ticket-multiple-addrs" }
kamadak-exif = "0.5"
lettre_email = { git = "https://github.com/deltachat/lettre", branch = "master" }
libc = "0.2"
@@ -66,12 +66,8 @@ once_cell = "1.17.0"
percent-encoding = "2.2"
parking_lot = "0.12"
pgp = { version = "0.9", default-features = false }
pin-project-lite = { version = "0.2.9" }
pretty_env_logger = { version = "0.4", optional = true }
qrcodegen = "1.7.0"
tracing = "0.1"
tracing-subscriber = { version = "0.3", default-features=false, features = ["std", "env-filter", "registry", "fmt"] }
tracing-futures = "0.2"
quick-xml = "0.27"
rand = "0.8"
regex = "1.7"
@@ -102,6 +98,8 @@ uuid = { version = "1", features = ["serde", "v4"] }
ansi_term = "0.12.0"
criterion = { version = "0.4.0", features = ["async_tokio"] }
futures-lite = "1.12"
log = "0.4"
pretty_env_logger = "0.4"
proptest = { version = "1", default-features = false, features = ["std"] }
tempfile = "3"
testdir = "0.7.2"

View File

@@ -19,7 +19,7 @@ $ curl https://sh.rustup.rs -sSf | sh
Compile and run Delta Chat Core command line utility, using `cargo`:
```
$ cargo run -p deltachat-repl -- ~/deltachat-db
$ RUST_LOG=deltachat_repl=info cargo run -p deltachat-repl -- ~/deltachat-db
```
where ~/deltachat-db is the database file. Delta Chat will create it if it does not exist.

View File

@@ -2,15 +2,15 @@ use criterion::{criterion_group, criterion_main, Criterion};
use deltachat::context::Context;
use deltachat::stock_str::StockStrings;
use deltachat::{Event, EventType, Events};
use deltachat::{info, Event, EventType, Events};
use tempfile::tempdir;
async fn send_events_benchmark(context: &Context) {
let emitter = context.get_event_emitter();
for _i in 0..1_000_000 {
context.emit_event(EventType::Info("interesting event...".to_string()));
info!(context, "interesting event...");
}
context.emit_event(EventType::Info("DONE".to_string()));
info!(context, "DONE");
loop {
match emitter.recv().await.unwrap() {

View File

@@ -1,6 +1,6 @@
[package]
name = "deltachat_ffi"
version = "1.112.0"
version = "1.111.0"
description = "Deltachat FFI"
edition = "2018"
readme = "README.md"
@@ -23,10 +23,8 @@ serde_json = "1.0"
tokio = { version = "1", features = ["rt-multi-thread"] }
anyhow = "1"
thiserror = "1"
rand = "0.8"
rand = "0.7"
once_cell = "1.17.0"
tracing = "0.1"
tracing-subscriber = { version = "0.3", default-features=false, features = ["std", "registry"] }
[features]
default = ["vendored"]

View File

@@ -5705,14 +5705,6 @@ void dc_event_unref(dc_event_t* event);
*/
#define DC_EVENT_IMAP_MESSAGE_MOVED 105
/**
* Emitted before going into IDLE on the Inbox folder.
*
* @param data1 0
* @param data2 0
*/
#define DC_EVENT_IMAP_INBOX_IDLE 106
/**
* Emitted when a new blob file was successfully written
*
@@ -6998,11 +6990,6 @@ void dc_event_unref(dc_event_t* event);
/// `%1$s` will be replaced by name and address of the contact.
#define DC_STR_PROTECTION_DISABLED_BY_OTHER 161
/// "Scan to set up second device for %1$s"
///
/// `%1$s` will be replaced by name and address of the account.
#define DC_STR_BACKUP_TRANSFER_QR 162
/**
* @}
*/

File diff suppressed because it is too large Load Diff

View File

@@ -1,6 +1,6 @@
[package]
name = "deltachat-jsonrpc"
version = "1.112.0"
version = "1.111.0"
description = "DeltaChat JSON-RPC API"
edition = "2021"
default-run = "deltachat-jsonrpc-server"

View File

@@ -47,9 +47,6 @@ pub enum JSONRPCEventType {
msg: String,
},
/// Emitted before going into IDLE on the Inbox folder.
ImapInboxIdle,
/// Emitted when a new file in the $BLOBDIR was created
NewBlobFile {
file: String,
@@ -296,7 +293,6 @@ impl From<EventType> for JSONRPCEventType {
EventType::SmtpMessageSent(msg) => SmtpMessageSent { msg },
EventType::ImapMessageDeleted(msg) => ImapMessageDeleted { msg },
EventType::ImapMessageMoved(msg) => ImapMessageMoved { msg },
EventType::ImapInboxIdle => ImapInboxIdle,
EventType::NewBlobFile(file) => NewBlobFile { file },
EventType::DeletedBlobFile(file) => DeletedBlobFile { file },
EventType::Warning(msg) => Warning { msg },

File diff suppressed because it is too large Load Diff

View File

@@ -55,5 +55,5 @@
},
"type": "module",
"types": "dist/deltachat.d.ts",
"version": "1.112.0"
"version": "1.111.0"
}

View File

@@ -1,6 +1,6 @@
[package]
name = "deltachat-repl"
version = "1.112.0"
version = "1.111.0"
license = "MPL-2.0"
edition = "2021"
@@ -14,9 +14,6 @@ pretty_env_logger = "0.4"
rusqlite = "0.28"
rustyline = "11"
tokio = { version = "1", features = ["fs", "rt-multi-thread", "macros"] }
tracing = "0.1"
tracing-subscriber = { version = "0.3", default-features=false, features = ["std", "env-filter", "registry", "fmt"] }
tracing-log = "0.1.3"
[features]
default = ["vendored"]

View File

@@ -465,7 +465,7 @@ pub async fn cmdline(context: Context, line: &str, chat_id: &mut ChatId) -> Resu
continue_key_transfer(&context, MsgId::new(arg1.parse()?), arg2).await?;
}
"has-backup" => {
has_backup(blobdir).await?;
has_backup(&context, blobdir).await?;
}
"export-backup" => {
let dir = dirs::home_dir().unwrap_or_default();
@@ -508,7 +508,7 @@ pub async fn cmdline(context: Context, line: &str, chat_id: &mut ChatId) -> Resu
imex(&context, ImexMode::ImportSelfKeys, arg1.as_ref(), None).await?;
}
"export-setup" => {
let setup_code = create_setup_code();
let setup_code = create_setup_code(&context);
let file_name = blobdir.join("autocrypt-setup-message.html");
let file_content = render_setup_file(&context, &setup_code).await?;
fs::write(&file_name, file_content).await?;
@@ -1244,7 +1244,7 @@ pub async fn cmdline(context: Context, line: &str, chat_id: &mut ChatId) -> Resu
let socks5_enabled = context
.get_config_bool(config::Config::Socks5Enabled)
.await?;
match provider::get_provider_info(arg1, socks5_enabled).await {
match provider::get_provider_info(&context, arg1, socks5_enabled).await {
Some(info) => {
println!("Information for provider belonging to {arg1}:");
println!("status: {}", info.status as u32);

View File

@@ -22,6 +22,7 @@ use deltachat::qr_code_generator::get_securejoin_qr_svg;
use deltachat::securejoin::*;
use deltachat::stock_str::StockStrings;
use deltachat::{EventType, Events};
use log::{error, info, warn};
use rustyline::completion::{Completer, FilenameCompleter, Pair};
use rustyline::error::ReadlineError;
use rustyline::highlight::{Highlighter, MatchingBracketHighlighter};
@@ -32,9 +33,6 @@ use rustyline::{
};
use tokio::fs;
use tokio::runtime::Handle;
use tracing::{error, info, warn};
use tracing_log::LogTracer;
use tracing_subscriber::{fmt, EnvFilter};
mod cmdline;
use self::cmdline::*;
@@ -483,12 +481,7 @@ async fn handle_cmd(
#[tokio::main]
async fn main() -> Result<(), Error> {
// Convert `log` records into `tracing` events.
LogTracer::init()?;
// Setup `tracing` subscriber according to `RUST_LOG` environment variable.
let filter = EnvFilter::try_from_default_env().or_else(|_| EnvFilter::try_new("info"))?;
fmt().with_env_filter(filter).with_writer(io::stderr).init();
let _ = pretty_env_logger::try_init();
let args = std::env::args().collect();
start(args).await?;

View File

@@ -126,7 +126,7 @@ class Chat:
"file": file,
"location": location,
"overrideSenderName": override_sender_name,
"quotedMessageId": quoted_msg,
"quotedMsg": quoted_msg,
}
msg_id = await self._rpc.send_msg(self.account.id, self.id, draft)
return Message(self.account, msg_id)

View File

@@ -31,7 +31,6 @@ class EventType(str, Enum):
SMTP_MESSAGE_SENT = "SmtpMessageSent"
IMAP_MESSAGE_DELETED = "ImapMessageDeleted"
IMAP_MESSAGE_MOVED = "ImapMessageMoved"
IMAP_INBOX_IDLE = "ImapInboxIdle"
NEW_BLOB_FILE = "NewBlobFile"
DELETED_BLOB_FILE = "DeletedBlobFile"
WARNING = "Warning"

View File

@@ -55,11 +55,6 @@ class ACFactory:
async def get_online_account(self) -> Account:
account = await self.new_configured_account()
await account.start_io()
while True:
event = await account.wait_for_event()
print(event)
if event.type == EventType.IMAP_INBOX_IDLE:
break
return account
async def get_online_accounts(self, num: int) -> List[Account]:

View File

@@ -6,7 +6,7 @@ envlist =
[testenv]
commands =
pytest {posargs}
pytest --exitfirst {posargs}
setenv =
# Avoid stack overflow when Rust core is built without optimizations.
RUST_MIN_STACK=8388608
@@ -15,7 +15,6 @@ passenv =
deps =
pytest
pytest-asyncio
pytest-timeout
aiohttp
aiodns
@@ -28,6 +27,3 @@ deps =
commands =
black --quiet --check --diff src/ examples/ tests/
ruff src/ examples/ tests/
[pytest]
timeout = 60

View File

@@ -1,6 +1,6 @@
[package]
name = "deltachat-rpc-server"
version = "1.112.0"
version = "1.111.0"
description = "DeltaChat JSON-RPC server"
edition = "2021"
readme = "README.md"
@@ -9,17 +9,19 @@ license = "MPL-2.0"
keywords = ["deltachat", "chat", "openpgp", "email", "encryption"]
categories = ["cryptography", "std", "email"]
[[bin]]
name = "deltachat-rpc-server"
[dependencies]
deltachat-jsonrpc = { path = "../deltachat-jsonrpc", default-features = false }
deltachat = { path = "..", default-features = false }
anyhow = "1"
env_logger = { version = "0.10.0" }
futures-lite = "1.12.0"
log = "0.4"
serde_json = "1.0.91"
serde = { version = "1.0", features = ["derive"] }
tokio = { version = "1.25.0", features = ["io-std"] }
tracing-subscriber = { version = "0.3", default-features=false, features = ["std", "env-filter", "fmt"] }
tracing = { version = "0.1" }
yerpc = { version = "0.4.0", features = ["anyhow_expose"] }
[features]

View File

@@ -1,51 +1,26 @@
use std::env;
///! Delta Chat core RPC server.
///!
///! It speaks JSON Lines over stdio.
use std::path::PathBuf;
use anyhow::{anyhow, Context as _, Result};
use deltachat::constants::DC_VERSION_STR;
use anyhow::Result;
use deltachat_jsonrpc::api::events::event_to_json_rpc_notification;
use deltachat_jsonrpc::api::{Accounts, CommandApi};
use futures_lite::stream::StreamExt;
use tokio::io::{self, AsyncBufReadExt, BufReader};
use tokio::task::JoinHandle;
use tracing::{info, trace};
use tracing_subscriber::{fmt, EnvFilter};
use yerpc::{RpcClient, RpcSession};
#[tokio::main(flavor = "multi_thread")]
async fn main() -> Result<()> {
let mut args = env::args_os();
let _program_name = args.next().context("no command line arguments found")?;
if let Some(first_arg) = args.next() {
if first_arg.to_str() == Some("--version") {
if let Some(arg) = args.next() {
return Err(anyhow!("Unrecognized argument {:?}", arg));
}
eprintln!("{}", &*DC_VERSION_STR);
return Ok(());
} else {
return Err(anyhow!("Unrecognized option {:?}", first_arg));
}
}
if let Some(arg) = args.next() {
return Err(anyhow!("Unrecognized argument {:?}", arg));
}
let filter = EnvFilter::try_from_default_env().or_else(|_| EnvFilter::try_new("info"))?;
fmt()
.with_env_filter(filter)
.with_writer(std::io::stderr)
.init();
env_logger::Builder::from_env(env_logger::Env::default().default_filter_or("info")).init();
let path = std::env::var("DC_ACCOUNTS_PATH").unwrap_or_else(|_| "accounts".to_string());
info!("Starting with accounts directory `{}`.", path);
log::info!("Starting with accounts directory `{}`.", path);
let accounts = Accounts::new(PathBuf::from(&path)).await?;
let events = accounts.get_event_emitter();
info!("Creating JSON-RPC API.");
log::info!("Creating JSON-RPC API.");
let state = CommandApi::new(accounts);
let (client, mut out_receiver) = RpcClient::new();
@@ -64,7 +39,7 @@ async fn main() -> Result<()> {
let send_task: JoinHandle<anyhow::Result<()>> = tokio::spawn(async move {
while let Some(message) = out_receiver.next().await {
let message = serde_json::to_string(&message)?;
trace!("RPC send {message}");
log::trace!("RPC send {}", message);
println!("{message}");
}
Ok(())
@@ -75,13 +50,13 @@ async fn main() -> Result<()> {
let stdin = io::stdin();
let mut lines = BufReader::new(stdin).lines();
while let Some(message) = lines.next_line().await? {
trace!("RPC recv {}", message);
log::trace!("RPC recv {}", message);
let session = session.clone();
tokio::spawn(async move {
session.handle_incoming(&message).await;
});
}
info!("EOF reached on stdin");
log::info!("EOF reached on stdin");
Ok(())
});

View File

@@ -1,5 +1,3 @@
use tempfile::tempdir;
use deltachat::chat::{self, ChatId};
use deltachat::chatlist::*;
use deltachat::config;
@@ -8,25 +6,24 @@ use deltachat::context::*;
use deltachat::message::Message;
use deltachat::stock_str::StockStrings;
use deltachat::{EventType, Events};
use tracing::{error, info, warn};
use tracing_subscriber::{fmt, EnvFilter};
use tempfile::tempdir;
fn cb(event: EventType) {
match event {
EventType::ConfigureProgress { progress, .. } => {
info!("progress: {progress}");
log::info!("progress: {}", progress);
}
EventType::Info(msg) => {
info!("{msg}");
log::info!("{}", msg);
}
EventType::Warning(msg) => {
warn!("{msg}");
log::warn!("{}", msg);
}
EventType::Error(msg) => {
error!("{msg}");
log::error!("{}", msg);
}
event => {
info!("{event:?}");
log::info!("{:?}", event);
}
}
}
@@ -34,22 +31,16 @@ fn cb(event: EventType) {
/// Run with `RUST_LOG=simple=info cargo run --release --example simple -- email pw`.
#[tokio::main]
async fn main() {
let filter = EnvFilter::try_from_default_env()
.or_else(|_| EnvFilter::try_new("info"))
.unwrap();
fmt()
.with_env_filter(filter)
.with_writer(std::io::stderr)
.init();
pretty_env_logger::try_init_timed().ok();
let dir = tempdir().unwrap();
let dbfile = dir.path().join("db.sqlite");
info!("creating database {:?}", dbfile);
log::info!("creating database {:?}", dbfile);
let ctx = Context::new(&dbfile, 0, Events::new(), StockStrings::new())
.await
.expect("Failed to create context");
let info = ctx.get_info().await;
info!("info: {:#?}", info);
log::info!("info: {:#?}", info);
let events = ctx.get_event_emitter();
let events_spawn = tokio::task::spawn(async move {
@@ -58,7 +49,7 @@ async fn main() {
}
});
info!("configuring");
log::info!("configuring");
let args = std::env::args().collect::<Vec<String>>();
assert_eq!(args.len(), 3, "requires email password");
let email = args[1].clone();
@@ -72,9 +63,9 @@ async fn main() {
ctx.configure().await.unwrap();
info!("------ RUN ------");
log::info!("------ RUN ------");
ctx.start_io().await;
info!("--- SENDING A MESSAGE ---");
log::info!("--- SENDING A MESSAGE ---");
let contact_id = Contact::create(&ctx, "dignifiedquire", "dignifiedquire@gmail.com")
.await
@@ -82,7 +73,7 @@ async fn main() {
let chat_id = ChatId::create_for_contact(&ctx, contact_id).await.unwrap();
for i in 0..1 {
info!("sending message {}", i);
log::info!("sending message {}", i);
chat::send_text_msg(&ctx, chat_id, format!("Hi, here is my {i}nth message!"))
.await
.unwrap();
@@ -91,19 +82,19 @@ async fn main() {
// wait for the message to be sent out
tokio::time::sleep(std::time::Duration::from_secs(1)).await;
info!("fetching chats..");
log::info!("fetching chats..");
let chats = Chatlist::try_load(&ctx, 0, None, None).await.unwrap();
for i in 0..chats.len() {
let msg = Message::load_from_db(&ctx, chats.get_msg_id(i).unwrap().unwrap())
.await
.unwrap();
info!("[{i}] msg: {msg:?}");
log::info!("[{}] msg: {:?}", i, msg);
}
info!("stopping");
log::info!("stopping");
ctx.stop_io().await;
info!("closing");
log::info!("closing");
drop(ctx);
events_spawn.await.unwrap();
}

1287
fuzz/Cargo.lock generated

File diff suppressed because it is too large Load Diff

View File

@@ -37,7 +37,6 @@ module.exports = {
DC_EVENT_ERROR: 400,
DC_EVENT_ERROR_SELF_NOT_IN_GROUP: 410,
DC_EVENT_IMAP_CONNECTED: 102,
DC_EVENT_IMAP_INBOX_IDLE: 106,
DC_EVENT_IMAP_MESSAGE_DELETED: 104,
DC_EVENT_IMAP_MESSAGE_MOVED: 105,
DC_EVENT_IMEX_FILE_WRITTEN: 2052,
@@ -113,7 +112,6 @@ module.exports = {
DC_QR_ADDR: 320,
DC_QR_ASK_VERIFYCONTACT: 200,
DC_QR_ASK_VERIFYGROUP: 202,
DC_QR_BACKUP: 251,
DC_QR_ERROR: 400,
DC_QR_FPR_MISMATCH: 220,
DC_QR_FPR_OK: 210,
@@ -151,7 +149,6 @@ module.exports = {
DC_STR_AEAP_EXPLANATION_AND_LINK: 123,
DC_STR_ARCHIVEDCHATS: 40,
DC_STR_AUDIO: 11,
DC_STR_BACKUP_TRANSFER_QR: 162,
DC_STR_BAD_TIME_MSG_BODY: 85,
DC_STR_BROADCAST_LIST: 115,
DC_STR_CANNOT_LOGIN: 60,

View File

@@ -8,7 +8,6 @@ module.exports = {
103: 'DC_EVENT_SMTP_MESSAGE_SENT',
104: 'DC_EVENT_IMAP_MESSAGE_DELETED',
105: 'DC_EVENT_IMAP_MESSAGE_MOVED',
106: 'DC_EVENT_IMAP_INBOX_IDLE',
150: 'DC_EVENT_NEW_BLOB_FILE',
151: 'DC_EVENT_DELETED_BLOB_FILE',
300: 'DC_EVENT_WARNING',

View File

@@ -37,7 +37,6 @@ export enum C {
DC_EVENT_ERROR = 400,
DC_EVENT_ERROR_SELF_NOT_IN_GROUP = 410,
DC_EVENT_IMAP_CONNECTED = 102,
DC_EVENT_IMAP_INBOX_IDLE = 106,
DC_EVENT_IMAP_MESSAGE_DELETED = 104,
DC_EVENT_IMAP_MESSAGE_MOVED = 105,
DC_EVENT_IMEX_FILE_WRITTEN = 2052,
@@ -113,7 +112,6 @@ export enum C {
DC_QR_ADDR = 320,
DC_QR_ASK_VERIFYCONTACT = 200,
DC_QR_ASK_VERIFYGROUP = 202,
DC_QR_BACKUP = 251,
DC_QR_ERROR = 400,
DC_QR_FPR_MISMATCH = 220,
DC_QR_FPR_OK = 210,
@@ -151,7 +149,6 @@ export enum C {
DC_STR_AEAP_EXPLANATION_AND_LINK = 123,
DC_STR_ARCHIVEDCHATS = 40,
DC_STR_AUDIO = 11,
DC_STR_BACKUP_TRANSFER_QR = 162,
DC_STR_BAD_TIME_MSG_BODY = 85,
DC_STR_BROADCAST_LIST = 115,
DC_STR_CANNOT_LOGIN = 60,
@@ -292,7 +289,6 @@ export const EventId2EventName: { [key: number]: string } = {
103: 'DC_EVENT_SMTP_MESSAGE_SENT',
104: 'DC_EVENT_IMAP_MESSAGE_DELETED',
105: 'DC_EVENT_IMAP_MESSAGE_MOVED',
106: 'DC_EVENT_IMAP_INBOX_IDLE',
150: 'DC_EVENT_NEW_BLOB_FILE',
151: 'DC_EVENT_DELETED_BLOB_FILE',
300: 'DC_EVENT_WARNING',

View File

@@ -60,5 +60,5 @@
"test:mocha": "mocha -r esm node/test/test.js --growl --reporter=spec --bail --exit"
},
"types": "node/dist/index.d.ts",
"version": "1.112.0"
"version": "1.111.0"
}

View File

@@ -95,7 +95,6 @@ class Account:
ptr,
lib.dc_context_unref,
)
self._id = lib.dc_get_id(self._dc_context)
self._shutdown_event = Event()
self._event_thread = EventThread(self)
@@ -140,10 +139,6 @@ class Account:
"""return dictionary of built config parameters."""
return get_dc_info_as_dict(self._dc_context)
def get_id(self) -> int:
"""Return account ID."""
return self._id
def dump_account_info(self, logfile):
def log(*args, **kwargs):
kwargs["file"] = logfile

View File

@@ -190,7 +190,7 @@ class FFIEventTracker:
- ac2 is still running FetchExsistingMsgs job and thinks it's an existing, old message
- therefore no DC_EVENT_INCOMING_MSG is sent
"""
self.get_matching("DC_EVENT_IMAP_INBOX_IDLE")
self.get_info_contains("INBOX: Idle entering")
def wait_next_incoming_message(self):
"""wait for and return next incoming message."""
@@ -263,7 +263,6 @@ class EventThread(threading.Thread):
self._process_event(event)
def _process_event(self, event) -> None:
account_id = lib.dc_event_get_account_id(event)
evt = lib.dc_event_get_id(event)
data1 = lib.dc_event_get_data1_int(event)
# the following code relates to the deltachat/_build.py's helper
@@ -273,7 +272,6 @@ class EventThread(threading.Thread):
data2 = from_optional_dc_charpointer(lib.dc_event_get_data2_str(event))
else:
data2 = lib.dc_event_get_data2_int(event)
assert account_id == self.account.get_id(), f"data2={data2}"
lib.dc_event_unref(event)
ffi_event = FFIEvent(name=evt_name, data1=data1, data2=data2)

View File

@@ -0,0 +1,19 @@
import os
import subprocess
import sys
if __name__ == "__main__":
assert len(sys.argv) == 2
workspacedir = sys.argv[1]
for relpath in os.listdir(workspacedir):
if relpath.startswith("deltachat"):
p = os.path.join(workspacedir, relpath)
subprocess.check_call(
[
"auditwheel",
"repair",
p,
"-w",
workspacedir,
],
)

View File

@@ -220,16 +220,16 @@ def test_fetch_existing(acfactory, lp, mvbox_move):
acfactory.bring_accounts_online()
assert_folders_configured(ac1)
lp.sec("send out message with bcc to ourselves")
ac1.set_config("bcc_self", "1")
chat = acfactory.get_accepted_chat(ac1, ac2)
chat.send_text("message text")
assert ac1.direct_imap.select_config_folder("mvbox" if mvbox_move else "inbox")
with ac1.direct_imap.idle() as idle1:
lp.sec("send out message with bcc to ourselves")
ac1.set_config("bcc_self", "1")
chat = acfactory.get_accepted_chat(ac1, ac2)
chat.send_text("message text")
assert_folders_configured(ac1)
lp.sec("wait until the bcc_self message arrives in correct folder and is marked seen")
if mvbox_move:
ac1._evtracker.get_info_contains("Marked messages [0-9]+ in folder DeltaChat as seen.")
else:
ac1._evtracker.get_info_contains("Marked messages [0-9]+ in folder INBOX as seen.")
lp.sec("wait until the bcc_self message arrives in correct folder and is marked seen")
assert idle1.wait_for_seen()
assert_folders_configured(ac1)
lp.sec("create a cloned ac1 and fetch contact history during configure")
@@ -271,12 +271,12 @@ def test_fetch_existing_msgs_group_and_single(acfactory, lp):
ac1._evtracker.wait_next_incoming_message()
lp.sec("send out message with bcc to ourselves")
ac1.set_config("bcc_self", "1")
ac1_ac2_chat = ac1.create_chat(ac2)
ac1_ac2_chat.send_text("outgoing, encrypted direct message, creating a chat")
# wait until the bcc_self message arrives
ac1._evtracker.get_info_contains("Marked messages [0-9]+ in folder INBOX as seen.")
with ac1.direct_imap.idle() as idle1:
ac1.set_config("bcc_self", "1")
ac1_ac2_chat = ac1.create_chat(ac2)
ac1_ac2_chat.send_text("outgoing, encrypted direct message, creating a chat")
# wait until the bcc_self message arrives
assert idle1.wait_for_seen()
lp.sec("Clone online account and let it fetch the existing messages")
ac1_clone = acfactory.new_online_configuring_account(cloned_from=ac1)

View File

@@ -134,27 +134,22 @@ def test_one_account_send_bcc_setting(acfactory, lp):
ac1.set_config("bcc_self", "1")
lp.sec("send out message with bcc to ourselves")
msg_out = chat.send_text("message2")
with ac1.direct_imap.idle() as idle1:
msg_out = chat.send_text("message2")
# wait for send out (BCC)
ev = ac1._evtracker.get_matching("DC_EVENT_SMTP_MESSAGE_SENT")
assert ac1.get_config("bcc_self") == "1"
# wait for send out (BCC)
ev = ac1._evtracker.get_matching("DC_EVENT_SMTP_MESSAGE_SENT")
assert ac1.get_config("bcc_self") == "1"
# Second client receives only second message, but not the first.
# now make sure we are sending message to ourselves too
assert self_addr in ev.data2
assert other_addr in ev.data2
assert idle1.wait_for_seen()
# Second client receives only second message, but not the first
ev_msg = ac1_clone._evtracker.wait_next_messages_changed()
assert ev_msg.text == msg_out.text
# now make sure we are sending message to ourselves too
assert self_addr in ev.data2
assert other_addr in ev.data2
# BCC-self messages are marked as seen by the sender device.
ac1._evtracker.get_info_contains("Marked messages [0-9]+ in folder INBOX as seen.")
# Check that the message is marked as seen on IMAP.
ac1.direct_imap.select_folder("Inbox")
assert len(list(ac1.direct_imap.conn.fetch(AND(seen=True)))) == 1
def test_send_file_twice_unicode_filename_mangling(tmpdir, acfactory, lp):
ac1, ac2 = acfactory.get_online_accounts(2)
@@ -518,22 +513,22 @@ def test_send_and_receive_message_markseen(acfactory, lp):
msg4 = ac2._evtracker.wait_next_incoming_message()
lp.sec("mark messages as seen on ac2, wait for changes on ac1")
ac2.mark_seen_messages([msg2, msg4])
ev = ac2._evtracker.get_matching("DC_EVENT_MSGS_NOTICED")
assert msg2.chat.id == msg4.chat.id
assert ev.data1 == msg2.chat.id
assert ev.data2 == 0
ac2._evtracker.get_info_contains("Marked messages .* in folder INBOX as seen.")
with ac1.direct_imap.idle() as idle1:
with ac2.direct_imap.idle() as idle2:
ac2.mark_seen_messages([msg2, msg4])
ev = ac2._evtracker.get_matching("DC_EVENT_MSGS_NOTICED")
assert msg2.chat.id == msg4.chat.id
assert ev.data1 == msg2.chat.id
assert ev.data2 == 0
idle2.wait_for_seen()
lp.step("1")
for _i in range(2):
ev = ac1._evtracker.get_matching("DC_EVENT_MSG_READ")
assert ev.data1 > const.DC_CHAT_ID_LAST_SPECIAL
assert ev.data2 > const.DC_MSG_ID_LAST_SPECIAL
lp.step("2")
# Check that ac1 marks the read receipt as read.
ac1._evtracker.get_info_contains("Marked messages .* in folder INBOX as seen.")
lp.step("1")
for _i in range(2):
ev = ac1._evtracker.get_matching("DC_EVENT_MSG_READ")
assert ev.data1 > const.DC_CHAT_ID_LAST_SPECIAL
assert ev.data2 > const.DC_MSG_ID_LAST_SPECIAL
lp.step("2")
idle1.wait_for_seen() # Check that ac1 marks the read receipt as read
assert msg1.is_out_mdn_received()
assert msg3.is_out_mdn_received()
@@ -618,24 +613,18 @@ def test_markseen_message_and_mdn(acfactory, mvbox_move):
# Do not send BCC to self, we only want to test MDN on ac1.
ac1.set_config("bcc_self", "0")
acfactory.get_accepted_chat(ac1, ac2).send_text("hi")
msg = ac2._evtracker.wait_next_incoming_message()
ac2.mark_seen_messages([msg])
folder = "mvbox" if mvbox_move else "inbox"
for ac in [ac1, ac2]:
if mvbox_move:
ac._evtracker.get_info_contains("Marked messages [0-9]+ in folder DeltaChat as seen.")
else:
ac._evtracker.get_info_contains("Marked messages [0-9]+ in folder INBOX as seen.")
ac1.direct_imap.select_config_folder(folder)
ac2.direct_imap.select_config_folder(folder)
with ac1.direct_imap.idle() as idle1:
with ac2.direct_imap.idle() as idle2:
acfactory.get_accepted_chat(ac1, ac2).send_text("hi")
msg = ac2._evtracker.wait_next_incoming_message()
# Check that the mdn is marked as seen
assert len(list(ac1.direct_imap.conn.fetch(AND(seen=True)))) == 1
# Check original message is marked as seen
assert len(list(ac2.direct_imap.conn.fetch(AND(seen=True)))) == 1
ac2.mark_seen_messages([msg])
idle2.wait_for_seen() # Check original message is marked as seen
idle1.wait_for_seen() # Check that the mdn is marked as seen
def test_reply_privately(acfactory):
@@ -689,24 +678,23 @@ def test_mdn_asymmetric(acfactory, lp):
assert len(msg.chat.get_messages()) == 1
lp.sec("ac2: mark incoming message as seen")
ac2.mark_seen_messages([msg])
ac1.direct_imap.select_config_folder("mvbox")
with ac1.direct_imap.idle() as idle1:
lp.sec("ac2: mark incoming message as seen")
ac2.mark_seen_messages([msg])
lp.sec("ac1: waiting for incoming activity")
# MDN should be moved even though MDNs are already disabled
ac1._evtracker.get_matching("DC_EVENT_IMAP_MESSAGE_MOVED")
lp.sec("ac1: waiting for incoming activity")
# MDN should be moved even though MDNs are already disabled
ac1._evtracker.get_matching("DC_EVENT_IMAP_MESSAGE_MOVED")
assert len(chat.get_messages()) == 1
assert len(chat.get_messages()) == 1
# Wait for the message to be marked as seen on IMAP.
ac1._evtracker.get_info_contains("Marked messages 1 in folder DeltaChat as seen.")
# Wait for the message to be marked as seen on IMAP.
assert idle1.wait_for_seen()
# MDN is received even though MDNs are already disabled
assert msg_out.is_out_mdn_received()
ac1.direct_imap.select_config_folder("mvbox")
assert len(list(ac1.direct_imap.conn.fetch(AND(seen=True)))) == 1
def test_send_and_receive_will_encrypt_decrypt(acfactory, lp):
ac1, ac2 = acfactory.get_online_accounts(2)

View File

@@ -8,7 +8,7 @@ envlist =
[testenv]
commands =
pytest -n6 --extra-info -v -rsXx --ignored --strict-tls {posargs: tests examples}
pytest -n6 --exitfirst --extra-info -v -rsXx --ignored --strict-tls {posargs: tests examples}
pip wheel . -w {toxworkdir}/wheelhouse --no-deps
setenv =
# Avoid stack overflow when Rust core is built without optimizations.
@@ -33,6 +33,18 @@ passenv =
CARGO_TARGET_DIR
RUSTC_WRAPPER
[testenv:auditwheels]
skipsdist = True
deps = auditwheel
passenv =
DCC_RS_DEV
DCC_RS_TARGET
AUDITWHEEL_ARCH
AUDITWHEEL_PLAT
AUDITWHEEL_POLICY
commands =
python tests/auditwheels.py {toxworkdir}/wheelhouse
[testenv:lint]
skipsdist = True
skip_install = True

View File

@@ -31,9 +31,7 @@ unset DCC_NEW_TMP_EMAIL
# Try to build wheels for a range of interpreters, but don't fail if they are not available.
# E.g. musllinux_1_1 does not have PyPy interpreters as of 2022-07-10
tox --workdir "$TOXWORKDIR" -e py37,py38,py39,py310,py311,pypy37,pypy38,pypy39 --skip-missing-interpreters true
auditwheel repair "$TOXWORKDIR/wheelhouse/deltachat*" -w "$TOXWORKDIR/wheelhouse"
tox --workdir "$TOXWORKDIR" -e py37,py38,py39,py310,py311,pypy37,pypy38,pypy39,auditwheels --skip-missing-interpreters true
echo -----------------------

View File

@@ -93,7 +93,7 @@ def main():
if "alpha" not in newversion:
for line in open("CHANGELOG.md"):
## 1.25.0
if line.startswith("## [") and line[4:].strip().startswith(newversion):
if line.startswith("## ") and line[2:].strip().startswith(newversion):
break
else:
raise SystemExit(

View File

@@ -262,7 +262,7 @@ impl Accounts {
pub async fn stop_io(&self) {
// Sending an event here wakes up event loop even
// if there are no accounts.
self.emit_event(EventType::Info("Stopping IO for all accounts.".to_string()));
info!(self, "Stopping IO for all accounts");
for account in self.accounts.values() {
account.stop_io().await;
}
@@ -291,11 +291,6 @@ impl Accounts {
pub fn get_event_emitter(&self) -> EventEmitter {
self.events.get_emitter()
}
/// Returns event channel.
pub fn events(&self) -> Events {
self.events.clone()
}
}
/// Configuration file name.

View File

@@ -14,7 +14,6 @@ use num_traits::FromPrimitive;
use tokio::io::AsyncWriteExt;
use tokio::{fs, io};
use tokio_stream::wrappers::ReadDirStream;
use tracing::{error, info, warn};
use crate::config::Config;
use crate::constants::{
@@ -330,7 +329,7 @@ impl<'a> BlobObject<'a> {
// max_bytes is 20_000 bytes: Outlook servers don't allow headers larger than 32k.
// 32 / 4 * 3 = 24k if you account for base64 encoding. To be safe, we reduced this to 20k.
if let Some(new_name) = self.recode_to_size(blob_abs, img_wh, Some(20_000))? {
if let Some(new_name) = self.recode_to_size(context, blob_abs, img_wh, Some(20_000))? {
self.name = new_name;
}
Ok(())
@@ -352,7 +351,10 @@ impl<'a> BlobObject<'a> {
MediaQuality::Worse => WORSE_IMAGE_SIZE,
};
if self.recode_to_size(blob_abs, img_wh, None)?.is_some() {
if self
.recode_to_size(context, blob_abs, img_wh, None)?
.is_some()
{
return Err(format_err!(
"Internal error: recode_to_size(..., None) shouldn't change the name of the image"
));
@@ -362,20 +364,21 @@ impl<'a> BlobObject<'a> {
fn recode_to_size(
&self,
context: &Context,
mut blob_abs: PathBuf,
mut img_wh: u32,
max_bytes: Option<usize>,
) -> Result<Option<String>> {
tokio::task::block_in_place(move || {
let mut img = image::open(&blob_abs).context("image recode failure")?;
let orientation = self.get_exif_orientation();
let orientation = self.get_exif_orientation(context);
let mut encoded = Vec::new();
let mut changed_name = None;
let exceeds_width = img.width() > img_wh || img.height() > img_wh;
let do_scale =
exceeds_width || encoded_img_exceeds_bytes(&img, max_bytes, &mut encoded)?;
exceeds_width || encoded_img_exceeds_bytes(context, &img, max_bytes, &mut encoded)?;
let do_rotate = matches!(orientation, Ok(90) | Ok(180) | Ok(270));
if do_scale || do_rotate {
@@ -398,7 +401,7 @@ impl<'a> BlobObject<'a> {
loop {
let new_img = img.thumbnail(img_wh, img_wh);
if encoded_img_exceeds_bytes(&new_img, max_bytes, &mut encoded)? {
if encoded_img_exceeds_bytes(context, &new_img, max_bytes, &mut encoded)? {
if img_wh < 20 {
return Err(format_err!(
"Failed to scale image to below {}B",
@@ -413,7 +416,8 @@ impl<'a> BlobObject<'a> {
}
info!(
"Final scaled-down image size: {}B ({}px).",
context,
"Final scaled-down image size: {}B ({}px)",
encoded.len(),
img_wh
);
@@ -442,7 +446,7 @@ impl<'a> BlobObject<'a> {
})
}
pub fn get_exif_orientation(&self) -> Result<i32> {
pub fn get_exif_orientation(&self, context: &Context) -> Result<i32> {
let file = std::fs::File::open(self.to_abs_path())?;
let mut bufreader = std::io::BufReader::new(&file);
let exifreader = exif::Reader::new();
@@ -454,7 +458,7 @@ impl<'a> BlobObject<'a> {
Some(3) => return Ok(180),
Some(6) => return Ok(90),
Some(8) => return Ok(270),
other => warn!("Exif orientation value ignored: {other:?}."),
other => warn!(context, "exif orientation value ignored: {:?}", other),
}
}
Ok(0)
@@ -486,7 +490,7 @@ impl<'a> BlobDirContents<'a> {
match entry {
Ok(entry) => Some(entry),
Err(err) => {
error!("Failed to read blob file: {err:#}.");
error!(context, "Failed to read blob file: {err}");
None
}
}
@@ -496,7 +500,8 @@ impl<'a> BlobDirContents<'a> {
true => Some(entry.path()),
false => {
warn!(
"Export: Found blob dir entry {} that is not a file, ignoring.",
context,
"Export: Found blob dir entry {} that is not a file, ignoring",
entry.path().display()
);
None
@@ -538,7 +543,7 @@ impl<'a> Iterator for BlobDirIter<'a> {
// silently skipping them is fine.
match BlobObject::from_path(self.context, path) {
Ok(blob) => return Some(blob),
Err(err) => warn!("{err:#}"),
Err(err) => warn!(self.context, "{err}"),
}
}
None
@@ -554,6 +559,7 @@ fn encode_img(img: &DynamicImage, encoded: &mut Vec<u8>) -> anyhow::Result<()> {
Ok(())
}
fn encoded_img_exceeds_bytes(
context: &Context,
img: &DynamicImage,
max_bytes: Option<usize>,
encoded: &mut Vec<u8>,
@@ -562,7 +568,8 @@ fn encoded_img_exceeds_bytes(
encode_img(img, encoded)?;
if encoded.len() > max_bytes {
info!(
"Image size {}B ({}x{}px) exceeds {}B, need to scale down.",
context,
"image size {}B ({}x{}px) exceeds {}B, need to scale down",
encoded.len(),
img.width(),
img.height(),
@@ -816,7 +823,7 @@ mod tests {
let blob = BlobObject::new_from_path(&t, &avatar_blob).await.unwrap();
blob.recode_to_size(blob.to_abs_path(), 1000, Some(3000))
blob.recode_to_size(&t, blob.to_abs_path(), 1000, Some(3000))
.unwrap();
assert!(file_size(&avatar_blob).await <= 3000);
assert!(file_size(&avatar_blob).await > 2000);
@@ -1000,7 +1007,7 @@ mod tests {
check_image_size(&file, original_width, original_height);
let blob = BlobObject::new_from_path(&alice, &file).await?;
assert_eq!(blob.get_exif_orientation().unwrap_or(0), orientation);
assert_eq!(blob.get_exif_orientation(&alice).unwrap_or(0), orientation);
let mut msg = Message::new(Viewtype::Image);
msg.set_file(file.to_str().unwrap(), None);
@@ -1021,7 +1028,7 @@ mod tests {
let file = bob_msg.get_file(&bob).unwrap();
let blob = BlobObject::new_from_path(&bob, &file).await?;
assert_eq!(blob.get_exif_orientation().unwrap_or(0), 0);
assert_eq!(blob.get_exif_orientation(&bob).unwrap_or(0), 0);
let img = check_image_size(file, compressed_width, compressed_height);
Ok(img)

View File

@@ -10,7 +10,6 @@ use std::time::{Duration, SystemTime};
use anyhow::{bail, ensure, Context as _, Result};
use deltachat_derive::{FromSql, ToSql};
use serde::{Deserialize, Serialize};
use tracing::{error, info, warn};
use crate::aheader::EncryptPreference;
use crate::blob::BlobObject;
@@ -246,7 +245,10 @@ impl ChatId {
Contact::scaleup_origin_by_id(context, contact_id, Origin::CreateChat).await?;
chat_id
} else {
warn!("Cannot create chat, contact {contact_id} does not exist.");
warn!(
context,
"Cannot create chat, contact {} does not exist.", contact_id,
);
bail!("Can not create chat for non-existing contact");
}
}
@@ -282,7 +284,12 @@ impl ChatId {
let chat_id = ChatId::new(u32::try_from(row_id)?);
info!(
"Created group/mailinglist '{grpname}' grpid={grpid} as {chat_id}, blocked={create_blocked}."
context,
"Created group/mailinglist '{}' grpid={} as {}, blocked={}",
grpname,
grpid,
chat_id,
create_blocked,
);
Ok(chat_id)
@@ -329,13 +336,16 @@ impl ChatId {
Chattype::Single => {
for contact_id in get_chat_contacts(context, self).await? {
if contact_id != ContactId::SELF {
info!("Blocking the contact {contact_id} to block 1:1 chat.");
info!(
context,
"Blocking the contact {} to block 1:1 chat", contact_id
);
Contact::block(context, contact_id).await?;
}
}
}
Chattype::Group => {
info!("Can't block groups yet, deleting the chat.");
info!(context, "Can't block groups yet, deleting the chat");
self.delete(context).await?;
}
Chattype::Mailinglist => {
@@ -400,7 +410,7 @@ impl ChatId {
let chat = Chat::load_from_db(context, self).await?;
if protect == chat.protected {
info!("Protection status unchanged for {self}.");
info!(context, "Protection status unchanged for {}.", self);
return Ok(());
}
@@ -490,7 +500,7 @@ impl ChatId {
let chat = Chat::load_from_db(context, self).await?;
if let Err(e) = self.inner_set_protection(context, protect).await {
error!("Cannot set protection: {e:#}."); // make error user-visible
error!(context, "Cannot set protection: {}", e); // make error user-visible
return Err(e);
}
@@ -1058,7 +1068,10 @@ impl ChatId {
!self.is_special(),
"can not set gossiped timestamp for special chats"
);
info!("Set gossiped_timestamp for chat {self} to {timestamp}.");
info!(
context,
"set gossiped_timestamp for chat {} to {}.", self, timestamp,
);
context
.sql
@@ -1196,7 +1209,10 @@ impl Chat {
}
}
Err(err) => {
error!("Failed to load contacts for {}: {:#}.", chat.id, err);
error!(
context,
"failed to load contacts for {}: {:#}", chat.id, err
);
}
}
chat.name = chat_name;
@@ -1459,7 +1475,10 @@ impl Chat {
{
to_id = id;
} else {
error!("Cannot send message, contact for {} not found.", self.id,);
error!(
context,
"Cannot send message, contact for {} not found.", self.id,
);
bail!("Cannot set message, contact for {} not found.", self.id);
}
} else if self.typ == Chattype::Group
@@ -2013,8 +2032,8 @@ async fn prepare_msg_blob(context: &Context, msg: &mut Message) -> Result<()> {
.with_context(|| format!("attachment missing for message of type #{}", msg.viewtype))?;
if msg.viewtype == Viewtype::Image {
if let Err(err) = blob.recode_to_image_size(context).await {
warn!("Cannot recode image, using original data: {err:#}.");
if let Err(e) = blob.recode_to_image_size(context).await {
warn!(context, "Cannot recode image, using original data: {:?}", e);
}
}
msg.param.set(Param::File, blob.as_name());
@@ -2054,6 +2073,7 @@ async fn prepare_msg_blob(context: &Context, msg: &mut Message) -> Result<()> {
}
info!(
context,
"Attaching \"{}\" for message type #{}.",
blob.to_abs_path().display(),
msg.viewtype
@@ -2236,7 +2256,7 @@ async fn create_send_msg_job(context: &Context, msg_id: MsgId) -> Result<Option<
let attach_selfavatar = match shall_attach_selfavatar(context, msg.chat_id).await {
Ok(attach_selfavatar) => attach_selfavatar,
Err(err) => {
warn!("SMTP job cannot get selfavatar-state: {err:#}.");
warn!(context, "job: cannot get selfavatar-state: {:#}", err);
false
}
};
@@ -2261,7 +2281,10 @@ async fn create_send_msg_job(context: &Context, msg_id: MsgId) -> Result<Option<
if recipients.is_empty() {
// may happen eg. for groups with only SELF and bcc_self disabled
info!("Message {msg_id} has no recipient, skipping smtp-send.");
info!(
context,
"message {} has no recipient, skipping smtp-send", msg_id
);
msg_id.set_delivered(context).await?;
return Ok(None);
}
@@ -2295,27 +2318,27 @@ async fn create_send_msg_job(context: &Context, msg_id: MsgId) -> Result<Option<
if 0 != rendered_msg.last_added_location_id {
if let Err(err) = location::set_kml_sent_timestamp(context, msg.chat_id, time()).await {
error!("Failed to set kml sent_timestamp: {err:#}.");
error!(context, "Failed to set kml sent_timestamp: {:#}", err);
}
if !msg.hidden {
if let Err(err) =
location::set_msg_location_id(context, msg.id, rendered_msg.last_added_location_id)
.await
{
error!("Failed to set msg_location_id: {err:#}.");
error!(context, "Failed to set msg_location_id: {:#}", err);
}
}
}
if let Some(sync_ids) = rendered_msg.sync_ids_to_delete {
if let Err(err) = context.delete_sync_ids(sync_ids).await {
error!("Failed to delete sync ids: {err:#}.");
error!(context, "Failed to delete sync ids: {:#}", err);
}
}
if attach_selfavatar {
if let Err(err) = msg.chat_id.set_selfavatar_timestamp(context, time()).await {
error!("Failed to set selfavatar timestamp: {err:#}.");
error!(context, "Failed to set selfavatar timestamp: {:#}", err);
}
}
@@ -2669,7 +2692,9 @@ pub(crate) async fn mark_old_messages_as_noticed(
if !changed_chats.is_empty() {
info!(
"Marking chats as noticed because there are newer outgoing messages: {changed_chats:?}."
context,
"Marking chats as noticed because there are newer outgoing messages: {:?}",
changed_chats
);
}
@@ -2988,7 +3013,10 @@ pub(crate) async fn add_contact_to_chat_ex(
if context.is_self_addr(contact.get_addr()).await? {
// ourself is added using ContactId::SELF, do not add this address explicitly.
// if SELF is not in the group, members cannot be added at all.
warn!("Invalid attempt to add self e-mail address to group.");
warn!(
context,
"invalid attempt to add self e-mail address to group"
);
return Ok(false);
}
@@ -3001,7 +3029,10 @@ pub(crate) async fn add_contact_to_chat_ex(
if chat.is_protected()
&& contact.is_verified(context).await? != VerifiedStatus::BidirectVerified
{
error!("Only bidirectional verified contacts can be added to protected chats.");
error!(
context,
"Only bidirectional verified contacts can be added to protected chats."
);
return Ok(false);
}
if is_contact_in_chat(context, chat_id, contact_id).await? {
@@ -3531,7 +3562,7 @@ pub async fn add_device_msg_with_importance(
if let Some(label) = label {
if was_device_msg_ever_added(context, label).await? {
info!("Device-message {label} already added.");
info!(context, "device-message {} already added", label);
return Ok(msg_id);
}
}

View File

@@ -1,7 +1,6 @@
//! # Chat list module.
use anyhow::{ensure, Context as _, Result};
use tracing::warn;
use crate::chat::{update_special_chat_names, Chat, ChatId, ChatVisibility};
use crate::constants::{
@@ -177,7 +176,7 @@ impl Chatlist {
// allow searching over special names that may change at any time
// when the ui calls set_stock_translation()
if let Err(err) = update_special_chat_names(context).await {
warn!("Cannot update special chat names: {err:#}.")
warn!(context, "cannot update special chat names: {:?}", err)
}
let str_like_cmd = format!("%{query}%");

View File

@@ -13,7 +13,6 @@ use futures_lite::FutureExt as _;
use percent_encoding::{utf8_percent_encode, NON_ALPHANUMERIC};
use server_params::{expand_param_vector, ServerParams};
use tokio::task;
use tracing::{info, warn};
use crate::config::Config;
use crate::contact::addr_cmp;
@@ -101,7 +100,7 @@ impl Context {
}
async fn inner_configure(&self) -> Result<()> {
info!("Configure ...");
info!(self, "Configure ...");
let mut param = LoginParam::load_candidate_params(self).await?;
let old_addr = self.get_config(Config::ConfiguredAddr).await?;
@@ -126,10 +125,13 @@ async fn on_configure_completed(
if let Some(config_defaults) = &provider.config_defaults {
for def in config_defaults.iter() {
if !context.config_exists(def.key).await? {
info!("apply config_defaults {}={}", def.key, def.value);
info!(context, "apply config_defaults {}={}", def.key, def.value);
context.set_config(def.key, Some(def.value)).await?;
} else {
info!("skip already set config_defaults {}={}", def.key, def.value);
info!(
context,
"skip already set config_defaults {}={}", def.key, def.value
);
}
}
}
@@ -141,7 +143,7 @@ async fn on_configure_completed(
.await
.is_err()
{
warn!("cannot add after_login_hint as core-provider-info");
warn!(context, "cannot add after_login_hint as core-provider-info");
}
}
}
@@ -184,7 +186,7 @@ async fn configure(ctx: &Context, param: &mut LoginParam) -> Result<()> {
.await?
.and_then(|e| e.parse().ok())
{
info!("Authorized address is {oauth2_addr}");
info!(ctx, "Authorized address is {}", oauth2_addr);
param.addr = oauth2_addr;
ctx.sql
.set_raw_config("addr", Some(param.addr.as_str()))
@@ -213,17 +215,22 @@ async fn configure(ctx: &Context, param: &mut LoginParam) -> Result<()> {
{
// no advanced parameters entered by the user: query provider-database or do Autoconfig
info!("checking internal provider-info for offline autoconfig");
info!(
ctx,
"checking internal provider-info for offline autoconfig"
);
if let Some(provider) = provider::get_provider_info(&param_domain, socks5_enabled).await {
if let Some(provider) =
provider::get_provider_info(ctx, &param_domain, socks5_enabled).await
{
param.provider = Some(provider);
match provider.status {
provider::Status::Ok | provider::Status::Preparation => {
if provider.server.is_empty() {
info!("offline autoconfig found, but no servers defined");
info!(ctx, "offline autoconfig found, but no servers defined");
param_autoconfig = None;
} else {
info!("offline autoconfig found");
info!(ctx, "offline autoconfig found");
let servers = provider
.server
.iter()
@@ -250,17 +257,17 @@ async fn configure(ctx: &Context, param: &mut LoginParam) -> Result<()> {
}
}
provider::Status::Broken => {
info!("offline autoconfig found, provider is broken");
info!(ctx, "offline autoconfig found, provider is broken");
param_autoconfig = None;
}
}
} else {
// Try receiving autoconfig
info!("no offline autoconfig found");
info!(ctx, "no offline autoconfig found");
param_autoconfig = if socks5_enabled {
// Currently we can't do http requests through socks5, to not leak
// the ip, just don't do online autoconfig
info!("socks5 enabled, skipping autoconfig");
info!(ctx, "socks5 enabled, skipping autoconfig");
None
} else {
get_autoconfig(ctx, param, &param_domain, &param_addr_urlencoded).await
@@ -458,7 +465,7 @@ async fn configure(ctx: &Context, param: &mut LoginParam) -> Result<()> {
progress!(ctx, 920);
e2ee::ensure_secret_key_exists(ctx).await?;
info!("key generation completed");
info!(ctx, "key generation completed");
ctx.set_config_bool(Config::FetchedExistingMsgs, false)
.await?;
@@ -571,13 +578,13 @@ async fn try_imap_one_param(
"None".to_string()
}
);
info!("Trying: {}", inf);
info!(context, "Trying: {}", inf);
let (_s, r) = async_channel::bounded(1);
let mut imap = match Imap::new(param, socks5_config.clone(), addr, provider_strict_tls, r) {
Err(err) => {
info!("failure: {:#}", err);
info!(context, "failure: {:#}", err);
return Err(ConfigurationError {
config: inf,
msg: format!("{err:#}"),
@@ -588,14 +595,14 @@ async fn try_imap_one_param(
match imap.connect(context).await {
Err(err) => {
info!("failure: {:#}", err);
info!(context, "failure: {:#}", err);
Err(ConfigurationError {
config: inf,
msg: format!("{err:#}"),
})
}
Ok(()) => {
info!("success: {}", inf);
info!(context, "success: {}", inf);
Ok(imap)
}
}
@@ -623,19 +630,19 @@ async fn try_smtp_one_param(
"None".to_string()
}
);
info!("Trying: {inf}");
info!(context, "Trying: {}", inf);
if let Err(err) = smtp
.connect(context, param, socks5_config, addr, provider_strict_tls)
.await
{
info!("failure: {err:#}");
info!(context, "failure: {}", err);
Err(ConfigurationError {
config: inf,
msg: format!("{err:#}"),
})
} else {
info!("success: {inf}");
info!(context, "success: {}", inf);
smtp.disconnect().await;
Ok(())
}

View File

@@ -1,10 +1,10 @@
//! # Thunderbird's Autoconfiguration implementation
//!
//! Documentation: <https://developer.mozilla.org/en-US/docs/Mozilla/Thunderbird/Autoconfiguration>
use quick_xml::events::{BytesStart, Event};
use std::io::BufRead;
use std::str::FromStr;
use tracing::warn;
use quick_xml::events::{BytesStart, Event};
use super::read_url::read_url;
use super::{Error, ServerParams};
@@ -263,7 +263,10 @@ pub(crate) async fn moz_autoconfigure(
let res = parse_serverparams(&param_in.addr, &xml_raw);
if let Err(err) = &res {
warn!("Failed to parse Thunderbird autoconfiguration XML: {err:#}");
warn!(
context,
"Failed to parse Thunderbird autoconfiguration XML: {}", err
);
}
res
}

View File

@@ -6,7 +6,6 @@
use std::io::BufRead;
use quick_xml::events::Event;
use tracing::warn;
use super::read_url::read_url;
use super::{Error, ServerParams};
@@ -203,7 +202,7 @@ pub(crate) async fn outlk_autodiscover(
let xml_raw = read_url(context, &url).await?;
let res = parse_xml(&xml_raw);
if let Err(err) = &res {
warn!("{err:#}");
warn!(context, "{}", err);
}
match res? {
ParsingResult::RedirectUrl(redirect_url) => url = redirect_url,

View File

@@ -1,5 +1,4 @@
use anyhow::{anyhow, format_err};
use tracing::info;
use crate::context::Context;
use crate::socks::Socks5Config;
@@ -7,12 +6,12 @@ use crate::socks::Socks5Config;
pub async fn read_url(context: &Context, url: &str) -> anyhow::Result<String> {
match read_url_inner(context, url).await {
Ok(s) => {
info!("Successfully read url {url}");
info!(context, "Successfully read url {}", url);
Ok(s)
}
Err(e) => {
info!("Can't read URL {url}: {e:#}");
Err(format_err!("Can't read URL {url}: {e:#}"))
info!(context, "Can't read URL {}: {:#}", url, e);
Err(format_err!("Can't read URL {}: {:#}", url, e))
}
}
}
@@ -33,7 +32,7 @@ pub async fn read_url_inner(context: &Context, url: &str) -> anyhow::Result<Stri
.last()
.ok_or_else(|| anyhow!("Redirection doesn't have a target location"))?
.to_str()?;
info!("Following redirect to {header}");
info!(context, "Following redirect to {}", header);
url = header.to_string();
continue;
}

View File

@@ -17,16 +17,12 @@ use rusqlite::OptionalExtension;
use serde::{Deserialize, Serialize};
use tokio::task;
use tokio::time::{timeout, Duration};
use tracing::instrument::WithSubscriber;
use tracing::Instrument;
use tracing::{info, warn};
use crate::aheader::EncryptPreference;
use crate::chat::ChatId;
use crate::color::str_to_color;
use crate::config::Config;
use crate::constants::{Blocked, Chattype, DC_GCL_ADD_SELF, DC_GCL_VERIFIED_ONLY};
use crate::context::future::ContextIdFutureExt;
use crate::context::Context;
use crate::events::EventType;
use crate::key::{DcKey, SignedPublicKey};
@@ -550,7 +546,7 @@ impl Contact {
// Filter out use-once addresses (like reply+AEJDGPOECLAP...@reply.github.com):
|| (addr.len() > 50 && addr.contains('+'))
{
info!("hiding contact {addr}");
info!(context, "hiding contact {}", addr);
origin = Origin::Hidden;
// For these kind of email addresses, sender and address often don't belong together
// (like hocuri <notifications@github.com>). In this example, hocuri shouldn't
@@ -703,7 +699,7 @@ impl Contact {
sth_modified = Modifier::Created;
row_id = u32::try_from(transaction.last_insert_rowid())?;
info!("added contact id={} addr={}", row_id, &addr);
info!(context, "added contact id={} addr={}", row_id, &addr);
}
Ok(row_id)
}).await?;
@@ -743,12 +739,15 @@ impl Contact {
}
}
Err(err) => {
warn!("Failed to add address {addr} from address book: {err:#}");
warn!(
context,
"Failed to add address {} from address book: {}", addr, err
);
}
}
}
Err(err) => {
warn!("{err:#}.");
warn!(context, "{:#}.", err);
}
}
}
@@ -1056,7 +1055,7 @@ impl Contact {
}
/// Updates `param` column in the database.
pub(crate) async fn update_param(&self, context: &Context) -> Result<()> {
pub async fn update_param(&self, context: &Context) -> Result<()> {
context
.sql
.execute(
@@ -1068,7 +1067,7 @@ impl Contact {
}
/// Updates `status` column in the database.
pub(crate) async fn update_status(&self, context: &Context) -> Result<()> {
pub async fn update_status(&self, context: &Context) -> Result<()> {
context
.sql
.execute(
@@ -1389,7 +1388,7 @@ pub(crate) async fn set_profile_image(
.set_config(Config::Selfavatar, Some(profile_image))
.await?;
} else {
info!("Do not use unencrypted selfavatar.");
info!(context, "Do not use unencrypted selfavatar.");
}
} else {
contact.param.set(Param::ProfileImage, profile_image);
@@ -1401,7 +1400,7 @@ pub(crate) async fn set_profile_image(
if was_encrypted {
context.set_config(Config::Selfavatar, None).await?;
} else {
info!("Do not use unencrypted selfavatar deletion.");
info!(context, "Do not use unencrypted selfavatar deletion.");
}
} else {
contact.param.remove(Param::ProfileImage);
@@ -1556,12 +1555,7 @@ impl RecentlySeenLoop {
pub(crate) fn new(context: Context) -> Self {
let (interrupt_send, interrupt_recv) = channel::bounded(1);
let handle = task::spawn(
Self::run(context, interrupt_recv)
.with_current_subscriber()
.bind_current_context_id()
.in_current_span(),
);
let handle = task::spawn(Self::run(context, interrupt_recv));
Self {
handle,
interrupt_send,
@@ -1613,6 +1607,7 @@ impl RecentlySeenLoop {
if let Ok(duration) = until.duration_since(now) {
info!(
context,
"Recently seen loop waiting for {} or interrupt",
duration_to_str(duration)
);
@@ -1626,7 +1621,10 @@ impl RecentlySeenLoop {
}
}
Ok(Err(err)) => {
warn!("Error receiving an interruption in recently seen loop: {err:#}.");
warn!(
context,
"Error receiving an interruption in recently seen loop: {}", err
);
// Maybe the sender side is closed.
// Terminate the loop to avoid looping indefinitely.
return;
@@ -1640,7 +1638,10 @@ impl RecentlySeenLoop {
}
}
} else {
info!("Recently seen loop is not waiting, event is already due.");
info!(
context,
"Recently seen loop is not waiting, event is already due."
);
// Event is already in the past.
if let Some(contact_id) = contact_id {

View File

@@ -13,14 +13,13 @@ use async_channel::{self as channel, Receiver, Sender};
use ratelimit::Ratelimit;
use tokio::sync::{Mutex, RwLock};
use tokio::task;
use tracing::{info, warn};
use crate::chat::{get_chat_cnt, ChatId};
use crate::config::Config;
use crate::constants::DC_VERSION_STR;
use crate::contact::Contact;
use crate::debug_logging::DebugEventLogData;
use crate::events::{Event, EventEmitter, EventLayer, EventType, Events};
use crate::events::{Event, EventEmitter, EventType, Events};
use crate::key::{DcKey, SignedPublicKey};
use crate::login_param::LoginParam;
use crate::message::{self, MessageState, MsgId};
@@ -31,8 +30,6 @@ use crate::stock_str::StockStrings;
use crate::timesmearing::SmearedTimestamp;
use crate::tools::{duration_to_str, time};
pub mod future;
/// Builder for the [`Context`].
///
/// Many arguments to the [`Context`] are kind of optional and only needed to handle
@@ -236,6 +233,12 @@ pub struct InnerContext {
creation_time: SystemTime,
/// The text of the last error logged and emitted as an event.
/// If the ui wants to display an error after a failure,
/// `last_error` should be used to avoid races with the event thread.
pub(crate) last_error: std::sync::RwLock<String>,
/// If debug logging is enabled, this contains all necessary information
pub(crate) debug_logging: RwLock<Option<DebugLogging>>,
}
@@ -293,7 +296,7 @@ impl Context {
events: Events,
stock_strings: StockStrings,
) -> Result<Context> {
let context = Self::new_closed(dbfile, id, events.clone(), stock_strings).await?;
let context = Self::new_closed(dbfile, id, events, stock_strings).await?;
// Open the database if is not encrypted.
if context.check_passphrase("".to_string()).await? {
@@ -302,11 +305,6 @@ impl Context {
Ok(context)
}
/// Returns `tracing` subscriber layer.
pub fn to_layer(&self) -> EventLayer {
self.events.to_layer()
}
/// Creates new context without opening the database.
pub async fn new_closed(
dbfile: &Path,
@@ -384,6 +382,7 @@ impl Context {
server_id: RwLock::new(None),
creation_time: std::time::SystemTime::now(),
last_full_folder_scan: Mutex::new(None),
last_error: std::sync::RwLock::new("".to_string()),
debug_logging: RwLock::new(None),
};
@@ -397,7 +396,7 @@ impl Context {
/// Starts the IO scheduler.
pub async fn start_io(&self) {
if let Ok(false) = self.is_configured().await {
warn!("can not start io on a context that is not configured");
warn!(self, "can not start io on a context that is not configured");
return;
}
self.scheduler.start(self.clone()).await;
@@ -540,13 +539,13 @@ impl Context {
match &*s {
RunningState::Running { cancel_sender } => {
if let Err(err) = cancel_sender.send(()).await {
warn!("could not cancel ongoing: {:#}", err);
warn!(self, "could not cancel ongoing: {:#}", err);
}
info!("Signaling the ongoing process to stop ASAP.",);
info!(self, "Signaling the ongoing process to stop ASAP.",);
*s = RunningState::ShallStop;
}
RunningState::ShallStop | RunningState::Stopped => {
info!("No ongoing process to stop.",);
info!(self, "No ongoing process to stop.",);
}
}
}

View File

@@ -1,83 +0,0 @@
//! Futures extensions to track current context ID.
use pin_project_lite::pin_project;
use std::cell::RefCell;
use std::future::Future;
use std::pin::Pin;
use std::task::{Context, Poll};
use std::thread_local;
thread_local! {
    /// Context ID bound to the current thread.
    ///
    /// Initialized to 0. Temporarily overridden by `ContextIdGuard`
    /// while a context-bound future is being polled, and restored
    /// when the guard is dropped.
    static THREAD_CONTEXT_ID: RefCell<u32> = RefCell::new(0);
}
/// Guard that rebinds the thread-local context ID.
///
/// Constructing it stores the previously bound ID; dropping it
/// restores that ID (see the `Drop` impl).
pub(crate) struct ContextIdGuard {
    // Context ID that was bound before this guard was created.
    previous: u32,
}
/// Returns the context ID currently bound to this thread.
pub(crate) fn current_context_id() -> u32 {
    THREAD_CONTEXT_ID.with(|id| *id.borrow())
}
impl ContextIdGuard {
    /// Binds `context_id` to the current thread, remembering the
    /// previously bound ID so it can be restored when the guard drops.
    fn new(context_id: u32) -> Self {
        // `RefCell::replace` swaps in the new ID and returns the old one
        // in a single call, replacing the manual borrow()/borrow_mut() pair.
        let previous =
            THREAD_CONTEXT_ID.with(|prev_context_id| prev_context_id.replace(context_id));
        Self { previous }
    }
}
impl Drop for ContextIdGuard {
    /// Restores the context ID that was bound before this guard existed.
    fn drop(&mut self) {
        THREAD_CONTEXT_ID.with(|slot| {
            *slot.borrow_mut() = self.previous;
        })
    }
}
pin_project! {
    /// A future with attached context ID.
    ///
    /// While the wrapped future is polled, `context_id` is installed as
    /// the thread-local context ID and restored afterwards.
    #[derive(Debug, Clone)]
    pub struct ContextIdFuture<F> {
        // ID rebound around every poll of `future`.
        context_id: u32,
        // The wrapped future; structurally pinned by pin_project.
        #[pin]
        future: F,
    }
}
impl<F> ContextIdFuture<F> {
/// Wraps a future.
pub fn new(context_id: u32, future: F) -> Self {
Self { context_id, future }
}
}
impl<F: Future> Future for ContextIdFuture<F> {
    type Output = F::Output;

    /// Polls the inner future with the attached context ID bound
    /// to the current thread.
    fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
        let id = self.context_id;
        let projected = self.project();
        // Keep the guard alive for the whole inner poll so the thread-local
        // context ID is set while the wrapped future runs; it is restored
        // when `_guard` drops at the end of this scope.
        let _guard = ContextIdGuard::new(id);
        projected.future.poll(cx)
    }
}
/// Future extension to bind context ID.
///
/// Blanket-implemented for all futures (see the impl below), so any
/// future can be wrapped in a [`ContextIdFuture`] with these methods.
pub trait ContextIdFutureExt: Sized {
    /// Binds context ID to the future.
    fn bind_context_id(self, context_id: u32) -> ContextIdFuture<Self> {
        ContextIdFuture::new(context_id, self)
    }

    /// Binds current context ID to the future.
    ///
    /// Reads the ID bound to the calling thread via `current_context_id`.
    fn bind_current_context_id(self) -> ContextIdFuture<Self> {
        self.bind_context_id(current_context_id())
    }
}

// Blanket impl: every future gets the extension methods.
impl<F> ContextIdFutureExt for F where F: Future {}

View File

@@ -12,7 +12,6 @@ use async_channel::{self as channel, Receiver};
use serde_json::json;
use std::path::PathBuf;
use tokio::task;
use tracing::info;
/// Store all information needed to log an event to a webxdc.
pub struct DebugEventLogData {
@@ -133,7 +132,7 @@ pub(crate) async fn set_debug_logging_xdc(ctx: &Context, id: Option<MsgId>) -> a
});
}
}
info!("replacing logging webxdc");
info!(ctx, "replacing logging webxdc");
}
// Delete current debug logging
None => {
@@ -141,7 +140,7 @@ pub(crate) async fn set_debug_logging_xdc(ctx: &Context, id: Option<MsgId>) -> a
.set_raw_config(Config::DebugLogging.as_ref(), None)
.await?;
*ctx.debug_logging.write().await = None;
info!("removing logging webxdc");
info!(ctx, "removing logging webxdc");
}
}
Ok(())

View File

@@ -5,7 +5,6 @@ use std::str::FromStr;
use anyhow::Result;
use mailparse::ParsedMail;
use tracing::{info, warn};
use crate::aheader::Aheader;
use crate::authres::handle_authres;
@@ -25,6 +24,7 @@ use crate::pgp;
///
/// If the message is wrongly signed, HashSet will be empty.
pub fn try_decrypt(
context: &Context,
mail: &ParsedMail<'_>,
private_keyring: &Keyring<SignedSecretKey>,
public_keyring_for_validate: &Keyring<SignedPublicKey>,
@@ -36,7 +36,7 @@ pub fn try_decrypt(
None => return Ok(None),
Some(res) => res,
};
info!("Detected Autocrypt-mime message");
info!(context, "Detected Autocrypt-mime message");
decrypt_part(
encrypted_data_part,
@@ -54,6 +54,7 @@ pub(crate) async fn prepare_decryption(
if mail.headers.get_header(HeaderDef::ListPost).is_some() {
if mail.headers.get_header(HeaderDef::Autocrypt).is_some() {
info!(
context,
"Ignoring autocrypt header since this is a mailing list message. \
NOTE: For privacy reasons, the mailing list software should remove Autocrypt headers."
);
@@ -77,13 +78,13 @@ pub(crate) async fn prepare_decryption(
Ok(header) if addr_cmp(&header.addr, from) => Some(header),
Ok(header) => {
warn!(
"Autocrypt header address {:?} is not {:?}.",
header.addr, from
context,
"Autocrypt header address {:?} is not {:?}.", header.addr, from
);
None
}
Err(err) => {
warn!("Failed to parse Autocrypt header: {:#}.", err);
warn!(context, "Failed to parse Autocrypt header: {:#}.", err);
None
}
}
@@ -318,8 +319,8 @@ pub(crate) async fn get_autocrypt_peerstate(
peerstate.save_to_db(&context.sql).await?;
} else {
info!(
"Refusing to update existing peerstate of {}",
&peerstate.addr
context,
"Refusing to update existing peerstate of {}", &peerstate.addr
);
}
}

View File

@@ -6,7 +6,6 @@ use std::collections::BTreeMap;
use anyhow::{anyhow, Result};
use deltachat_derive::{FromSql, ToSql};
use serde::{Deserialize, Serialize};
use tracing::{info, warn};
use crate::config::Config;
use crate::context::Context;
@@ -125,7 +124,7 @@ impl Job {
/// Called in response to `Action::DownloadMsg`.
pub(crate) async fn download_msg(&self, context: &Context, imap: &mut Imap) -> Status {
if let Err(err) = imap.prepare(context).await {
warn!("download: could not connect: {:#}", err);
warn!(context, "download: could not connect: {:#}", err);
return Status::RetryNow;
}
@@ -196,7 +195,7 @@ impl Imap {
}
// we are connected, and the folder is selected
info!("Downloading message {}/{} fully...", folder, uid);
info!(context, "Downloading message {}/{} fully...", folder, uid);
let mut uid_message_ids: BTreeMap<u32, String> = BTreeMap::new();
uid_message_ids.insert(uid, rfc724_mid);
@@ -241,7 +240,7 @@ impl MimeMessage {
text += format!(" [{until}]").as_str();
};
info!("Partial download: {}", text);
info!(context, "Partial download: {}", text);
self.parts.push(Part {
typ: Viewtype::Text,

View File

@@ -2,7 +2,6 @@
use anyhow::{format_err, Context as _, Result};
use num_traits::FromPrimitive;
use tracing::info;
use crate::aheader::{Aheader, EncryptPreference};
use crate::config::Config;
@@ -52,6 +51,7 @@ impl EncryptHelper {
/// Returns an error if `e2ee_guaranteed` is true, but one or more keys are missing.
pub fn should_encrypt(
&self,
context: &Context,
e2ee_guaranteed: bool,
peerstates: &[(Option<Peerstate>, &str)],
) -> Result<bool> {
@@ -63,7 +63,10 @@ impl EncryptHelper {
for (peerstate, addr) in peerstates {
match peerstate {
Some(peerstate) => {
info!("peerstate for {:?} is {}", addr, peerstate.prefer_encrypt);
info!(
context,
"peerstate for {:?} is {}", addr, peerstate.prefer_encrypt
);
match peerstate.prefer_encrypt {
EncryptPreference::NoPreference => {}
EncryptPreference::Mutual => prefer_encrypt_count += 1,
@@ -79,7 +82,7 @@ impl EncryptHelper {
if e2ee_guaranteed {
return Err(format_err!("{}", msg));
} else {
info!("{}", msg);
info!(context, "{}", msg);
return Ok(false);
}
}
@@ -319,22 +322,22 @@ Sent with my Delta Chat Messenger: https://delta.chat";
// test with EncryptPreference::NoPreference:
// if e2ee_guaranteed is unset, there is no encryption as not more than half of peers want encryption
let ps = new_peerstates(EncryptPreference::NoPreference);
assert!(encrypt_helper.should_encrypt(true, &ps).unwrap());
assert!(!encrypt_helper.should_encrypt(false, &ps).unwrap());
assert!(encrypt_helper.should_encrypt(&t, true, &ps).unwrap());
assert!(!encrypt_helper.should_encrypt(&t, false, &ps).unwrap());
// test with EncryptPreference::Reset
let ps = new_peerstates(EncryptPreference::Reset);
assert!(encrypt_helper.should_encrypt(true, &ps).unwrap());
assert!(!encrypt_helper.should_encrypt(false, &ps).unwrap());
assert!(encrypt_helper.should_encrypt(&t, true, &ps).unwrap());
assert!(!encrypt_helper.should_encrypt(&t, false, &ps).unwrap());
// test with EncryptPreference::Mutual (self is also Mutual)
let ps = new_peerstates(EncryptPreference::Mutual);
assert!(encrypt_helper.should_encrypt(true, &ps).unwrap());
assert!(encrypt_helper.should_encrypt(false, &ps).unwrap());
assert!(encrypt_helper.should_encrypt(&t, true, &ps).unwrap());
assert!(encrypt_helper.should_encrypt(&t, false, &ps).unwrap());
// test with missing peerstate
let ps = vec![(None, "bob@foo.bar")];
assert!(encrypt_helper.should_encrypt(true, &ps).is_err());
assert!(!encrypt_helper.should_encrypt(false, &ps).unwrap());
assert!(encrypt_helper.should_encrypt(&t, true, &ps).is_err());
assert!(!encrypt_helper.should_encrypt(&t, false, &ps).unwrap());
}
}

View File

@@ -72,7 +72,6 @@ use anyhow::{ensure, Result};
use async_channel::Receiver;
use serde::{Deserialize, Serialize};
use tokio::time::timeout;
use tracing::{error, info, warn};
use crate::chat::{send_msg, ChatId};
use crate::constants::{DC_CHAT_ID_LAST_SPECIAL, DC_CHAT_ID_TRASH};
@@ -226,8 +225,8 @@ impl ChatId {
msg.param.set_cmd(SystemMessage::EphemeralTimerChanged);
if let Err(err) = send_msg(context, self, &mut msg).await {
error!(
"Failed to send a message about ephemeral message timer change: {:?}",
err
context,
"Failed to send a message about ephemeral message timer change: {:?}", err
);
}
}
@@ -434,7 +433,7 @@ pub(crate) async fn delete_expired_messages(context: &Context, now: i64) -> Resu
let rows = select_expired_messages(context, now).await?;
if !rows.is_empty() {
info!("Attempting to delete {} messages.", rows.len());
info!(context, "Attempting to delete {} messages.", rows.len());
let (msgs_changed, webxdc_deleted) = context
.sql
@@ -524,7 +523,7 @@ async fn next_expiration_timestamp(context: &Context) -> Option<i64> {
.await
{
Err(err) => {
warn!("Can't calculate next ephemeral timeout: {}", err);
warn!(context, "Can't calculate next ephemeral timeout: {}", err);
None
}
Ok(ephemeral_timestamp) => ephemeral_timestamp,
@@ -534,8 +533,8 @@ async fn next_expiration_timestamp(context: &Context) -> Option<i64> {
match next_delete_device_after_timestamp(context).await {
Err(err) => {
warn!(
"Can't calculate timestamp of the next message expiration: {}",
err
context,
"Can't calculate timestamp of the next message expiration: {}", err
);
None
}
@@ -564,6 +563,7 @@ pub(crate) async fn ephemeral_loop(context: &Context, interrupt_receiver: Receiv
if let Ok(duration) = until.duration_since(now) {
info!(
context,
"Ephemeral loop waiting for deletion in {} or interrupt",
duration_to_str(duration)
);

View File

@@ -1,17 +1,10 @@
#![allow(missing_docs)]
//! # Events specification.
use std::fmt::Write;
use std::path::PathBuf;
use std::sync::{Arc, RwLock};
use async_channel::{self as channel, Receiver, Sender, TrySendError};
use serde::Serialize;
use tracing::Level;
use tracing_subscriber::registry::LookupSpan;
use tracing_subscriber::Layer;
use crate::chat::ChatId;
use crate::contact::ContactId;
use crate::ephemeral::Timer as EphemeralTimer;
@@ -23,11 +16,6 @@ use crate::webxdc::StatusUpdateSerial;
pub struct Events {
receiver: Receiver<Event>,
sender: Sender<Event>,
/// The text of the last error logged and emitted as an event.
/// If the ui wants to display an error after a failure,
/// `last_error` should be used to avoid races with the event thread.
pub(crate) last_error: Arc<RwLock<String>>,
}
impl Default for Events {
@@ -41,26 +29,11 @@ impl Events {
pub fn new() -> Self {
let (sender, receiver) = channel::bounded(1_000);
Self {
receiver,
sender,
last_error: Arc::new(RwLock::new("".to_string())),
}
Self { receiver, sender }
}
/// Set last error string.
/// Implemented as blocking as used from macros in different, not always async blocks.
pub fn set_last_error(&self, error: &str) {
let error = error.to_string();
let mut last_error = self.last_error.write().unwrap();
*last_error = error;
}
/// Emits an event into event channel.
/// Emits an event.
pub fn emit(&self, event: Event) {
if let EventType::Error(formatted) = &event.typ {
self.set_last_error(formatted);
}
match self.sender.try_send(event) {
Ok(()) => {}
Err(TrySendError::Full(event)) => {
@@ -80,89 +53,6 @@ impl Events {
pub fn get_emitter(&self) -> EventEmitter {
EventEmitter(self.receiver.clone())
}
/// Returns `tracing` subscriber layer.
pub fn to_layer(&self) -> EventLayer {
EventLayer::new(self.sender.clone(), self.last_error.clone())
}
}
#[derive(Debug)]
struct MessageStorage<'a>(&'a mut String);
impl tracing::field::Visit for MessageStorage<'_> {
fn record_debug(&mut self, field: &tracing::field::Field, value: &dyn std::fmt::Debug) {
if field.name() == "message" {
write!(self.0, "{value:?}").ok();
}
}
}
/// Tracing subscriber layer converting logs into Delta Chat events.
#[derive(Debug)]
pub struct EventLayer {
/// Event channel for event submission.
sender: Sender<Event>,
last_error: Arc<RwLock<String>>,
}
impl EventLayer {
pub(crate) fn new(sender: Sender<Event>, last_error: Arc<RwLock<String>>) -> Self {
Self { sender, last_error }
}
fn set_last_error(&self, error: &str) {
let error = error.to_string();
let mut last_error = self.last_error.write().unwrap();
*last_error = error;
}
}
impl<S> Layer<S> for EventLayer
where
S: tracing::Subscriber + for<'a> LookupSpan<'a>,
{
fn on_event(
&self,
event: &tracing::Event<'_>,
_ctx: tracing_subscriber::layer::Context<'_, S>,
) {
let context_id = crate::context::future::current_context_id();
let &level = event.metadata().level();
let mut message = "".to_string();
let mut visitor = MessageStorage(&mut message);
event.record(&mut visitor);
match level {
Level::ERROR => {
self.set_last_error(&message);
self.sender
.try_send(Event {
id: context_id,
typ: EventType::Error(message),
})
.ok();
}
Level::WARN => {
self.sender
.try_send(Event {
id: context_id,
typ: EventType::Warning(message),
})
.ok();
}
Level::INFO => {
self.sender
.try_send(Event {
id: context_id,
typ: EventType::Info(message),
})
.ok();
}
Level::TRACE | Level::DEBUG => {}
}
}
}
/// A receiver of events from a [`Context`].
@@ -243,9 +133,6 @@ pub enum EventType {
/// Emitted when an IMAP message has been moved
ImapMessageMoved(String),
/// Emitted before going into IDLE on the Inbox folder.
ImapInboxIdle,
/// Emitted when an new file in the $BLOBDIR was created
NewBlobFile(String),

View File

@@ -16,7 +16,6 @@ use futures::future::FutureExt;
use lettre_email::mime::{self, Mime};
use lettre_email::PartBuilder;
use mailparse::ParsedContentType;
use tracing::warn;
use crate::headerdef::{HeaderDef, HeaderDefMap};
use crate::message::{Message, MsgId};
@@ -88,7 +87,7 @@ impl HtmlMsgParser {
/// Function takes a raw mime-message string,
/// searches for the main-text part
/// and returns that as parser.html
pub async fn from_bytes(rawmime: &[u8]) -> Result<Self> {
pub async fn from_bytes(context: &Context, rawmime: &[u8]) -> Result<Self> {
let mut parser = HtmlMsgParser {
html: "".to_string(),
plain: None,
@@ -103,7 +102,7 @@ impl HtmlMsgParser {
parser.html = plain.to_html();
}
} else {
parser.cid_to_data_recursive(&parsedmail).await?;
parser.cid_to_data_recursive(context, &parsedmail).await?;
}
Ok(parser)
@@ -174,6 +173,7 @@ impl HtmlMsgParser {
/// This allows the final html-file to be self-contained.
fn cid_to_data_recursive<'a>(
&'a mut self,
context: &'a Context,
mail: &'a mailparse::ParsedMail<'a>,
) -> Pin<Box<dyn Future<Output = Result<()>> + 'a + Send>> {
// Boxed future to deal with recursion
@@ -181,7 +181,7 @@ impl HtmlMsgParser {
match get_mime_multipart_type(&mail.ctype) {
MimeMultipartType::Multiple => {
for cur_data in &mail.subparts {
self.cid_to_data_recursive(cur_data).await?;
self.cid_to_data_recursive(context, cur_data).await?;
}
Ok(())
}
@@ -191,7 +191,7 @@ impl HtmlMsgParser {
return Ok(());
}
let mail = mailparse::parse_mail(&raw).context("failed to parse mail")?;
self.cid_to_data_recursive(&mail).await
self.cid_to_data_recursive(context, &mail).await
}
MimeMultipartType::Single => {
let mimetype = mail.ctype.mimetype.parse::<Mime>()?;
@@ -214,8 +214,10 @@ impl HtmlMsgParser {
.to_string()
}
Err(e) => warn!(
context,
"Cannot create regex for cid: {} throws {}",
re_string, e
re_string,
e
),
}
}
@@ -247,15 +249,15 @@ impl MsgId {
let rawmime = message::get_mime_headers(context, self).await?;
if !rawmime.is_empty() {
match HtmlMsgParser::from_bytes(&rawmime).await {
match HtmlMsgParser::from_bytes(context, &rawmime).await {
Err(err) => {
warn!("get_html: parser error: {:#}", err);
warn!(context, "get_html: parser error: {:#}", err);
Ok(None)
}
Ok(parser) => Ok(Some(parser.html)),
}
} else {
warn!("get_html: no mime for {}", self);
warn!(context, "get_html: no mime for {}", self);
Ok(None)
}
}
@@ -284,8 +286,9 @@ mod tests {
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn test_htmlparse_plain_unspecified() {
let t = TestContext::new().await;
let raw = include_bytes!("../test-data/message/text_plain_unspecified.eml");
let parser = HtmlMsgParser::from_bytes(raw).await.unwrap();
let parser = HtmlMsgParser::from_bytes(&t.ctx, raw).await.unwrap();
assert_eq!(
parser.html,
r##"<!DOCTYPE html>
@@ -302,8 +305,9 @@ This message does not have Content-Type nor Subject.<br/>
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn test_htmlparse_plain_iso88591() {
let t = TestContext::new().await;
let raw = include_bytes!("../test-data/message/text_plain_iso88591.eml");
let parser = HtmlMsgParser::from_bytes(raw).await.unwrap();
let parser = HtmlMsgParser::from_bytes(&t.ctx, raw).await.unwrap();
assert_eq!(
parser.html,
r##"<!DOCTYPE html>
@@ -320,8 +324,9 @@ message with a non-UTF-8 encoding: äöüßÄÖÜ<br/>
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn test_htmlparse_plain_flowed() {
let t = TestContext::new().await;
let raw = include_bytes!("../test-data/message/text_plain_flowed.eml");
let parser = HtmlMsgParser::from_bytes(raw).await.unwrap();
let parser = HtmlMsgParser::from_bytes(&t.ctx, raw).await.unwrap();
assert!(parser.plain.unwrap().flowed);
assert_eq!(
parser.html,
@@ -342,8 +347,9 @@ and will be wrapped as usual.<br/>
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn test_htmlparse_alt_plain() {
let t = TestContext::new().await;
let raw = include_bytes!("../test-data/message/text_alt_plain.eml");
let parser = HtmlMsgParser::from_bytes(raw).await.unwrap();
let parser = HtmlMsgParser::from_bytes(&t.ctx, raw).await.unwrap();
assert_eq!(
parser.html,
r##"<!DOCTYPE html>
@@ -363,8 +369,9 @@ test some special html-characters as &lt; &gt; and &amp; but also &quot; and &#x
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn test_htmlparse_html() {
let t = TestContext::new().await;
let raw = include_bytes!("../test-data/message/text_html.eml");
let parser = HtmlMsgParser::from_bytes(raw).await.unwrap();
let parser = HtmlMsgParser::from_bytes(&t.ctx, raw).await.unwrap();
// on windows, `\r\n` linends are returned from mimeparser,
// however, rust multiline-strings use just `\n`;
@@ -380,8 +387,9 @@ test some special html-characters as &lt; &gt; and &amp; but also &quot; and &#x
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn test_htmlparse_alt_html() {
let t = TestContext::new().await;
let raw = include_bytes!("../test-data/message/text_alt_html.eml");
let parser = HtmlMsgParser::from_bytes(raw).await.unwrap();
let parser = HtmlMsgParser::from_bytes(&t.ctx, raw).await.unwrap();
assert_eq!(
parser.html.replace('\r', ""), // see comment in test_htmlparse_html()
r##"<html>
@@ -394,8 +402,9 @@ test some special html-characters as &lt; &gt; and &amp; but also &quot; and &#x
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn test_htmlparse_alt_plain_html() {
let t = TestContext::new().await;
let raw = include_bytes!("../test-data/message/text_alt_plain_html.eml");
let parser = HtmlMsgParser::from_bytes(raw).await.unwrap();
let parser = HtmlMsgParser::from_bytes(&t.ctx, raw).await.unwrap();
assert_eq!(
parser.html.replace('\r', ""), // see comment in test_htmlparse_html()
r##"<html>
@@ -412,6 +421,7 @@ test some special html-characters as &lt; &gt; and &amp; but also &quot; and &#x
async fn test_htmlparse_apple_cid_jpg() {
// load raw mime html-data with related image-part (cid:)
// and make sure, Content-Id has angle-brackets that are removed correctly.
let t = TestContext::new().await;
let raw = include_bytes!("../test-data/message/apple_cid_jpg.eml");
let test = String::from_utf8_lossy(raw);
assert!(test.contains("Content-Id: <8AE052EF-BC90-486F-BB78-58D3590308EC@fritz.box>"));
@@ -419,7 +429,7 @@ test some special html-characters as &lt; &gt; and &amp; but also &quot; and &#x
assert!(test.find("data:").is_none());
// parsing converts cid: to data:
let parser = HtmlMsgParser::from_bytes(raw).await.unwrap();
let parser = HtmlMsgParser::from_bytes(&t.ctx, raw).await.unwrap();
assert!(parser.html.contains("<html>"));
assert!(!parser.html.contains("Content-Id:"));
assert!(parser.html.contains("data:image/jpeg;base64,/9j/4AAQ"));

View File

@@ -16,7 +16,6 @@ use async_channel::Receiver;
use async_imap::types::{Fetch, Flag, Name, NameAttribute, UnsolicitedResponse};
use futures::StreamExt;
use num_traits::FromPrimitive;
use tracing::{error, info, warn};
use crate::chat::{self, ChatId, ChatIdBlocked};
use crate::config::Config;
@@ -307,7 +306,7 @@ impl Imap {
let oauth2 = self.config.lp.oauth2;
info!("Connecting to IMAP server");
info!(context, "Connecting to IMAP server");
let connection_res: Result<Client> = if self.config.lp.security == Socket::Starttls
|| self.config.lp.security == Socket::Plain
{
@@ -364,7 +363,7 @@ impl Imap {
let imap_pw: &str = config.lp.password.as_ref();
let login_res = if oauth2 {
info!("Logging into IMAP server with OAuth 2");
info!(context, "Logging into IMAP server with OAuth 2");
let addr: &str = config.addr.as_ref();
let token = get_oauth2_access_token(context, addr, imap_pw, true)
@@ -376,7 +375,7 @@ impl Imap {
};
client.authenticate("XOAUTH2", auth).await
} else {
info!("Logging into IMAP server with LOGIN");
info!(context, "Logging into IMAP server with LOGIN");
client.login(imap_user, imap_pw).await
};
@@ -392,7 +391,7 @@ impl Imap {
"IMAP-LOGIN as {}",
self.config.lp.user
)));
info!("Successfully logged into IMAP server");
info!(context, "Successfully logged into IMAP server");
Ok(())
}
@@ -400,7 +399,7 @@ impl Imap {
let imap_user = self.config.lp.user.to_owned();
let message = stock_str::cannot_login(context, &imap_user).await;
warn!("{} ({:#})", message, err);
warn!(context, "{} ({:#})", message, err);
let lock = context.wrong_pw_warning_mutex.lock().await;
if self.login_failed_once
@@ -408,7 +407,7 @@ impl Imap {
&& context.get_config_bool(Config::NotifyAboutWrongPw).await?
{
if let Err(e) = context.set_config(Config::NotifyAboutWrongPw, None).await {
warn!("{:#}", e);
warn!(context, "{:#}", e);
}
drop(lock);
@@ -418,7 +417,7 @@ impl Imap {
chat::add_device_msg_with_importance(context, None, Some(&mut msg), true)
.await
{
warn!("{:#}", e);
warn!(context, "{:#}", e);
}
} else {
self.login_failed_once = true;
@@ -446,8 +445,8 @@ impl Imap {
/// Drops the session without disconnecting properly.
/// Useful in case of an IMAP error, when it's unclear if it's in a correct state and it's
/// easier to setup a new connection.
pub fn trigger_reconnect(&mut self) {
info!("Dropping an IMAP connection.");
pub fn trigger_reconnect(&mut self, context: &Context) {
info!(context, "Dropping an IMAP connection.");
self.session = None;
}
@@ -511,7 +510,7 @@ impl Imap {
.as_mut()
.context("IMAP No connection established")?;
session.select_folder(Some(folder)).await?;
session.select_folder(context, Some(folder)).await?;
let mut list = session
.uid_fetch("1:*", RFC724MID_UID)
@@ -522,7 +521,7 @@ impl Imap {
let headers = match get_fetch_headers(&fetch) {
Ok(headers) => headers,
Err(err) => {
warn!("Failed to parse FETCH headers: {}", err);
warn!(context, "Failed to parse FETCH headers: {}", err);
continue;
}
};
@@ -540,6 +539,7 @@ impl Imap {
}
info!(
context,
"Resync: collected {} message IDs in folder {}",
msgs.len(),
folder,
@@ -582,7 +582,7 @@ impl Imap {
) -> Result<bool> {
let session = self.session.as_mut().context("no session")?;
let newly_selected = session
.select_or_create_folder(folder)
.select_or_create_folder(context, folder)
.await
.with_context(|| format!("failed to select or create folder {folder}"))?;
let mailbox = session
@@ -611,6 +611,7 @@ impl Imap {
} else if let Some(uid_next) = mailbox.uid_next {
if uid_next < old_uid_next {
warn!(
context,
"The server illegally decreased the uid_next of folder {} from {} to {} without changing validity ({}), resyncing UIDs...",
folder, old_uid_next, uid_next, new_uid_validity,
);
@@ -628,7 +629,7 @@ impl Imap {
set_modseq(context, folder, 0).await?;
if mailbox.exists == 0 {
info!("Folder \"{}\" is empty.", folder);
info!(context, "Folder \"{}\" is empty.", folder);
// set uid_next=1 for empty folders.
// If we do not do this here, we'll miss the first message
@@ -644,7 +645,10 @@ impl Imap {
let new_uid_next = match mailbox.uid_next {
Some(uid_next) => uid_next,
None => {
warn!("IMAP folder has no uid_next, fall back to fetching");
warn!(
context,
"IMAP folder has no uid_next, fall back to fetching"
);
// note that we use fetch by sequence number
// and thus we only need to get exactly the
// last-index message.
@@ -681,8 +685,13 @@ impl Imap {
job::schedule_resync(context).await?;
}
info!(
context,
"uid/validity change folder {}: new {}/{} previous {}/{}",
folder, new_uid_next, new_uid_validity, old_uid_next, old_uid_validity,
folder,
new_uid_next,
new_uid_validity,
old_uid_next,
old_uid_validity,
);
Ok(false)
}
@@ -698,7 +707,7 @@ impl Imap {
fetch_existing_msgs: bool,
) -> Result<bool> {
if should_ignore_folder(context, folder, folder_meaning).await? {
info!("Not fetching from {}", folder);
info!(context, "Not fetching from {}", folder);
return Ok(false);
}
@@ -708,7 +717,7 @@ impl Imap {
.with_context(|| format!("failed to select folder {folder}"))?;
if !new_emails && !fetch_existing_msgs {
info!("No new emails in folder {}", folder);
info!(context, "No new emails in folder {}", folder);
return Ok(false);
}
@@ -734,7 +743,7 @@ impl Imap {
let headers = match get_fetch_headers(fetch_response) {
Ok(headers) => headers,
Err(err) => {
warn!("Failed to parse FETCH headers: {}", err);
warn!(context, "Failed to parse FETCH headers: {}", err);
continue;
}
};
@@ -834,7 +843,7 @@ impl Imap {
set_uid_next(context, folder, new_uid_next).await?;
}
info!("{} mails read from \"{}\".", read_cnt, folder);
info!(context, "{} mails read from \"{}\".", read_cnt, folder);
let msg_ids: Vec<MsgId> = received_msgs
.iter()
@@ -881,7 +890,10 @@ impl Imap {
None => continue,
};
if let Some(folder) = context.get_config(config).await? {
info!("Fetching existing messages from folder \"{}\"", folder);
info!(
context,
"Fetching existing messages from folder \"{}\"", folder
);
self.fetch_new_messages(context, &folder, meaning, true)
.await
.context("could not fetch existing messages")?;
@@ -889,7 +901,7 @@ impl Imap {
}
}
info!("Done fetching existing messages.");
info!(context, "Done fetching existing messages.");
Ok(())
}
@@ -974,15 +986,21 @@ impl Session {
Err(err) => {
if context.should_delete_to_trash().await? {
error!(
context,
"Cannot move messages {} to {}, no fallback to COPY/DELETE because \
delete_to_trash is set. Error: {:#}",
set, target, err,
set,
target,
err,
);
return Err(err.into());
}
warn!(
context,
"Cannot move messages, fallback to COPY/DELETE {} to {}: {}",
set, target, err
set,
target,
err
);
}
}
@@ -993,14 +1011,14 @@ impl Session {
let copy = !context.is_trash(target).await?;
if copy {
info!(
"Server does not support MOVE, fallback to COPY/DELETE {} to {}",
set, target
context,
"Server does not support MOVE, fallback to COPY/DELETE {} to {}", set, target
);
self.uid_copy(&set, &target).await?;
} else {
error!(
"Server does not support MOVE, fallback to DELETE {} to {}",
set, target,
context,
"Server does not support MOVE, fallback to DELETE {} to {}", set, target,
);
}
context
@@ -1049,7 +1067,7 @@ impl Session {
// MOVE/DELETE operations. This does not result in multiple SELECT commands
// being sent because `select_folder()` does nothing if the folder is already
// selected.
self.select_folder(Some(folder)).await?;
self.select_folder(context, Some(folder)).await?;
// Empty target folder name means messages should be deleted.
if target.is_empty() {
@@ -1070,8 +1088,8 @@ impl Session {
// Expunge folder if needed, e.g. if some jobs have
// deleted messages on the server.
if let Err(err) = self.maybe_close_folder().await {
warn!("failed to close folder: {:?}", err);
if let Err(err) = self.maybe_close_folder(context).await {
warn!(context, "failed to close folder: {:?}", err);
}
Ok(())
@@ -1097,17 +1115,23 @@ impl Session {
.await?;
for (folder, rowid_set, uid_set) in UidGrouper::from(rows) {
self.select_folder(Some(&folder))
self.select_folder(context, Some(&folder))
.await
.context("failed to select folder")?;
if let Err(err) = self.add_flag_finalized_with_set(&uid_set, "\\Seen").await {
warn!(
context,
"Cannot mark messages {} in folder {} as seen, will retry later: {}.",
uid_set, folder, err
uid_set,
folder,
err
);
} else {
info!("Marked messages {} in folder {} as seen.", uid_set, folder);
info!(
context,
"Marked messages {} in folder {} as seen.", uid_set, folder
);
context
.sql
.execute(
@@ -1135,12 +1159,15 @@ impl Imap {
.with_context(|| format!("No IMAP connection established, folder: {folder}"))?;
if !session.can_condstore() {
info!("Server does not support CONDSTORE, skipping flag synchronization.");
info!(
context,
"Server does not support CONDSTORE, skipping flag synchronization."
);
return Ok(());
}
session
.select_folder(Some(folder))
.select_folder(context, Some(folder))
.await
.context("failed to select folder")?;
@@ -1153,8 +1180,8 @@ impl Imap {
// We are not interested in actual value of HIGHESTMODSEQ.
if mailbox.highest_modseq.is_none() {
info!(
"Mailbox {} does not support mod-sequences, skipping flag synchronization.",
folder
context,
"Mailbox {} does not support mod-sequences, skipping flag synchronization.", folder
);
return Ok(());
}
@@ -1176,7 +1203,7 @@ impl Imap {
let uid = if let Some(uid) = fetch.uid {
uid
} else {
info!("FETCH result contains no UID, skipping");
info!(context, "FETCH result contains no UID, skipping");
continue;
};
let is_seen = fetch.flags().any(|flag| flag == Flag::Seen);
@@ -1196,7 +1223,7 @@ impl Imap {
highest_modseq = modseq;
}
} else {
warn!("FETCH result contains no MODSEQ");
warn!(context, "FETCH result contains no MODSEQ");
}
}
@@ -1242,7 +1269,7 @@ impl Imap {
}
}
Err(err) => {
warn!("{}", err);
warn!(context, "{}", err);
continue;
}
};
@@ -1339,6 +1366,7 @@ impl Imap {
let session = self.session.as_mut().context("no IMAP session")?;
for (request_uids, set) in build_sequence_sets(&request_uids)? {
info!(
context,
"Starting a {} FETCH of message set \"{}\".",
if fetch_partially { "partial" } else { "full" },
set
@@ -1379,7 +1407,7 @@ impl Imap {
let next_fetch_response = match next_fetch_response {
Ok(next_fetch_response) => next_fetch_response,
Err(err) => {
warn!("Failed to process IMAP FETCH result: {}.", err);
warn!(context, "Failed to process IMAP FETCH result: {}.", err);
continue;
}
};
@@ -1395,21 +1423,24 @@ impl Imap {
// another client changes \Seen flag on a message after we do a prefetch but
// before fetch. It's not an error if we receive such unsolicited response.
info!(
"Skipping not requested FETCH response for UID {}.",
next_uid
context,
"Skipping not requested FETCH response for UID {}.", next_uid
);
} else if uid_msgs.insert(next_uid, next_fetch_response).is_some() {
warn!("Got duplicated UID {}.", next_uid);
warn!(context, "Got duplicated UID {}.", next_uid);
}
} else {
info!("Skipping FETCH response without UID.");
info!(context, "Skipping FETCH response without UID.");
}
}
let fetch_response = match fetch_response {
Some(fetch) => fetch,
None => {
warn!("Missed UID {} in the server response.", request_uid);
warn!(
context,
"Missed UID {} in the server response.", request_uid
);
continue;
}
};
@@ -1423,7 +1454,7 @@ impl Imap {
};
if is_deleted {
info!("Not processing deleted msg {}.", request_uid);
info!(context, "Not processing deleted msg {}.", request_uid);
last_uid = Some(request_uid);
continue;
}
@@ -1431,7 +1462,10 @@ impl Imap {
let body = if let Some(body) = body {
body
} else {
info!("Not processing message {} without a BODY.", request_uid);
info!(
context,
"Not processing message {} without a BODY.", request_uid
);
last_uid = Some(request_uid);
continue;
};
@@ -1442,13 +1476,17 @@ impl Imap {
rfc724_mid
} else {
error!(
context,
"No Message-ID corresponding to UID {} passed in uid_messsage_ids.",
request_uid
);
continue;
};
info!("Passing message UID {} to receive_imf().", request_uid);
info!(
context,
"Passing message UID {} to receive_imf().", request_uid
);
match receive_imf_inner(
context,
rfc724_mid,
@@ -1465,7 +1503,7 @@ impl Imap {
}
}
Err(err) => {
warn!("receive_imf error: {:#}.", err);
warn!(context, "receive_imf error: {:#}.", err);
}
};
last_uid = Some(request_uid)
@@ -1477,13 +1515,18 @@ impl Imap {
if count != request_uids.len() {
warn!(
context,
"Failed to fetch all UIDs: got {}, requested {}, we requested the UIDs {:?}.",
count,
request_uids.len(),
request_uids,
);
} else {
info!("Successfully received {} UIDs.", request_uids.len());
info!(
context,
"Successfully received {} UIDs.",
request_uids.len()
);
}
}
@@ -1524,7 +1567,7 @@ impl Imap {
return Some(ImapActionResult::RetryLater);
}
if let Err(err) = self.prepare(context).await {
warn!("prepare_imap_op failed: {}", err);
warn!(context, "prepare_imap_op failed: {}", err);
return Some(ImapActionResult::RetryLater);
}
@@ -1534,24 +1577,24 @@ impl Imap {
.context("no IMAP connection established")
{
Err(err) => {
error!("Failed to prepare IMAP operation: {:#}", err);
error!(context, "Failed to prepare IMAP operation: {:#}", err);
return Some(ImapActionResult::Failed);
}
Ok(session) => session,
};
match session.select_folder(Some(folder)).await {
match session.select_folder(context, Some(folder)).await {
Ok(_) => None,
Err(select_folder::Error::ConnectionLost) => {
warn!("Lost imap connection");
warn!(context, "Lost imap connection");
Some(ImapActionResult::RetryLater)
}
Err(select_folder::Error::BadFolderName(folder_name)) => {
warn!("invalid folder name: {:?}", folder_name);
warn!(context, "invalid folder name: {:?}", folder_name);
Some(ImapActionResult::Failed)
}
Err(err) => {
warn!("failed to select folder {:?}: {:#}", folder, err);
warn!(context, "failed to select folder {:?}: {:#}", folder, err);
Some(ImapActionResult::RetryLater)
}
}
@@ -1580,6 +1623,7 @@ impl Imap {
/// Returns first found or created folder name.
async fn configure_mvbox<'a>(
&mut self,
context: &Context,
folders: &[&'a str],
create_mvbox: bool,
) -> Result<Option<&'a str>> {
@@ -1590,15 +1634,15 @@ impl Imap {
// Close currently selected folder if needed.
// We are going to select folders using low-level EXAMINE operations below.
session.select_folder(None).await?;
session.select_folder(context, None).await?;
for folder in folders {
info!("Looking for MVBOX-folder \"{}\"...", &folder);
info!(context, "Looking for MVBOX-folder \"{}\"...", &folder);
let res = session.examine(&folder).await;
if res.is_ok() {
info!(
"MVBOX-folder {:?} successfully selected, using it.",
&folder
context,
"MVBOX-folder {:?} successfully selected, using it.", &folder
);
session.close().await?;
return Ok(Some(folder));
@@ -1609,11 +1653,11 @@ impl Imap {
for folder in folders {
match session.create(&folder).await {
Ok(_) => {
info!("MVBOX-folder {} created.", &folder);
info!(context, "MVBOX-folder {} created.", &folder);
return Ok(Some(folder));
}
Err(err) => {
warn!("Cannot create MVBOX-folder {:?}: {}", &folder, err);
warn!(context, "Cannot create MVBOX-folder {:?}: {}", &folder, err);
}
}
}
@@ -1638,7 +1682,7 @@ impl Imap {
while let Some(folder) = folders.next().await {
let folder = folder?;
info!("Scanning folder: {:?}", folder);
info!(context, "Scanning folder: {:?}", folder);
// Update the delimiter iff there is a different one, but only once.
if let Some(d) = folder.delimiter() {
@@ -1662,11 +1706,11 @@ impl Imap {
}
drop(folders);
info!("Using \"{}\" as folder-delimiter.", delimiter);
info!(context, "Using \"{}\" as folder-delimiter.", delimiter);
let fallback_folder = format!("INBOX{delimiter}DeltaChat");
let mvbox_folder = self
.configure_mvbox(&["DeltaChat", &fallback_folder], create_mvbox)
.configure_mvbox(context, &["DeltaChat", &fallback_folder], create_mvbox)
.await
.context("failed to configure mvbox")?;
@@ -1674,7 +1718,7 @@ impl Imap {
.set_config(Config::ConfiguredInboxFolder, Some("INBOX"))
.await?;
if let Some(mvbox_folder) = mvbox_folder {
info!("Setting MVBOX FOLDER TO {}", &mvbox_folder);
info!(context, "Setting MVBOX FOLDER TO {}", &mvbox_folder);
context
.set_config(Config::ConfiguredMvboxFolder, Some(mvbox_folder))
.await?;
@@ -1687,7 +1731,7 @@ impl Imap {
.set_raw_config_int("folders_configured", DC_FOLDERS_CONFIGURED_VERSION)
.await?;
info!("FINISHED configuring IMAP-folders.");
info!(context, "FINISHED configuring IMAP-folders.");
Ok(())
}
}
@@ -1697,15 +1741,18 @@ impl Session {
/// Drains all responses from `session.unsolicited_responses` in the process.
/// If this returns `true`, this means that new emails arrived and you should
/// fetch again, even if you just fetched.
fn server_sent_unsolicited_exists(&self) -> Result<bool> {
fn server_sent_unsolicited_exists(&self, context: &Context) -> Result<bool> {
let mut unsolicited_exists = false;
while let Ok(response) = self.unsolicited_responses.try_recv() {
match response {
UnsolicitedResponse::Exists(_) => {
info!("Need to fetch again, got unsolicited EXISTS {:?}", response);
info!(
context,
"Need to fetch again, got unsolicited EXISTS {:?}", response
);
unsolicited_exists = true;
}
_ => info!("ignoring unsolicited response {:?}", response),
_ => info!(context, "ignoring unsolicited response {:?}", response),
}
}
Ok(unsolicited_exists)
@@ -1749,8 +1796,8 @@ async fn should_move_out_of_spam(
Some(res) => res,
None => {
warn!(
"Contact with From address {:?} cannot exist, not moving out of spam",
from
context,
"Contact with From address {:?} cannot exist, not moving out of spam", from
);
return Ok(false);
}
@@ -2073,7 +2120,7 @@ pub(crate) async fn prefetch_should_download(
// (prevent_rename is the last argument of from_field_to_contact_id())
if flags.any(|f| f == Flag::Draft) {
info!("Ignoring draft message");
info!(context, "Ignoring draft message");
return Ok(false);
}
@@ -2378,8 +2425,8 @@ async fn add_all_recipients_as_contacts(
m
} else {
info!(
"Folder {} is not configured, skipping fetching contacts from it.",
folder
context,
"Folder {} is not configured, skipping fetching contacts from it.", folder
);
return Ok(());
};
@@ -2403,8 +2450,10 @@ async fn add_all_recipients_as_contacts(
let recipient_addr = match ContactAddress::new(&recipient.addr) {
Err(err) => {
warn!(
context,
"Could not add contact for recipient with address {:?}: {:#}",
recipient.addr, err
recipient.addr,
err
);
continue;
}

View File

@@ -4,7 +4,6 @@ use anyhow::{bail, Context as _, Result};
use async_channel::Receiver;
use async_imap::extensions::idle::IdleResponse;
use futures_lite::FutureExt;
use tracing::{error, info, warn};
use super::session::Session;
use super::Imap;
@@ -16,6 +15,7 @@ const IDLE_TIMEOUT: Duration = Duration::from_secs(23 * 60);
impl Session {
pub async fn idle(
mut self,
context: &Context,
idle_interrupt_receiver: Receiver<InterruptInfo>,
watch_folder: Option<String>,
) -> Result<(Self, InterruptInfo)> {
@@ -27,14 +27,14 @@ impl Session {
let mut info = Default::default();
self.select_folder(watch_folder.as_deref()).await?;
self.select_folder(context, watch_folder.as_deref()).await?;
if self.server_sent_unsolicited_exists()? {
if self.server_sent_unsolicited_exists(context)? {
return Ok((self, info));
}
if let Ok(info) = idle_interrupt_receiver.try_recv() {
info!("skip idle, got interrupt {:?}", info);
info!(context, "skip idle, got interrupt {:?}", info);
return Ok((self, info));
}
@@ -56,7 +56,10 @@ impl Session {
}
let folder_name = watch_folder.as_deref().unwrap_or("None");
info!("{}: Idle entering wait-on-remote state", folder_name);
info!(
context,
"{}: Idle entering wait-on-remote state", folder_name
);
let fut = idle_wait.map(|ev| ev.map(Event::IdleResponse)).race(async {
let info = idle_interrupt_receiver.recv().await;
@@ -68,20 +71,29 @@ impl Session {
match fut.await {
Ok(Event::IdleResponse(IdleResponse::NewData(x))) => {
info!("{}: Idle has NewData {:?}", folder_name, x);
info!(context, "{}: Idle has NewData {:?}", folder_name, x);
}
Ok(Event::IdleResponse(IdleResponse::Timeout)) => {
info!("{}: Idle-wait timeout or interruption", folder_name);
info!(
context,
"{}: Idle-wait timeout or interruption", folder_name
);
}
Ok(Event::IdleResponse(IdleResponse::ManualInterrupt)) => {
info!("{}: Idle wait was interrupted manually", folder_name);
info!(
context,
"{}: Idle wait was interrupted manually", folder_name
);
}
Ok(Event::Interrupt(i)) => {
info!("{}: Idle wait was interrupted: {:?}", folder_name, &i);
info!(
context,
"{}: Idle wait was interrupted: {:?}", folder_name, &i
);
info = i;
}
Err(err) => {
warn!("{}: Idle wait errored: {:?}", folder_name, err);
warn!(context, "{}: Idle wait errored: {:?}", folder_name, err);
}
}
@@ -112,14 +124,14 @@ impl Imap {
let watch_folder = if let Some(watch_folder) = watch_folder {
watch_folder
} else {
info!("IMAP-fake-IDLE: no folder, waiting for interrupt");
info!(context, "IMAP-fake-IDLE: no folder, waiting for interrupt");
return self
.idle_interrupt_receiver
.recv()
.await
.unwrap_or_default();
};
info!("IMAP-fake-IDLEing folder={:?}", watch_folder);
info!(context, "IMAP-fake-IDLEing folder={:?}", watch_folder);
// check every minute if there are new messages
// TODO: grow sleep durations / make them more flexible
@@ -147,7 +159,7 @@ impl Imap {
// (setup_handle_if_needed might not know about them if we
// never successfully connected)
if let Err(err) = self.prepare(context).await {
warn!("fake_idle: could not connect: {}", err);
warn!(context, "fake_idle: could not connect: {}", err);
continue;
}
if let Some(session) = &self.session {
@@ -156,7 +168,7 @@ impl Imap {
break InterruptInfo::new(false);
}
}
info!("fake_idle is connected");
info!(context, "fake_idle is connected");
// we are connected, let's see if fetching messages results
// in anything. If so, we behave as if IDLE had data but
// will have already fetched the messages so perform_*_fetch
@@ -166,26 +178,27 @@ impl Imap {
.await
{
Ok(res) => {
info!("fetch_new_messages returned {:?}", res);
info!(context, "fetch_new_messages returned {:?}", res);
if res {
break InterruptInfo::new(false);
}
}
Err(err) => {
error!("could not fetch from folder: {:#}", err);
self.trigger_reconnect();
error!(context, "could not fetch from folder: {:#}", err);
self.trigger_reconnect(context);
}
}
}
Event::Interrupt(info) => {
// Interrupt
info!("Fake IDLE interrupted");
info!(context, "Fake IDLE interrupted");
break info;
}
}
};
info!(
context,
"IMAP-fake-IDLE done after {:.4}s",
SystemTime::now()
.duration_since(fake_idle_start_time)

View File

@@ -2,7 +2,6 @@ use std::{collections::BTreeMap, time::Instant};
use anyhow::{Context as _, Result};
use futures::stream::StreamExt;
use tracing::info;
use super::{get_folder_meaning_by_attrs, get_folder_meaning_by_name};
use crate::config::Config;
@@ -25,7 +24,7 @@ impl Imap {
return Ok(false);
}
}
info!("Starting full folder scan");
info!(context, "Starting full folder scan");
self.prepare(context).await?;
let folders = self.list_folders(context).await?;
@@ -66,7 +65,7 @@ impl Imap {
{
let session = self.session.as_mut().context("no session")?;
// Drain leftover unsolicited EXISTS messages
session.server_sent_unsolicited_exists()?;
session.server_sent_unsolicited_exists(context)?;
loop {
self.fetch_move_delete(context, folder.name(), folder_meaning)
@@ -75,7 +74,7 @@ impl Imap {
let session = self.session.as_mut().context("no session")?;
// If the server sent an unsocicited EXISTS during the fetch, we need to fetch again
if !session.server_sent_unsolicited_exists()? {
if !session.server_sent_unsolicited_exists(context)? {
break;
}
}

View File

@@ -1,9 +1,9 @@
//! # IMAP folder selection module.
use anyhow::Context as _;
use tracing::info;
use super::session::Session as ImapSession;
use crate::context::Context;
type Result<T> = std::result::Result<T, Error>;
@@ -34,13 +34,13 @@ impl ImapSession {
///
/// CLOSE is considerably faster than an EXPUNGE, see
/// <https://tools.ietf.org/html/rfc3501#section-6.4.2>
pub(super) async fn maybe_close_folder(&mut self) -> anyhow::Result<()> {
pub(super) async fn maybe_close_folder(&mut self, context: &Context) -> anyhow::Result<()> {
if let Some(folder) = &self.selected_folder {
if self.selected_folder_needs_expunge {
info!("Expunge messages in \"{}\".", folder);
info!(context, "Expunge messages in \"{}\".", folder);
self.close().await.context("IMAP close/expunge failed")?;
info!("close/expunge succeeded");
info!(context, "close/expunge succeeded");
self.selected_folder = None;
self.selected_folder_needs_expunge = false;
}
@@ -51,7 +51,11 @@ impl ImapSession {
/// Selects a folder, possibly updating uid_validity and, if needed,
/// expunging the folder to remove delete-marked messages.
/// Returns whether a new folder was selected.
pub(super) async fn select_folder(&mut self, folder: Option<&str>) -> Result<NewlySelected> {
pub(super) async fn select_folder(
&mut self,
context: &Context,
folder: Option<&str>,
) -> Result<NewlySelected> {
// if there is a new folder and the new folder is equal to the selected one, there's nothing to do.
// if there is _no_ new folder, we continue as we might want to expunge below.
if let Some(folder) = folder {
@@ -63,7 +67,7 @@ impl ImapSession {
}
// deselect existing folder, if needed (it's also done implicitly by SELECT, however, without EXPUNGE then)
self.maybe_close_folder().await?;
self.maybe_close_folder(context).await?;
// select new folder
if let Some(folder) = folder {
@@ -100,18 +104,19 @@ impl ImapSession {
/// Selects a folder. Tries to create it once and select again if the folder does not exist.
pub(super) async fn select_or_create_folder(
&mut self,
context: &Context,
folder: &str,
) -> anyhow::Result<NewlySelected> {
match self.select_folder( Some(folder)).await {
match self.select_folder(context, Some(folder)).await {
Ok(newly_selected) => Ok(newly_selected),
Err(err) => match err {
Error::NoFolder(..) => {
info!( "Failed to select folder {} because it does not exist, trying to create it.", folder);
info!(context, "Failed to select folder {} because it does not exist, trying to create it.", folder);
self.create(folder).await.with_context(|| {
format!("Couldn't select folder ('{err}'), then create() failed")
})?;
Ok(self.select_folder(Some(folder)).await.with_context(|| format!("failed to select newely created folder {folder}"))?)
Ok(self.select_folder(context, Some(folder)).await.with_context(|| format!("failed to select newely created folder {folder}"))?)
}
_ => Err(err).with_context(|| format!("failed to select folder {folder} with error other than NO, not trying to create it")),
},

View File

@@ -11,7 +11,6 @@ use futures_lite::FutureExt;
use rand::{thread_rng, Rng};
use tokio::fs::{self, File};
use tokio_tar::Archive;
use tracing::{error, info, warn};
use crate::blob::{BlobDirContents, BlobObject};
use crate::chat::{self, delete_and_reset_all_device_msgs, ChatId};
@@ -92,22 +91,24 @@ pub async fn imex(
let cancel = context.alloc_ongoing().await?;
let res = {
let _guard = context.scheduler.pause(context.clone()).await;
imex_inner(context, what, path, passphrase)
let mut guard = context.scheduler.pause(context.clone()).await;
let res = imex_inner(context, what, path, passphrase)
.race(async {
cancel.recv().await.ok();
Err(format_err!("canceled"))
})
.await
.await;
guard.resume().await;
res
};
context.free_ongoing().await;
if let Err(err) = res.as_ref() {
// We are using Anyhow's .context() and to show the inner error, too, we need the {:#}:
error!("IMEX failed to complete: {:#}", err);
error!(context, "IMEX failed to complete: {:#}", err);
context.emit_event(EventType::ImexProgress(0));
} else {
info!("IMEX successfully completed");
info!(context, "IMEX successfully completed");
context.emit_event(EventType::ImexProgress(1000));
}
@@ -115,7 +116,7 @@ pub async fn imex(
}
/// Returns the filename of the backup found (otherwise an error)
pub async fn has_backup(dir_name: &Path) -> Result<String> {
pub async fn has_backup(_context: &Context, dir_name: &Path) -> Result<String> {
let mut dir_iter = tokio::fs::read_dir(dir_name).await?;
let mut newest_backup_name = "".to_string();
let mut newest_backup_path: Option<PathBuf> = None;
@@ -145,7 +146,7 @@ pub async fn has_backup(dir_name: &Path) -> Result<String> {
///
/// Returns setup code.
pub async fn initiate_key_transfer(context: &Context) -> Result<String> {
let setup_code = create_setup_code();
let setup_code = create_setup_code(context);
/* this may require a keypair to be created. this may take a second ... */
let setup_file_content = render_setup_file(context, &setup_code).await?;
/* encrypting may also take a while ... */
@@ -227,7 +228,7 @@ pub async fn render_setup_file(context: &Context, passphrase: &str) -> Result<St
}
/// Creates a new setup code for Autocrypt Setup Message.
pub fn create_setup_code() -> String {
pub fn create_setup_code(_context: &Context) -> String {
let mut random_val: u16;
let mut rng = thread_rng();
let mut ret = String::new();
@@ -344,7 +345,7 @@ async fn set_self_key(
)
.await?;
info!("stored self key: {:?}", keypair.secret.key_id());
info!(context, "stored self key: {:?}", keypair.secret.key_id());
Ok(())
}
@@ -377,7 +378,7 @@ async fn imex_inner(
path: &Path,
passphrase: Option<String>,
) -> Result<()> {
info!("Import/export dir: {}", path.display());
info!(context, "Import/export dir: {}", path.display());
ensure!(context.sql.is_open().await, "Database not opened.");
context.emit_event(EventType::ImexProgress(10));
@@ -427,6 +428,7 @@ async fn import_backup(
let backup_file = File::open(backup_to_import).await?;
let file_size = backup_file.metadata().await?.len();
info!(
context,
"Import \"{}\" ({} bytes) to \"{}\".",
backup_to_import.display(),
file_size,
@@ -468,7 +470,7 @@ async fn import_backup(
if let Some(name) = from_path.file_name() {
fs::rename(&from_path, context.get_blobdir().join(name)).await?;
} else {
warn!("No file name.");
warn!(context, "No file name");
}
}
}
@@ -527,6 +529,7 @@ async fn export_backup(context: &Context, dir: &Path, passphrase: String) -> Res
.context("could not export database")?;
info!(
context,
"Backup '{}' to '{}'.",
context.get_dbfile().display(),
dest_path.display(),
@@ -540,7 +543,7 @@ async fn export_backup(context: &Context, dir: &Path, passphrase: String) -> Res
context.emit_event(EventType::ImexFileWritten(dest_path));
}
Err(e) => {
error!("backup failed: {}", e);
error!(context, "backup failed: {}", e);
}
}
@@ -612,7 +615,7 @@ async fn import_self_keys(context: &Context, dir: &Path) -> Result<()> {
continue;
}
set_default = if name_f.contains("legacy") {
info!("found legacy key '{}'", path_plus_name.display());
info!(context, "found legacy key '{}'", path_plus_name.display());
false
} else {
true
@@ -622,13 +625,17 @@ async fn import_self_keys(context: &Context, dir: &Path) -> Result<()> {
continue;
}
}
info!("considering key file: {}", path_plus_name.display());
info!(
context,
"considering key file: {}",
path_plus_name.display()
);
match read_file(context, &path_plus_name).await {
Ok(buf) => {
let armored = std::string::String::from_utf8_lossy(&buf);
if let Err(err) = set_self_key(context, &armored, set_default, false).await {
info!("set_self_key: {}", err);
info!(context, "set_self_key: {}", err);
continue;
}
}
@@ -673,7 +680,7 @@ async fn export_self_keys(context: &Context, dir: &Path) -> Result<()> {
let id = Some(id).filter(|_| is_default != 0);
if let Ok(key) = public_key {
if let Err(err) = export_key_to_asc_file(context, dir, id, &key).await {
error!("Failed to export public key: {:#}.", err);
error!(context, "Failed to export public key: {:#}.", err);
export_errors += 1;
}
} else {
@@ -681,7 +688,7 @@ async fn export_self_keys(context: &Context, dir: &Path) -> Result<()> {
}
if let Ok(key) = private_key {
if let Err(err) = export_key_to_asc_file(context, dir, id, &key).await {
error!("Failed to export private key: {:#}.", err);
error!(context, "Failed to export private key: {:#}.", err);
export_errors += 1;
}
} else {
@@ -718,6 +725,7 @@ where
dir.join(format!("{}-key-{}.asc", kind, &id))
};
info!(
context,
"Exporting key {:?} to {}",
key.key_id(),
file_name.display()
@@ -758,7 +766,7 @@ async fn export_database(context: &Context, dest: &Path, passphrase: String) ->
.sql
.call_write(|conn| {
conn.execute("VACUUM;", params![])
.map_err(|err| warn!("Vacuum failed, exporting anyway {err:#}."))
.map_err(|err| warn!(context, "Vacuum failed, exporting anyway {err}"))
.ok();
conn.execute(
"ATTACH DATABASE ? AS backup KEY ?",
@@ -818,7 +826,8 @@ mod tests {
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn test_create_setup_code() {
let setupcode = create_setup_code();
let t = TestContext::new().await;
let setupcode = create_setup_code(&t);
assert_eq!(setupcode.len(), 44);
assert_eq!(setupcode.chars().nth(4).unwrap(), '-');
assert_eq!(setupcode.chars().nth(9).unwrap(), '-');
@@ -883,7 +892,7 @@ mod tests {
let context2 = TestContext::new().await;
assert!(!context2.is_configured().await?);
assert!(has_backup(backup_dir.path()).await.is_err());
assert!(has_backup(&context2, backup_dir.path()).await.is_err());
// export from context1
assert!(
@@ -897,7 +906,7 @@ mod tests {
.await;
// import to context2
let backup = has_backup(backup_dir.path()).await?;
let backup = has_backup(&context2, backup_dir.path()).await?;
// Import of unencrypted backup with incorrect "foobar" backup passphrase fails.
assert!(imex(
@@ -942,7 +951,7 @@ mod tests {
imex(&context1, ImexMode::ExportBackup, backup_dir.path(), None).await?;
// import to context2
let backup = has_backup(backup_dir.path()).await?;
let backup = has_backup(&context2, backup_dir.path()).await?;
let context2_cloned = context2.clone();
let handle = task::spawn(async move {
imex(

View File

@@ -22,7 +22,6 @@
//! getter can not connect to an impersonated provider and the provider does not offer the
//! download to an impersonated getter.
use std::cmp::Ordering;
use std::future::Future;
use std::net::Ipv4Addr;
use std::ops::Deref;
@@ -44,7 +43,6 @@ use tokio::sync::broadcast::error::RecvError;
use tokio::sync::{broadcast, Mutex};
use tokio::task::{JoinHandle, JoinSet};
use tokio_stream::wrappers::ReadDirStream;
use tracing::{error, info, warn};
use crate::blob::BlobDirContents;
use crate::chat::delete_and_reset_all_device_msgs;
@@ -93,7 +91,7 @@ impl BackupProvider {
// Acquire global "ongoing" mutex.
let cancel_token = context.alloc_ongoing().await?;
let paused_guard = context.scheduler.pause(context.clone()).await;
let mut paused_guard = context.scheduler.pause(context.clone()).await;
let context_dir = context
.get_blobdir()
.parent()
@@ -101,7 +99,7 @@ impl BackupProvider {
let dbfile = context_dir.join(DBFILE_BACKUP_NAME);
if fs::metadata(&dbfile).await.is_ok() {
fs::remove_file(&dbfile).await?;
warn!("Previous database export deleted.");
warn!(context, "Previous database export deleted");
}
let dbfile = TempPathGuard::new(dbfile);
let res = tokio::select! {
@@ -110,7 +108,7 @@ impl BackupProvider {
match res {
Ok(slf) => Ok(slf),
Err(err) => {
error!("Failed to set up second device setup: {err:#}.");
error!(context, "Failed to set up second device setup: {:#}", err);
Err(err)
},
}
@@ -121,6 +119,7 @@ impl BackupProvider {
Ok((provider, ticket)) => (provider, ticket),
Err(err) => {
context.free_ongoing().await;
paused_guard.resume().await;
return Err(err);
}
};
@@ -129,9 +128,7 @@ impl BackupProvider {
tokio::spawn(async move {
let res = Self::watch_provider(&context, provider, cancel_token).await;
context.free_ongoing().await;
// Explicit drop to move the guards into this future
drop(paused_guard);
paused_guard.resume().await;
drop(dbfile);
res
})
@@ -171,7 +168,7 @@ impl BackupProvider {
.auth_token(token)
.spawn()?;
context.emit_event(SendProgress::ProviderListening.into());
info!("Waiting for remote to connect.");
info!(context, "Waiting for remote to connect");
let ticket = provider.ticket(hash);
Ok((provider, ticket))
}
@@ -258,7 +255,7 @@ impl BackupProvider {
match &res {
Ok(_) => context.emit_event(SendProgress::Completed.into()),
Err(err) => {
error!("Backup transfer failure: {err:#}.");
error!(context, "Backup transfer failure: {err:#}");
context.emit_event(SendProgress::Failed.into())
}
}
@@ -372,7 +369,7 @@ pub async fn get_backup(context: &Context, qr: Qr) -> Result<()> {
!context.is_configured().await?,
"Cannot import backups to accounts in use."
);
let _guard = context.scheduler.pause(context.clone()).await;
let mut guard = context.scheduler.pause(context.clone()).await;
// Acquire global "ongoing" mutex.
let cancel_token = context.alloc_ongoing().await?;
@@ -384,38 +381,25 @@ pub async fn get_backup(context: &Context, qr: Qr) -> Result<()> {
}
_ = cancel_token.recv() => Err(format_err!("cancelled")),
};
guard.resume().await;
res
}
async fn get_backup_inner(context: &Context, qr: Qr) -> Result<()> {
let mut ticket = match qr {
let ticket = match qr {
Qr::Backup { ticket } => ticket,
_ => bail!("QR code for backup must be of type DCBACKUP"),
};
if ticket.addrs.is_empty() {
bail!("ticket is missing addresses to dial");
}
// Crude sorting, most local wifi's are in the 192.168.0.0/24 range so this will try
// them first.
ticket.addrs.sort_by(|a, b| {
let a = a.to_string();
let b = b.to_string();
if a.starts_with("192.168.") && !b.starts_with("192.168.") {
Ordering::Less
} else if b.starts_with("192.168.") && !a.starts_with("192.168.") {
Ordering::Greater
} else {
Ordering::Equal
}
});
for addr in &ticket.addrs {
let opts = Options {
addr: *addr,
peer_id: Some(ticket.peer),
keylog: false,
};
info!("Attempting to contact {addr}.");
info!(context, "attempting to contact {}", addr);
match transfer_from_provider(context, &ticket, opts).await {
Ok(_) => {
delete_and_reset_all_device_msgs(context).await?;
@@ -423,7 +407,7 @@ async fn get_backup_inner(context: &Context, qr: Qr) -> Result<()> {
return Ok(());
}
Err(TransferError::ConnectionError(err)) => {
warn!("Connection error: {err:#}.");
warn!(context, "Connection error: {err:#}.");
continue;
}
Err(TransferError::Other(err)) => {
@@ -493,6 +477,7 @@ async fn transfer_from_provider(
match res {
Ok(stats) => {
info!(
context,
"Backup transfer finished, transfer rate is {} Mbps.",
stats.mbits()
);
@@ -527,7 +512,7 @@ async fn on_blob(
let dbfile = context_dir.join(DBFILE_BACKUP_NAME);
if fs::metadata(&dbfile).await.is_ok() {
fs::remove_file(&dbfile).await?;
warn!("Previous database export deleted.");
warn!(context, "Previous database export deleted");
}
dbfile
} else {
@@ -547,11 +532,12 @@ async fn on_blob(
let token = ticket.token.to_string();
jobs.lock().await.spawn(async move {
if let Err(err) = context.sql.import(&path, token).await {
error!("Cannot import database: {err:#?}.");
error!(context, "cannot import database: {:#?}", err);
}
if let Err(err) = fs::remove_file(&path).await {
error!(
"Failed to delete database import file '{}': {:#?}.",
context,
"failed to delete database import file '{}': {:#?}",
path.display(),
err,
);

View File

@@ -11,7 +11,6 @@ use std::sync::atomic::Ordering;
use anyhow::{Context as _, Result};
use deltachat_derive::{FromSql, ToSql};
use rand::{thread_rng, Rng};
use tracing::{error, info, warn};
use crate::context::Context;
use crate::imap::Imap;
@@ -111,7 +110,7 @@ impl Job {
///
/// The Job is consumed by this method.
pub(crate) async fn save(self, context: &Context) -> Result<()> {
info!("saving job {:?}", self);
info!(context, "saving job {:?}", self);
if self.job_id != 0 {
context
@@ -154,7 +153,7 @@ impl<'a> Connection<'a> {
}
pub(crate) async fn perform_job(context: &Context, mut connection: Connection<'_>, mut job: Job) {
info!("Job {} started...", &job);
info!(context, "job {} started...", &job);
let try_res = match perform_job_action(context, &mut job, &mut connection, 0).await {
Status::RetryNow => perform_job_action(context, &mut job, &mut connection, 1).await,
@@ -166,33 +165,42 @@ pub(crate) async fn perform_job(context: &Context, mut connection: Connection<'_
let tries = job.tries + 1;
if tries < JOB_RETRIES {
info!("Increase job {job} tries to {tries}.");
info!(context, "increase job {} tries to {}", job, tries);
job.tries = tries;
let time_offset = get_backoff_time_offset(tries);
job.desired_timestamp = time() + time_offset;
info!(
context,
"job #{} not succeeded on try #{}, retry in {} seconds.",
job.job_id, tries, time_offset
job.job_id,
tries,
time_offset
);
job.save(context).await.unwrap_or_else(|err| {
error!("Failed to save job: {err:#}.");
error!(context, "failed to save job: {:#}", err);
});
} else {
info!("Remove job {job} as it exhausted {JOB_RETRIES} retries.");
info!(
context,
"remove job {} as it exhausted {} retries", job, JOB_RETRIES
);
job.delete(context).await.unwrap_or_else(|err| {
error!("Failed to delete job: {err:#}.");
error!(context, "failed to delete job: {:#}", err);
});
}
}
Status::Finished(res) => {
if let Err(err) = res {
warn!("Remove job {job} as it failed with error {err:#}.");
warn!(
context,
"remove job {} as it failed with error {:#}", job, err
);
} else {
info!("Remove job {job} as it succeeded.");
info!(context, "remove job {} as it succeeded", job);
}
job.delete(context).await.unwrap_or_else(|err| {
error!("failed to delete job: {:#}", err);
error!(context, "failed to delete job: {:#}", err);
});
}
}
@@ -204,13 +212,13 @@ async fn perform_job_action(
connection: &mut Connection<'_>,
tries: u32,
) -> Status {
info!("Begin immediate try {tries} of job {job}.");
info!(context, "begin immediate try {} of job {}", tries, job);
let try_res = match job.action {
Action::DownloadMsg => job.download_msg(context, connection.inbox()).await,
};
info!("Finished immediate try {tries} of job {job}.");
info!(context, "Finished immediate try {} of job {}", tries, job);
try_res
}
@@ -242,7 +250,7 @@ pub(crate) async fn schedule_resync(context: &Context) -> Result<()> {
pub async fn add(context: &Context, job: Job) -> Result<()> {
job.save(context).await.context("failed to save job")?;
info!("Interrupt: IMAP.");
info!(context, "interrupt: imap");
context
.scheduler
.interrupt_inbox(InterruptInfo::new(false))
@@ -256,7 +264,7 @@ pub async fn add(context: &Context, job: Job) -> Result<()> {
/// jobs, this is tricky and probably wrong currently. Look at the
/// SQL queries for details.
pub(crate) async fn load_next(context: &Context, info: &InterruptInfo) -> Result<Option<Job>> {
info!("Loading job.");
info!(context, "loading job");
let query;
let params;
@@ -308,19 +316,19 @@ LIMIT 1;
Ok(job) => return Ok(job),
Err(err) => {
// Remove invalid job from the DB
info!("Cleaning up job, because of {err:#}.");
info!(context, "cleaning up job, because of {:#}", err);
// TODO: improve by only doing a single query
let id = context
.sql
.query_row(query, params.clone(), |row| row.get::<_, i32>(0))
.await
.context("failed to retrieve invalid job ID from the database")?;
.context("Failed to retrieve invalid job ID from the database")?;
context
.sql
.execute("DELETE FROM jobs WHERE id=?;", paramsv![id])
.await
.with_context(|| format!("failed to delete invalid job {id}"))?;
.with_context(|| format!("Failed to delete invalid job {id}"))?;
}
}
}

View File

@@ -14,7 +14,6 @@ pub use pgp::composed::{SignedPublicKey, SignedSecretKey};
use pgp::ser::Serialize;
use pgp::types::{KeyTrait, SecretKeyTrait};
use tokio::runtime::Handle;
use tracing::info;
use crate::config::Config;
use crate::constants::KeyGenType;
@@ -212,13 +211,14 @@ async fn generate_keypair(context: &Context) -> Result<KeyPair> {
let start = std::time::SystemTime::now();
let keytype = KeyGenType::from_i32(context.get_config_int(Config::KeyGenType).await?)
.unwrap_or_default();
info!("Generating keypair with type {}", keytype);
info!(context, "Generating keypair with type {}", keytype);
let keypair = Handle::current()
.spawn_blocking(move || crate::pgp::create_keypair(addr, keytype))
.await??;
store_self_keypair(context, &keypair, KeyPairUse::Default).await?;
info!(
context,
"Keypair generated in {:.3}s.",
start.elapsed().unwrap_or_default().as_secs()
);

View File

@@ -40,6 +40,7 @@ pub trait ToSql: rusqlite::ToSql + Send + Sync {}
impl<T: rusqlite::ToSql + Send + Sync> ToSql for T {}
#[macro_use]
pub mod log;
#[cfg(feature = "internals")]

View File

@@ -8,7 +8,6 @@ use async_channel::Receiver;
use bitflags::bitflags;
use quick_xml::events::{BytesEnd, BytesStart, BytesText};
use tokio::time::timeout;
use tracing::{info, warn};
use crate::chat::{self, ChatId};
use crate::contact::ContactId;
@@ -338,9 +337,9 @@ pub async fn set(context: &Context, latitude: f64, longitude: f64, accuracy: f64
ContactId::SELF,
]
).await {
warn!( "failed to store location {:#}", err);
warn!(context, "failed to store location {:#}", err);
} else {
info!("stored location for chat {}", chat_id);
info!(context, "stored location for chat {}", chat_id);
continue_streaming = true;
}
}
@@ -640,7 +639,7 @@ pub(crate) async fn location_loop(context: &Context, interrupt_receiver: Receive
loop {
let next_event = match maybe_send_locations(context).await {
Err(err) => {
warn!("maybe_send_locations failed: {:#}", err);
warn!(context, "maybe_send_locations failed: {:#}", err);
Some(60) // Retry one minute later.
}
Ok(next_event) => next_event,
@@ -653,6 +652,7 @@ pub(crate) async fn location_loop(context: &Context, interrupt_receiver: Receive
};
info!(
context,
"Location loop is waiting for {} or interrupt",
duration_to_str(duration)
);
@@ -719,7 +719,10 @@ async fn maybe_send_locations(context: &Context) -> Result<Option<u64>> {
// Send location-only message.
// Pending locations are attached automatically to every message,
// so also to this empty text message.
info!("Chat {} has pending locations, sending them.", chat_id);
info!(
context,
"Chat {} has pending locations, sending them.", chat_id
);
let mut msg = Message::new(Viewtype::Text);
msg.hidden = true;
msg.param.set_cmd(SystemMessage::LocationOnly);
@@ -727,8 +730,8 @@ async fn maybe_send_locations(context: &Context) -> Result<Option<u64>> {
} else {
// Wait until pending locations can be sent.
info!(
"Chat {} has pending locations, but they can't be sent yet.",
chat_id
context,
"Chat {} has pending locations, but they can't be sent yet.", chat_id
);
next_event = next_event
.into_iter()
@@ -737,14 +740,17 @@ async fn maybe_send_locations(context: &Context) -> Result<Option<u64>> {
}
} else {
info!(
"Chat {} has location streaming enabled, but no pending locations.",
chat_id
context,
"Chat {} has location streaming enabled, but no pending locations.", chat_id
);
}
} else {
// Location streaming was either explicitly disabled (locations_send_begin = 0) or
// locations_send_until is in the past.
info!("Disabling location streaming for chat {}.", chat_id);
info!(
context,
"Disabling location streaming for chat {}.", chat_id
);
context
.sql
.execute(

View File

@@ -4,6 +4,36 @@
use crate::context::Context;
#[macro_export]
macro_rules! info {
($ctx:expr, $msg:expr) => {
info!($ctx, $msg,)
};
($ctx:expr, $msg:expr, $($args:expr),* $(,)?) => {{
let formatted = format!($msg, $($args),*);
let full = format!("{file}:{line}: {msg}",
file = file!(),
line = line!(),
msg = &formatted);
$ctx.emit_event($crate::EventType::Info(full));
}};
}
#[macro_export]
macro_rules! warn {
($ctx:expr, $msg:expr) => {
warn!($ctx, $msg,)
};
($ctx:expr, $msg:expr, $($args:expr),* $(,)?) => {{
let formatted = format!($msg, $($args),*);
let full = format!("{file}:{line}: {msg}",
file = file!(),
line = line!(),
msg = &formatted);
$ctx.emit_event($crate::EventType::Warning(full));
}};
}
#[macro_export]
macro_rules! error {
($ctx:expr, $msg:expr) => {
@@ -11,6 +41,7 @@ macro_rules! error {
};
($ctx:expr, $msg:expr, $($args:expr),* $(,)?) => {{
let formatted = format!($msg, $($args),*);
$ctx.set_last_error(&formatted);
$ctx.emit_event($crate::EventType::Error(formatted));
}};
}
@@ -19,13 +50,13 @@ impl Context {
/// Set last error string.
/// Implemented as blocking as used from macros in different, not always async blocks.
pub fn set_last_error(&self, error: &str) {
let mut last_error = self.events.last_error.write().unwrap();
let mut last_error = self.last_error.write().unwrap();
*last_error = error.to_string();
}
/// Get last error string.
pub fn get_last_error(&self) -> String {
let last_error = &*self.events.last_error.read().unwrap();
let last_error = &*self.last_error.read().unwrap();
last_error.clone()
}
}
@@ -137,6 +168,12 @@ mod tests {
error!(t, "foo-error");
assert_eq!(t.get_last_error(), "foo-error");
warn!(t, "foo-warning");
assert_eq!(t.get_last_error(), "foo-error");
info!(t, "foo-info");
assert_eq!(t.get_last_error(), "foo-error");
error!(t, "bar-error");
error!(t, "baz-error");
assert_eq!(t.get_last_error(), "baz-error");

View File

@@ -7,7 +7,6 @@ use anyhow::{ensure, format_err, Context as _, Result};
use deltachat_derive::{FromSql, ToSql};
use rusqlite::types::ValueRef;
use serde::{Deserialize, Serialize};
use tracing::{error, info, warn};
use crate::chat::{self, Chat, ChatId};
use crate::config::Config;
@@ -338,6 +337,7 @@ impl Message {
Ok(t) => t,
Err(_) => {
warn!(
context,
concat!(
"dc_msg_load_from_db: could not get ",
"text column as non-lossy utf8 id {}"
@@ -1511,7 +1511,7 @@ pub async fn markseen_msgs(context: &Context, msg_ids: Vec<MsgId>) -> Result<()>
&& (curr_state == MessageState::InFresh || curr_state == MessageState::InNoticed)
{
update_msg_state(context, id, MessageState::InSeen).await?;
info!("Seen message {}.", id);
info!(context, "Seen message {}.", id);
markseen_on_imap_table(context, &curr_rfc724_mid).await?;
@@ -1595,11 +1595,11 @@ pub(crate) async fn set_msg_failed(context: &Context, msg_id: MsgId, error: &str
if let Ok(mut msg) = Message::load_from_db(context, msg_id).await {
if msg.state.can_fail() {
msg.state = MessageState::OutFailed;
warn!("{} failed: {}", msg_id, error);
warn!(context, "{} failed: {}", msg_id, error);
} else {
warn!(
"{} seems to have failed ({}), but state is {}",
msg_id, error, msg.state
context,
"{} seems to have failed ({}), but state is {}", msg_id, error, msg.state
)
}
@@ -1616,7 +1616,7 @@ pub(crate) async fn set_msg_failed(context: &Context, msg_id: MsgId, error: &str
msg_id,
}),
Err(e) => {
warn!("{:?}", e);
warn!(context, "{:?}", e);
}
}
}
@@ -1630,7 +1630,10 @@ pub async fn handle_mdn(
timestamp_sent: i64,
) -> Result<Option<(ChatId, MsgId)>> {
if from_id == ContactId::SELF {
warn!("ignoring MDN sent to self, this is a bug on the sender device");
warn!(
context,
"ignoring MDN sent to self, this is a bug on the sender device"
);
// This is not an error on our side,
// we successfully ignored an invalid MDN and return `Ok`.
@@ -1664,6 +1667,7 @@ pub async fn handle_mdn(
res
} else {
info!(
context,
"handle_mdn found no message with Message-ID {:?} sent by us in the database",
rfc724_mid
);
@@ -1781,7 +1785,7 @@ async fn ndn_maybe_add_info_msg(
Chattype::Mailinglist => {
// ndn_maybe_add_info_msg() is about the case when delivery to the group failed.
// If we get an NDN for the mailing list, just issue a warning.
warn!("ignoring NDN for mailing list.");
warn!(context, "ignoring NDN for mailing list.");
}
Chattype::Single | Chattype::Undefined => {}
}
@@ -1802,7 +1806,7 @@ pub async fn get_unblocked_msg_cnt(context: &Context) -> usize {
{
Ok(res) => res,
Err(err) => {
error!("get_unblocked_msg_cnt() failed. {:#}", err);
error!(context, "get_unblocked_msg_cnt() failed. {:#}", err);
0
}
}
@@ -1822,7 +1826,7 @@ pub async fn get_request_msg_cnt(context: &Context) -> usize {
{
Ok(res) => res,
Err(err) => {
error!("get_request_msg_cnt() failed. {:#}", err);
error!(context, "get_request_msg_cnt() failed. {:#}", err);
0
}
}
@@ -1893,7 +1897,7 @@ pub(crate) async fn rfc724_mid_exists(
) -> Result<Option<MsgId>> {
let rfc724_mid = rfc724_mid.trim_start_matches('<').trim_end_matches('>');
if rfc724_mid.is_empty() {
warn!("Empty rfc724_mid passed to rfc724_mid_exists");
warn!(context, "Empty rfc724_mid passed to rfc724_mid_exists");
return Ok(None);
}

View File

@@ -8,7 +8,6 @@ use chrono::TimeZone;
use format_flowed::{format_flowed, format_flowed_quote};
use lettre_email::{mime, Address, Header, MimeMultipartType, PartBuilder};
use tokio::fs;
use tracing::{info, warn};
use crate::blob::BlobObject;
use crate::chat::Chat;
@@ -652,7 +651,8 @@ impl<'a> MimeFactory<'a> {
};
let peerstates = self.peerstates_for_recipients(context).await?;
let should_encrypt = encrypt_helper.should_encrypt(e2ee_guaranteed, &peerstates)?;
let should_encrypt =
encrypt_helper.should_encrypt(context, e2ee_guaranteed, &peerstates)?;
let is_encrypted = should_encrypt && !force_plaintext;
let message = if parts.is_empty() {
@@ -730,6 +730,7 @@ impl<'a> MimeFactory<'a> {
if std::env::var(crate::DCC_MIME_DEBUG).is_ok() {
info!(
context,
"mimefactory: unencrypted message mime-body:\n{}",
message.clone().build().as_string(),
);
@@ -818,6 +819,7 @@ impl<'a> MimeFactory<'a> {
if std::env::var(crate::DCC_MIME_DEBUG).is_ok() {
info!(
context,
"mimefactory: outgoing message mime-body:\n{}",
outer_message.clone().build().as_string(),
);
@@ -938,6 +940,7 @@ impl<'a> MimeFactory<'a> {
& DC_FROM_HANDSHAKE
{
info!(
context,
"sending secure-join message \'{}\' >>>>>>>>>>>>>>>>>>>>>>>>>",
"vg-member-added",
);
@@ -1022,8 +1025,8 @@ impl<'a> MimeFactory<'a> {
let step = msg.param.get(Param::Arg).unwrap_or_default();
if !step.is_empty() {
info!(
"sending secure-join message \'{}\' >>>>>>>>>>>>>>>>>>>>>>>>>",
step,
context,
"sending secure-join message \'{}\' >>>>>>>>>>>>>>>>>>>>>>>>>", step,
);
headers
.protected
@@ -1071,7 +1074,7 @@ impl<'a> MimeFactory<'a> {
}
if let Some(grpimage) = grpimage {
info!("setting group image '{}'", grpimage);
info!(context, "setting group image '{}'", grpimage);
let mut meta = Message {
viewtype: Viewtype::Image,
..Default::default()
@@ -1231,7 +1234,7 @@ impl<'a> MimeFactory<'a> {
match self.get_location_kml_part(context).await {
Ok(part) => parts.push(part),
Err(err) => {
warn!("mimefactory: could not send location: {}", err);
warn!(context, "mimefactory: could not send location: {}", err);
}
}
}
@@ -1262,7 +1265,7 @@ impl<'a> MimeFactory<'a> {
"Chat-User-Avatar".into(),
format!("base64:{avatar}"),
)),
Err(err) => warn!("mimefactory: cannot attach selfavatar: {}", err),
Err(err) => warn!(context, "mimefactory: cannot attach selfavatar: {}", err),
},
None => headers
.protected

View File

@@ -12,7 +12,6 @@ use format_flowed::unformat_flowed;
use lettre_email::mime::{self, Mime};
use mailparse::{addrparse_header, DispositionType, MailHeader, MailHeaderMap, SingleInfo};
use once_cell::sync::Lazy;
use tracing::{error, info, warn};
use crate::aheader::{Aheader, EncryptPreference};
use crate::blob::BlobObject;
@@ -214,6 +213,7 @@ impl MimeMessage {
// Parse IMF headers.
MimeMessage::merge_headers(
context,
&mut headers,
&mut recipients,
&mut from,
@@ -231,6 +231,7 @@ impl MimeMessage {
// messages are shown as unencrypted anyway.
MimeMessage::merge_headers(
context,
&mut headers,
&mut recipients,
&mut from,
@@ -280,13 +281,14 @@ impl MimeMessage {
let public_keyring = keyring_from_peerstate(decryption_info.peerstate.as_ref());
let (mail, mut signatures, encrypted) = match tokio::task::block_in_place(|| {
try_decrypt(&mail, &private_keyring, &public_keyring)
try_decrypt(context, &mail, &private_keyring, &public_keyring)
}) {
Ok(Some((raw, signatures))) => {
mail_raw = raw;
let decrypted_mail = mailparse::parse_mail(&mail_raw)?;
if std::env::var(crate::DCC_MIME_DEBUG).is_ok() {
info!(
context,
"decrypted message mime-body:\n{}",
String::from_utf8_lossy(&mail_raw),
);
@@ -295,7 +297,7 @@ impl MimeMessage {
}
Ok(None) => (Ok(mail), HashSet::new(), false),
Err(err) => {
warn!("decryption failed: {:#}", err);
warn!(context, "decryption failed: {:#}", err);
(Err(err), HashSet::new(), false)
}
};
@@ -334,6 +336,7 @@ impl MimeMessage {
headers.remove("subject");
MimeMessage::merge_headers(
context,
&mut headers,
&mut recipients,
&mut signed_from,
@@ -352,7 +355,10 @@ impl MimeMessage {
// Or it's because someone is doing some replay attack
// - OTOH, I can't come up with an attack scenario
// where this would be useful.
warn!("From header in signed part doesn't match the outer one",);
warn!(
context,
"From header in signed part doesn't match the outer one",
);
}
}
}
@@ -462,7 +468,7 @@ impl MimeMessage {
}
/// Parses system messages.
fn parse_system_message_headers(&mut self) {
fn parse_system_message_headers(&mut self, context: &Context) {
if self.get_header(HeaderDef::AutocryptSetupMessage).is_some() {
self.parts.retain(|part| {
part.mimetype.is_none()
@@ -472,7 +478,7 @@ impl MimeMessage {
if self.parts.len() == 1 {
self.is_system_message = SystemMessage::AutocryptSetupMessage;
} else {
warn!("could not determine ASM mime-part");
warn!(context, "could not determine ASM mime-part");
}
} else if let Some(value) = self.get_header(HeaderDef::ChatContent) {
if value == "location-streaming-enabled" {
@@ -596,7 +602,7 @@ impl MimeMessage {
}
async fn parse_headers(&mut self, context: &Context) -> Result<()> {
self.parse_system_message_headers();
self.parse_system_message_headers(context);
self.parse_avatar_headers(context).await;
self.parse_videochat_headers();
if self.delivery_report.is_none() {
@@ -653,8 +659,8 @@ impl MimeMessage {
}
} else {
warn!(
"{} requested a read receipt to {}, ignoring",
from, dn_to.addr
context,
"{} requested a read receipt to {}, ignoring", from, dn_to.addr
);
}
}
@@ -718,7 +724,10 @@ impl MimeMessage {
{
Ok(blob) => Some(AvatarAction::Change(blob.as_name().to_string())),
Err(err) => {
warn!("Could not save decoded avatar to blob file: {:#}", err);
warn!(
context,
"Could not save decoded avatar to blob file: {:#}", err
);
None
}
}
@@ -786,12 +795,13 @@ impl MimeMessage {
if mail.ctype.params.get("protected-headers").is_some() {
if mail.ctype.mimetype == "text/rfc822-headers" {
warn!(
context,
"Protected headers found in text/rfc822-headers attachment: Will be ignored.",
);
return Ok(false);
}
warn!( "Ignoring nested protected headers");
warn!(context, "Ignoring nested protected headers");
}
enum MimeS {
@@ -910,7 +920,7 @@ impl MimeMessage {
if mail.subparts.len() >= 2 {
match mail.ctype.params.get("report-type").map(|s| s as &str) {
Some("disposition-notification") => {
if let Some(report) = self.process_report(mail)? {
if let Some(report) = self.process_report(context, mail)? {
self.mdn_reports.push(report);
}
@@ -928,7 +938,7 @@ impl MimeMessage {
}
// Some providers, e.g. Tiscali, forget to set the report-type. So, if it's None, assume that it might be delivery-status
Some("delivery-status") | None => {
if let Some(report) = self.process_delivery_status(mail)? {
if let Some(report) = self.process_delivery_status(context, mail)? {
self.delivery_report = Some(report);
}
@@ -992,7 +1002,7 @@ impl MimeMessage {
let (mime_type, msg_type) = get_mime_type(mail)?;
let raw_mime = mail.ctype.mimetype.to_lowercase();
let filename = get_attachment_filename(mail)?;
let filename = get_attachment_filename(context, mail)?;
let old_part_count = self.parts.len();
@@ -1012,7 +1022,7 @@ impl MimeMessage {
None => {
match mime_type.type_() {
mime::IMAGE | mime::AUDIO | mime::VIDEO | mime::APPLICATION => {
warn!("Missing attachment");
warn!(context, "Missing attachment");
return Ok(false);
}
mime::TEXT
@@ -1023,7 +1033,7 @@ impl MimeMessage {
let decoded_data = match mail.get_body() {
Ok(decoded_data) => decoded_data,
Err(err) => {
warn!("Invalid body parsed {:#}", err);
warn!(context, "Invalid body parsed {:#}", err);
// Note that it's not always an error - might be no data
return Ok(false);
}
@@ -1043,7 +1053,7 @@ impl MimeMessage {
let decoded_data = match mail.get_body() {
Ok(decoded_data) => decoded_data,
Err(err) => {
warn!("Invalid body parsed {:#}", err);
warn!(context, "Invalid body parsed {:#}", err);
// Note that it's not always an error - might be no data
return Ok(false);
}
@@ -1175,7 +1185,7 @@ impl MimeMessage {
if filename.starts_with("location") || filename.starts_with("message") {
let parsed = location::Kml::parse(decoded_data)
.map_err(|err| {
warn!("failed to parse kml part: {:#}", err);
warn!(context, "failed to parse kml part: {:#}", err);
})
.ok();
if filename.starts_with("location") {
@@ -1193,7 +1203,7 @@ impl MimeMessage {
self.sync_items = context
.parse_sync_items(serialized)
.map_err(|err| {
warn!("failed to parse sync data: {:#}", err);
warn!(context, "failed to parse sync data: {:#}", err);
})
.ok();
return Ok(());
@@ -1214,13 +1224,13 @@ impl MimeMessage {
Ok(blob) => blob,
Err(err) => {
error!(
"Could not add blob for mime part {}, error {:#}",
filename, err
context,
"Could not add blob for mime part {}, error {:#}", filename, err
);
return Ok(());
}
};
info!("added blobfile: {:?}", blob.as_name());
info!(context, "added blobfile: {:?}", blob.as_name());
/* create and register Mime part referencing the new Blob object */
let mut part = Part::default();
@@ -1251,20 +1261,23 @@ impl MimeMessage {
) -> Result<bool> {
let key = match str::from_utf8(decoded_data) {
Err(err) => {
warn!("PGP key attachment is not a UTF-8 file: {}", err);
warn!(context, "PGP key attachment is not a UTF-8 file: {}", err);
return Ok(false);
}
Ok(key) => key,
};
let key = match SignedPublicKey::from_asc(key) {
Err(err) => {
warn!("PGP key attachment is not an ASCII-armored file: {:#}", err);
warn!(
context,
"PGP key attachment is not an ASCII-armored file: {:#}", err
);
return Ok(false);
}
Ok((key, _)) => key,
};
if let Err(err) = key.verify() {
warn!("attached PGP key verification failed: {}", err);
warn!(context, "attached PGP key verification failed: {}", err);
return Ok(false);
}
if !key.details.users.iter().any(|user| {
@@ -1280,17 +1293,19 @@ impl MimeMessage {
// user have an Autocrypt-capable MUA and also attaches a key, but if that's the
// case, let 'em first disable Autocrypt and then change the key by attaching it.
warn!(
context,
"not using attached PGP key for peer '{}' because another one is already set \
with prefer-encrypt={}",
peerstate.addr, peerstate.prefer_encrypt,
peerstate.addr,
peerstate.prefer_encrypt,
);
return Ok(false);
}
}
peerstate.public_key = Some(key);
info!(
"using attached PGP key for peer '{}' with prefer-encrypt=mutual",
peerstate.addr,
context,
"using attached PGP key for peer '{}' with prefer-encrypt=mutual", peerstate.addr,
);
peerstate.prefer_encrypt = EncryptPreference::Mutual;
peerstate.save_to_db(&context.sql).await?;
@@ -1342,6 +1357,7 @@ impl MimeMessage {
}
fn merge_headers(
context: &Context,
headers: &mut HashMap<String, String>,
recipients: &mut Vec<SingleInfo>,
from: &mut Option<SingleInfo>,
@@ -1360,7 +1376,7 @@ impl MimeMessage {
Ok(addrlist) => {
*chat_disposition_notification_to = addrlist.extract_single_info();
}
Err(e) => warn!("Could not read {} address: {}", key, e),
Err(e) => warn!(context, "Could not read {} address: {}", key, e),
}
} else {
let value = field.get_value();
@@ -1382,7 +1398,11 @@ impl MimeMessage {
}
}
fn process_report(&self, report: &mailparse::ParsedMail<'_>) -> Result<Option<Report>> {
fn process_report(
&self,
context: &Context,
report: &mailparse::ParsedMail<'_>,
) -> Result<Option<Report>> {
// parse as mailheaders
let report_body = if let Some(subpart) = report.subparts.get(1) {
subpart.get_body_raw()?
@@ -1413,6 +1433,7 @@ impl MimeMessage {
}));
}
warn!(
context,
"ignoring unknown disposition-notification, Message-Id: {:?}",
report_fields.get_header_value(HeaderDef::MessageId)
);
@@ -1422,6 +1443,7 @@ impl MimeMessage {
fn process_delivery_status(
&self,
context: &Context,
report: &mailparse::ParsedMail<'_>,
) -> Result<Option<DeliveryReport>> {
// Assume failure.
@@ -1433,7 +1455,7 @@ impl MimeMessage {
if status_part.ctype.mimetype != "message/delivery-status"
&& status_part.ctype.mimetype != "message/global-delivery-status"
{
warn!( "Second part of Delivery Status Notification is not message/delivery-status or message/global-delivery-status, ignoring");
warn!(context, "Second part of Delivery Status Notification is not message/delivery-status or message/global-delivery-status, ignoring");
return Ok(None);
}
@@ -1447,14 +1469,14 @@ impl MimeMessage {
let (status_fields, _) = mailparse::parse_headers(status_body)?;
if let Some(action) = status_fields.get_first_value("action") {
if action != "failed" {
info!("DSN with {:?} action", action);
info!(context, "DSN with {:?} action", action);
failure = false;
}
} else {
warn!("DSN without action");
warn!(context, "DSN without action");
}
} else {
warn!("DSN without per-recipient fields");
warn!(context, "DSN without per-recipient fields");
}
} else {
// No message/delivery-status part.
@@ -1491,6 +1513,7 @@ impl MimeMessage {
}
warn!(
context,
"ignoring unknown ndn-notification, Message-Id: {:?}",
report_fields.get_header_value(HeaderDef::MessageId)
);
@@ -1615,7 +1638,7 @@ impl MimeMessage {
}
Ok(None) => {}
Err(err) => {
warn!("failed to handle_mdn: {:#}", err);
warn!(context, "failed to handle_mdn: {:#}", err);
}
}
}
@@ -1628,7 +1651,7 @@ impl MimeMessage {
.find(|p| p.typ == Viewtype::Text)
.map(|p| p.msg.clone());
if let Err(e) = message::handle_ndn(context, delivery_report, error).await {
warn!("Could not handle ndn: {}", e);
warn!(context, "Could not handle ndn: {}", e);
}
}
}
@@ -1676,7 +1699,7 @@ async fn update_gossip_peerstates(
let header = match value.parse::<Aheader>() {
Ok(header) => header,
Err(err) => {
warn!("Failed parsing Autocrypt-Gossip header: {}", err);
warn!(context, "Failed parsing Autocrypt-Gossip header: {}", err);
continue;
}
};
@@ -1686,16 +1709,16 @@ async fn update_gossip_peerstates(
.any(|info| addr_cmp(&info.addr, &header.addr))
{
warn!(
"Ignoring gossiped \"{}\" as the address is not in To/Cc list.",
&header.addr,
context,
"Ignoring gossiped \"{}\" as the address is not in To/Cc list.", &header.addr,
);
continue;
}
if addr_cmp(from, &header.addr) {
// Non-standard, but anyway we can't update the cached peerstate here.
warn!(
"Ignoring gossiped \"{}\" as it equals the From address",
&header.addr,
context,
"Ignoring gossiped \"{}\" as it equals the From address", &header.addr,
);
continue;
}
@@ -1886,7 +1909,10 @@ fn is_attachment_disposition(mail: &mailparse::ParsedMail<'_>) -> bool {
/// returned. If Content-Disposition is "attachment" but filename is
/// not specified, filename is guessed. If Content-Disposition cannot
/// be parsed, returns an error.
fn get_attachment_filename(mail: &mailparse::ParsedMail) -> Result<Option<String>> {
fn get_attachment_filename(
context: &Context,
mail: &mailparse::ParsedMail,
) -> Result<Option<String>> {
let ct = mail.get_content_disposition();
// try to get file name as "encoded-words" from
@@ -1898,7 +1924,7 @@ fn get_attachment_filename(mail: &mailparse::ParsedMail) -> Result<Option<String
// be graceful and just use the original name.
// some MUA, including Delta Chat up to core1.50,
// use `filename*` mistakenly for simple encoded-words without following rfc2231
warn!("apostrophed encoding invalid: {}", name);
warn!(context, "apostrophed encoding invalid: {}", name);
desired_filename = Some(name);
}
}
@@ -1950,10 +1976,10 @@ pub(crate) fn get_list_post(headers: &[MailHeader]) -> Option<String> {
.map(|s| s.addr)
}
fn get_all_addresses_from_header(
headers: &[MailHeader],
pred: fn(String) -> bool,
) -> Vec<SingleInfo> {
fn get_all_addresses_from_header<F>(headers: &[MailHeader], pred: F) -> Vec<SingleInfo>
where
F: Fn(String) -> bool,
{
let mut result: Vec<SingleInfo> = Default::default();
headers
@@ -2128,123 +2154,147 @@ mod tests {
assert!(is_attachment_disposition(&mail.subparts[1]));
}
fn load_mail_with_attachment(raw: &[u8]) -> ParsedMail<'_> {
fn load_mail_with_attachment<'a>(t: &'a TestContext, raw: &'a [u8]) -> ParsedMail<'a> {
let mail = mailparse::parse_mail(raw).unwrap();
assert!(get_attachment_filename(&mail).unwrap().is_none());
assert!(get_attachment_filename(&mail.subparts[0])
assert!(get_attachment_filename(t, &mail).unwrap().is_none());
assert!(get_attachment_filename(t, &mail.subparts[0])
.unwrap()
.is_none());
mail
}
#[test]
fn test_get_attachment_filename() {
let mail = load_mail_with_attachment(include_bytes!(
"../test-data/message/attach_filename_simple.eml"
));
let filename = get_attachment_filename(&mail.subparts[1]).unwrap();
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn test_get_attachment_filename() {
let t = TestContext::new().await;
let mail = load_mail_with_attachment(
&t,
include_bytes!("../test-data/message/attach_filename_simple.eml"),
);
let filename = get_attachment_filename(&t, &mail.subparts[1]).unwrap();
assert_eq!(filename, Some("test.html".to_string()))
}
#[test]
fn test_get_attachment_filename_encoded_words() {
let mail = load_mail_with_attachment(include_bytes!(
"../test-data/message/attach_filename_encoded_words.eml"
));
let filename = get_attachment_filename(&mail.subparts[1]).unwrap();
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn test_get_attachment_filename_encoded_words() {
let t = TestContext::new().await;
let mail = load_mail_with_attachment(
&t,
include_bytes!("../test-data/message/attach_filename_encoded_words.eml"),
);
let filename = get_attachment_filename(&t, &mail.subparts[1]).unwrap();
assert_eq!(filename, Some("Maßnahmen Okt. 2020.html".to_string()))
}
#[test]
fn test_get_attachment_filename_encoded_words_binary() {
let mail = load_mail_with_attachment(include_bytes!(
"../test-data/message/attach_filename_encoded_words_binary.eml"
));
let filename = get_attachment_filename(&mail.subparts[1]).unwrap();
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn test_get_attachment_filename_encoded_words_binary() {
let t = TestContext::new().await;
let mail = load_mail_with_attachment(
&t,
include_bytes!("../test-data/message/attach_filename_encoded_words_binary.eml"),
);
let filename = get_attachment_filename(&t, &mail.subparts[1]).unwrap();
assert_eq!(filename, Some(" § 165 Abs".to_string()))
}
#[test]
fn test_get_attachment_filename_encoded_words_windows1251() {
let mail = load_mail_with_attachment(include_bytes!(
"../test-data/message/attach_filename_encoded_words_windows1251.eml"
));
let filename = get_attachment_filename(&mail.subparts[1]).unwrap();
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn test_get_attachment_filename_encoded_words_windows1251() {
let t = TestContext::new().await;
let mail = load_mail_with_attachment(
&t,
include_bytes!("../test-data/message/attach_filename_encoded_words_windows1251.eml"),
);
let filename = get_attachment_filename(&t, &mail.subparts[1]).unwrap();
assert_eq!(filename, Some("file Что нового 2020.pdf".to_string()))
}
#[test]
fn test_get_attachment_filename_encoded_words_cont() {
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn test_get_attachment_filename_encoded_words_cont() {
// test continued encoded-words and also test apostropes work that way
let mail = load_mail_with_attachment(include_bytes!(
"../test-data/message/attach_filename_encoded_words_cont.eml"
));
let filename = get_attachment_filename(&mail.subparts[1]).unwrap();
let t = TestContext::new().await;
let mail = load_mail_with_attachment(
&t,
include_bytes!("../test-data/message/attach_filename_encoded_words_cont.eml"),
);
let filename = get_attachment_filename(&t, &mail.subparts[1]).unwrap();
assert_eq!(filename, Some("Maßn'ah'men Okt. 2020.html".to_string()))
}
#[test]
fn test_get_attachment_filename_encoded_words_bad_delimiter() {
let mail = load_mail_with_attachment(include_bytes!(
"../test-data/message/attach_filename_encoded_words_bad_delimiter.eml"
));
let filename = get_attachment_filename(&mail.subparts[1]).unwrap();
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn test_get_attachment_filename_encoded_words_bad_delimiter() {
let t = TestContext::new().await;
let mail = load_mail_with_attachment(
&t,
include_bytes!("../test-data/message/attach_filename_encoded_words_bad_delimiter.eml"),
);
let filename = get_attachment_filename(&t, &mail.subparts[1]).unwrap();
// not decoded as a space is missing after encoded-words part
assert_eq!(filename, Some("=?utf-8?q?foo?=.bar".to_string()))
}
#[test]
fn test_get_attachment_filename_apostrophed() {
let mail = load_mail_with_attachment(include_bytes!(
"../test-data/message/attach_filename_apostrophed.eml"
));
let filename = get_attachment_filename(&mail.subparts[1]).unwrap();
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn test_get_attachment_filename_apostrophed() {
let t = TestContext::new().await;
let mail = load_mail_with_attachment(
&t,
include_bytes!("../test-data/message/attach_filename_apostrophed.eml"),
);
let filename = get_attachment_filename(&t, &mail.subparts[1]).unwrap();
assert_eq!(filename, Some("Maßnahmen Okt. 2021.html".to_string()))
}
#[test]
fn test_get_attachment_filename_apostrophed_cont() {
let mail = load_mail_with_attachment(include_bytes!(
"../test-data/message/attach_filename_apostrophed_cont.eml"
));
let filename = get_attachment_filename(&mail.subparts[1]).unwrap();
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn test_get_attachment_filename_apostrophed_cont() {
let t = TestContext::new().await;
let mail = load_mail_with_attachment(
&t,
include_bytes!("../test-data/message/attach_filename_apostrophed_cont.eml"),
);
let filename = get_attachment_filename(&t, &mail.subparts[1]).unwrap();
assert_eq!(filename, Some("Maßnahmen März 2022.html".to_string()))
}
#[test]
fn test_get_attachment_filename_apostrophed_windows1251() {
let mail = load_mail_with_attachment(include_bytes!(
"../test-data/message/attach_filename_apostrophed_windows1251.eml"
));
let filename = get_attachment_filename(&mail.subparts[1]).unwrap();
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn test_get_attachment_filename_apostrophed_windows1251() {
let t = TestContext::new().await;
let mail = load_mail_with_attachment(
&t,
include_bytes!("../test-data/message/attach_filename_apostrophed_windows1251.eml"),
);
let filename = get_attachment_filename(&t, &mail.subparts[1]).unwrap();
assert_eq!(filename, Some("программирование.HTM".to_string()))
}
#[test]
fn test_get_attachment_filename_apostrophed_cp1252() {
let mail = load_mail_with_attachment(include_bytes!(
"../test-data/message/attach_filename_apostrophed_cp1252.eml"
));
let filename = get_attachment_filename(&mail.subparts[1]).unwrap();
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn test_get_attachment_filename_apostrophed_cp1252() {
let t = TestContext::new().await;
let mail = load_mail_with_attachment(
&t,
include_bytes!("../test-data/message/attach_filename_apostrophed_cp1252.eml"),
);
let filename = get_attachment_filename(&t, &mail.subparts[1]).unwrap();
assert_eq!(filename, Some("Auftragsbestätigung.pdf".to_string()))
}
#[test]
fn test_get_attachment_filename_apostrophed_invalid() {
let mail = load_mail_with_attachment(include_bytes!(
"../test-data/message/attach_filename_apostrophed_invalid.eml"
));
let filename = get_attachment_filename(&mail.subparts[1]).unwrap();
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn test_get_attachment_filename_apostrophed_invalid() {
let t = TestContext::new().await;
let mail = load_mail_with_attachment(
&t,
include_bytes!("../test-data/message/attach_filename_apostrophed_invalid.eml"),
);
let filename = get_attachment_filename(&t, &mail.subparts[1]).unwrap();
assert_eq!(filename, Some("somedäüta.html.zip".to_string()))
}
#[test]
fn test_get_attachment_filename_combined() {
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn test_get_attachment_filename_combined() {
// test that if `filename` and `filename*0` are given, the filename is not doubled
let mail = load_mail_with_attachment(include_bytes!(
"../test-data/message/attach_filename_combined.eml"
));
let filename = get_attachment_filename(&mail.subparts[1]).unwrap();
let t = TestContext::new().await;
let mail = load_mail_with_attachment(
&t,
include_bytes!("../test-data/message/attach_filename_combined.eml"),
);
let filename = get_attachment_filename(&t, &mail.subparts[1]).unwrap();
assert_eq!(filename, Some("Maßnahmen Okt. 2020.html".to_string()))
}

View File

@@ -8,7 +8,6 @@ use anyhow::{Context as _, Error, Result};
use tokio::net::{lookup_host, TcpStream};
use tokio::time::timeout;
use tokio_io_timeout::TimeoutStream;
use tracing::{info, warn};
use crate::context::Context;
use crate::tools::time;
@@ -51,8 +50,8 @@ async fn lookup_host_with_cache(
Ok(res) => res,
Err(err) => {
warn!(
"DNS resolution for {}:{} failed: {:#}.",
hostname, port, err
context,
"DNS resolution for {}:{} failed: {:#}.", hostname, port, err
);
Vec::new()
}
@@ -65,7 +64,7 @@ async fn lookup_host_with_cache(
continue;
}
info!("Resolved {}:{} into {}.", hostname, port, &addr);
info!(context, "Resolved {}:{} into {}.", hostname, port, &addr);
// Update the cache.
context
@@ -111,8 +110,8 @@ async fn lookup_host_with_cache(
}
Err(err) => {
warn!(
"Failed to parse cached address {:?}: {:#}.",
cached_address, err
context,
"Failed to parse cached address {:?}: {:#}.", cached_address, err
);
}
}
@@ -164,7 +163,10 @@ pub(crate) async fn connect_tcp(
break;
}
Err(err) => {
warn!("Failed to connect to {}: {:#}.", resolved_addr, err);
warn!(
context,
"Failed to connect to {}: {:#}.", resolved_addr, err
);
last_error = Some(err);
}
}

View File

@@ -5,7 +5,6 @@ use std::collections::HashMap;
use anyhow::Result;
use percent_encoding::{utf8_percent_encode, NON_ALPHANUMERIC};
use serde::Deserialize;
use tracing::{info, warn};
use crate::config::Config;
use crate::context::Context;
@@ -63,7 +62,7 @@ pub async fn get_oauth2_url(
redirect_uri: &str,
) -> Result<Option<String>> {
let socks5_enabled = context.get_config_bool(Config::Socks5Enabled).await?;
if let Some(oauth2) = Oauth2::from_address(addr, socks5_enabled).await {
if let Some(oauth2) = Oauth2::from_address(context, addr, socks5_enabled).await {
context
.sql
.set_raw_config("oauth2_pending_redirect_uri", Some(redirect_uri))
@@ -84,7 +83,7 @@ pub(crate) async fn get_oauth2_access_token(
regenerate: bool,
) -> Result<Option<String>> {
let socks5_enabled = context.get_config_bool(Config::Socks5Enabled).await?;
if let Some(oauth2) = Oauth2::from_address(addr, socks5_enabled).await {
if let Some(oauth2) = Oauth2::from_address(context, addr, socks5_enabled).await {
let lock = context.oauth2_mutex.lock().await;
// read generated token
@@ -106,7 +105,7 @@ pub(crate) async fn get_oauth2_access_token(
let (redirect_uri, token_url, update_redirect_uri_on_success) =
if refresh_token.is_none() || refresh_token_for != code {
info!("Generate OAuth2 refresh_token and access_token...",);
info!(context, "Generate OAuth2 refresh_token and access_token...",);
(
context
.sql
@@ -117,7 +116,10 @@ pub(crate) async fn get_oauth2_access_token(
true,
)
} else {
info!("Regenerate OAuth2 access_token by refresh_token...",);
info!(
context,
"Regenerate OAuth2 access_token by refresh_token...",
);
(
context
.sql
@@ -165,14 +167,14 @@ pub(crate) async fn get_oauth2_access_token(
Ok(response) => response,
Err(err) => {
warn!(
"Failed to parse OAuth2 JSON response from {}: error: {}",
token_url, err
context,
"Failed to parse OAuth2 JSON response from {}: error: {}", token_url, err
);
return Ok(None);
}
},
Err(err) => {
warn!("Error calling OAuth2 at {}: {:?}", token_url, err);
warn!(context, "Error calling OAuth2 at {}: {:?}", token_url, err);
return Ok(None);
}
};
@@ -213,14 +215,14 @@ pub(crate) async fn get_oauth2_access_token(
.await?;
}
} else {
warn!("Failed to find OAuth2 access token");
warn!(context, "Failed to find OAuth2 access token");
}
drop(lock);
Ok(response.access_token)
} else {
warn!("Internal OAuth2 error: 2");
warn!(context, "Internal OAuth2 error: 2");
Ok(None)
}
@@ -232,7 +234,7 @@ pub(crate) async fn get_oauth2_addr(
code: &str,
) -> Result<Option<String>> {
let socks5_enabled = context.get_config_bool(Config::Socks5Enabled).await?;
let oauth2 = match Oauth2::from_address(addr, socks5_enabled).await {
let oauth2 = match Oauth2::from_address(context, addr, socks5_enabled).await {
Some(o) => o,
None => return Ok(None),
};
@@ -258,13 +260,13 @@ pub(crate) async fn get_oauth2_addr(
}
impl Oauth2 {
async fn from_address(addr: &str, skip_mx: bool) -> Option<Self> {
async fn from_address(context: &Context, addr: &str, skip_mx: bool) -> Option<Self> {
let addr_normalized = normalize_addr(addr);
if let Some(domain) = addr_normalized
.find('@')
.map(|index| addr_normalized.split_at(index + 1).1)
{
if let Some(oauth2_authorizer) = provider::get_provider_info(domain, skip_mx)
if let Some(oauth2_authorizer) = provider::get_provider_info(context, domain, skip_mx)
.await
.and_then(|provider| provider.oauth2_authorizer.as_ref())
{
@@ -292,14 +294,14 @@ impl Oauth2 {
let client = match crate::http::get_client(socks5_config) {
Ok(cl) => cl,
Err(err) => {
warn!("failed to get HTTP client: {}", err);
warn!(context, "failed to get HTTP client: {}", err);
return None;
}
};
let response = match client.get(userinfo_url).send().await {
Ok(response) => response,
Err(err) => {
warn!("failed to get userinfo: {}", err);
warn!(context, "failed to get userinfo: {}", err);
return None;
}
};
@@ -307,7 +309,7 @@ impl Oauth2 {
let parsed = match response {
Ok(parsed) => parsed,
Err(err) => {
warn!("Error getting userinfo: {}", err);
warn!(context, "Error getting userinfo: {}", err);
return None;
}
};
@@ -317,11 +319,11 @@ impl Oauth2 {
if let Some(s) = addr.as_str() {
Some(s.to_string())
} else {
warn!("E-mail in userinfo is not a string: {}", addr);
warn!(context, "E-mail in userinfo is not a string: {}", addr);
None
}
} else {
warn!("E-mail missing in userinfo.");
warn!(context, "E-mail missing in userinfo.");
None
}
}
@@ -375,34 +377,39 @@ mod tests {
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn test_oauth_from_address() {
let t = TestContext::new().await;
assert_eq!(
Oauth2::from_address("hello@gmail.com", false).await,
Oauth2::from_address(&t, "hello@gmail.com", false).await,
Some(OAUTH2_GMAIL)
);
assert_eq!(
Oauth2::from_address("hello@googlemail.com", false).await,
Oauth2::from_address(&t, "hello@googlemail.com", false).await,
Some(OAUTH2_GMAIL)
);
assert_eq!(
Oauth2::from_address("hello@yandex.com", false).await,
Oauth2::from_address(&t, "hello@yandex.com", false).await,
Some(OAUTH2_YANDEX)
);
assert_eq!(
Oauth2::from_address("hello@yandex.ru", false).await,
Oauth2::from_address(&t, "hello@yandex.ru", false).await,
Some(OAUTH2_YANDEX)
);
assert_eq!(Oauth2::from_address("hello@web.de", false).await, None);
assert_eq!(Oauth2::from_address(&t, "hello@web.de", false).await, None);
}
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn test_oauth_from_mx() {
// youtube staff seems to use "google workspace with oauth2", figures this out by MX lookup
let t = TestContext::new().await;
assert_eq!(
Oauth2::from_address("hello@youtube.com", false).await,
Oauth2::from_address(&t, "hello@youtube.com", false).await,
Some(OAUTH2_GMAIL)
);
// without MX lookup, we would not know as youtube.com is not in our provider-db
assert_eq!(Oauth2::from_address("hello@youtube.com", true).await, None);
assert_eq!(
Oauth2::from_address(&t, "hello@youtube.com", true).await,
None
);
}
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]

View File

@@ -4,7 +4,6 @@ use std::collections::HashSet;
use anyhow::{Context as _, Error, Result};
use num_traits::FromPrimitive;
use tracing::warn;
use crate::aheader::{Aheader, EncryptPreference};
use crate::chat::{self, Chat};
@@ -595,8 +594,10 @@ impl Peerstate {
}
Err(err) => {
warn!(
context,
"New address {:?} is not valid, not doing AEAP: {:#}.",
new_addr, err
new_addr,
err
)
}
}

View File

@@ -4,10 +4,10 @@ mod data;
use anyhow::Result;
use chrono::{NaiveDateTime, NaiveTime};
use tracing::warn;
use trust_dns_resolver::{config, AsyncResolver, TokioAsyncResolver};
use crate::config::Config;
use crate::context::Context;
use crate::provider::data::{PROVIDER_DATA, PROVIDER_IDS, PROVIDER_UPDATED};
/// Provider status according to manual testing.
@@ -184,7 +184,11 @@ fn get_resolver() -> Result<TokioAsyncResolver> {
///
/// For compatibility, email address can be passed to this function
/// instead of the domain.
pub async fn get_provider_info(domain: &str, skip_mx: bool) -> Option<&'static Provider> {
pub async fn get_provider_info(
context: &Context,
domain: &str,
skip_mx: bool,
) -> Option<&'static Provider> {
let domain = domain.rsplit('@').next()?;
if let Some(provider) = get_provider_by_domain(domain) {
@@ -192,7 +196,7 @@ pub async fn get_provider_info(domain: &str, skip_mx: bool) -> Option<&'static P
}
if !skip_mx {
if let Some(provider) = get_provider_by_mx(domain).await {
if let Some(provider) = get_provider_by_mx(context, domain).await {
return Some(provider);
}
}
@@ -212,7 +216,7 @@ pub fn get_provider_by_domain(domain: &str) -> Option<&'static Provider> {
/// Finds a provider based on MX record for the given domain.
///
/// For security reasons, only Gmail can be configured this way.
pub async fn get_provider_by_mx(domain: &str) -> Option<&'static Provider> {
pub async fn get_provider_by_mx(context: &Context, domain: &str) -> Option<&'static Provider> {
if let Ok(resolver) = get_resolver() {
let mut fqdn: String = domain.to_string();
if !fqdn.ends_with('.') {
@@ -239,7 +243,7 @@ pub async fn get_provider_by_mx(domain: &str) -> Option<&'static Provider> {
}
}
} else {
warn!("cannot get a resolver to check MX records.");
warn!(context, "cannot get a resolver to check MX records.");
}
None
@@ -268,6 +272,7 @@ mod tests {
use chrono::NaiveDate;
use super::*;
use crate::test_utils::TestContext;
use crate::tools::time;
#[test]
@@ -317,12 +322,13 @@ mod tests {
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn test_get_provider_info() {
assert!(get_provider_info("", false).await.is_none());
assert!(get_provider_info("google.com", false).await.unwrap().id == "gmail");
let t = TestContext::new().await;
assert!(get_provider_info(&t, "", false).await.is_none());
assert!(get_provider_info(&t, "google.com", false).await.unwrap().id == "gmail");
// get_provider_info() accepts email addresses for backwards compatibility
assert!(
get_provider_info("example@google.com", false)
get_provider_info(&t, "example@google.com", false)
.await
.unwrap()
.id

View File

@@ -8,7 +8,6 @@ pub use dclogin_scheme::LoginOptions;
use once_cell::sync::Lazy;
use percent_encoding::percent_decode_str;
use serde::Deserialize;
use tracing::info;
use self::dclogin_scheme::configure_from_login_qr;
use crate::chat::{self, get_chat_id_by_grpid, ChatIdBlocked};
@@ -248,7 +247,7 @@ fn starts_with_ignore_case(string: &str, pattern: &str) -> bool {
/// The function should be called after a QR code is scanned.
/// The function takes the raw text scanned and checks what can be done with it.
pub async fn check_qr(context: &Context, qr: &str) -> Result<Qr> {
info!("Scanned QR code: {}", qr);
info!(context, "Scanned QR code: {}", qr);
let qrcode = if starts_with_ignore_case(qr, OPENPGP4FPR_SCHEME) {
decode_openpgp(context, qr)

View File

@@ -5,7 +5,6 @@ use std::sync::atomic::Ordering;
use anyhow::{anyhow, Context as _, Result};
use async_imap::types::{Quota, QuotaResource};
use tracing::warn;
use crate::chat::add_device_msg_with_importance;
use crate::config::Config;
@@ -135,7 +134,7 @@ impl Context {
/// Called in response to `Action::UpdateRecentQuota`.
pub(crate) async fn update_recent_quota(&self, imap: &mut Imap) -> Result<()> {
if let Err(err) = imap.prepare(self).await {
warn!("could not connect: {err:#}");
warn!(self, "could not connect: {:#}", err);
return Ok(());
}
@@ -163,7 +162,7 @@ impl Context {
self.set_config(Config::QuotaExceeding, None).await?;
}
}
Err(err) => warn!("cannot get highest quota usage: {:#}", err),
Err(err) => warn!(self, "cannot get highest quota usage: {:#}", err),
}
}

View File

@@ -18,7 +18,6 @@ use std::collections::BTreeMap;
use std::fmt;
use anyhow::Result;
use tracing::info;
use crate::chat::{send_msg, ChatId};
use crate::contact::ContactId;
@@ -233,8 +232,8 @@ pub(crate) async fn set_msg_reaction(
set_msg_id_reaction(context, msg_id, chat_id, contact_id, reaction).await
} else {
info!(
"Can't assign reaction to unknown message with Message-ID {}",
in_reply_to
context,
"Can't assign reaction to unknown message with Message-ID {}", in_reply_to
);
Ok(())
}

View File

@@ -9,7 +9,6 @@ use mailparse::{parse_mail, SingleInfo};
use num_traits::FromPrimitive;
use once_cell::sync::Lazy;
use regex::Regex;
use tracing::{info, warn};
use crate::chat::{self, Chat, ChatId, ChatIdBlocked, ProtectionStatus};
use crate::config::Config;
@@ -97,10 +96,11 @@ pub(crate) async fn receive_imf_inner(
is_partial_download: Option<u32>,
fetching_existing_messages: bool,
) -> Result<Option<ReceivedMsg>> {
info!("Receiving message, seen={seen}...");
info!(context, "Receiving message, seen={}...", seen);
if std::env::var(crate::DCC_MIME_DEBUG).is_ok() {
info!(
context,
"receive_imf: incoming message mime-body:\n{}",
String::from_utf8_lossy(imf_raw),
);
@@ -109,7 +109,7 @@ pub(crate) async fn receive_imf_inner(
let mut mime_parser = match MimeMessage::from_bytes(context, imf_raw, is_partial_download).await
{
Err(err) => {
warn!("receive_imf: can't parse MIME: {err:#}.");
warn!(context, "receive_imf: can't parse MIME: {:#}", err);
let msg_ids;
if !rfc724_mid.starts_with(GENERATED_PREFIX) {
let row_id = context
@@ -138,11 +138,11 @@ pub(crate) async fn receive_imf_inner(
// we can not add even an empty record if we have no info whatsoever
if !mime_parser.has_headers() {
warn!("receive_imf: no headers found.");
warn!(context, "receive_imf: no headers found");
return Ok(None);
}
info!("Received message has Message-Id: {rfc724_mid}");
info!(context, "received message has Message-Id: {}", rfc724_mid);
// check, if the mail is already in our database.
// make sure, this check is done eg. before securejoin-processing.
@@ -151,11 +151,14 @@ pub(crate) async fn receive_imf_inner(
let msg = Message::load_from_db(context, old_msg_id).await?;
if msg.download_state() != DownloadState::Done && is_partial_download.is_none() {
// the message was partially downloaded before and is fully downloaded now.
info!("Message already partly in DB, replacing by full message.");
info!(
context,
"Message already partly in DB, replacing by full message."
);
Some(old_msg_id)
} else {
// the message was probably moved around.
info!("Message already in DB, doing nothing.");
info!(context, "Message already in DB, doing nothing.");
return Ok(None);
}
} else {
@@ -177,7 +180,10 @@ pub(crate) async fn receive_imf_inner(
match from_field_to_contact_id(context, &mime_parser.from, prevent_rename).await? {
Some(contact_id_res) => contact_id_res,
None => {
warn!("receive_imf: From field does not contain an acceptable address.");
warn!(
context,
"receive_imf: From field does not contain an acceptable address"
);
return Ok(None);
}
};
@@ -240,7 +246,10 @@ pub(crate) async fn receive_imf_inner(
.iter()
.all(|recipient| mime_parser.gossiped_addr.contains(&recipient.addr))
{
info!("Received message contains Autocrypt-Gossip for all members, updating timestamp.");
info!(
context,
"Received message contains Autocrypt-Gossip for all members, updating timestamp."
);
if chat_id.get_gossiped_timestamp(context).await? < sent_timestamp {
chat_id
.set_gossiped_timestamp(context, sent_timestamp)
@@ -260,13 +269,13 @@ pub(crate) async fn receive_imf_inner(
if from_id == ContactId::SELF {
if mime_parser.was_encrypted() {
if let Err(err) = context.execute_sync_items(sync_items).await {
warn!("receive_imf cannot execute sync items: {err:#}.");
warn!(context, "receive_imf cannot execute sync items: {:#}", err);
}
} else {
warn!("Sync items are not encrypted.");
warn!(context, "sync items are not encrypted.");
}
} else {
warn!("Sync items not sent by self.");
warn!(context, "sync items not sent by self.");
}
}
@@ -275,7 +284,7 @@ pub(crate) async fn receive_imf_inner(
.receive_status_update(from_id, insert_msg_id, status_update)
.await
{
warn!("receive_imf cannot update status: {err:#}.");
warn!(context, "receive_imf cannot update status: {:#}", err);
}
}
@@ -293,7 +302,10 @@ pub(crate) async fn receive_imf_inner(
)
.await
{
warn!("receive_imf cannot update profile image: {err:#}.");
warn!(
context,
"receive_imf cannot update profile image: {:#}", err
);
};
}
}
@@ -319,7 +331,7 @@ pub(crate) async fn receive_imf_inner(
)
.await
{
warn!("Cannot update contact status: {err:#}.");
warn!(context, "cannot update contact status: {:#}", err);
}
}
@@ -380,7 +392,10 @@ pub async fn from_field_to_contact_id(
let from_addr = match ContactAddress::new(&from.addr) {
Ok(from_addr) => from_addr,
Err(err) => {
warn!("Cannot create a contact for the given From field: {err:#}.");
warn!(
context,
"Cannot create a contact for the given From field: {:#}.", err
);
return Ok(None);
}
};
@@ -461,7 +476,7 @@ async fn add_parts(
// this message is a classic email not a chat-message nor a reply to one
match show_emails {
ShowEmails::Off => {
info!("Classical email not shown (TRASH).");
info!(context, "Classical email not shown (TRASH)");
chat_id = Some(DC_CHAT_ID_TRASH);
allow_creation = false;
}
@@ -504,7 +519,7 @@ async fn add_parts(
securejoin_seen = false;
}
Err(err) => {
warn!("Error in Secure-Join message handling: {err:#}.");
warn!(context, "Error in Secure-Join message handling: {:#}", err);
chat_id = Some(DC_CHAT_ID_TRASH);
securejoin_seen = true;
}
@@ -521,7 +536,7 @@ async fn add_parts(
if chat_id.is_none() && mime_parser.delivery_report.is_some() {
chat_id = Some(DC_CHAT_ID_TRASH);
info!("Message is a DSN (TRASH).",);
info!(context, "Message is a DSN (TRASH)",);
}
if chat_id.is_none() {
@@ -695,7 +710,10 @@ async fn add_parts(
// the contact requests will pop up and this should be just fine.
Contact::scaleup_origin_by_id(context, from_id, Origin::IncomingReplyTo)
.await?;
info!("Message is a reply to a known message, mark sender as known.",);
info!(
context,
"Message is a reply to a known message, mark sender as known.",
);
}
}
}
@@ -736,7 +754,7 @@ async fn add_parts(
chat_id = None;
}
Err(err) => {
warn!("Error in Secure-Join watching: {err:#}.");
warn!(context, "Error in Secure-Join watching: {:#}", err);
chat_id = Some(DC_CHAT_ID_TRASH);
}
}
@@ -753,7 +771,7 @@ async fn add_parts(
if is_draft {
// Most mailboxes have a "Drafts" folder where constantly new emails appear but we don't actually want to show them
info!("Email is probably just a draft (TRASH).");
info!(context, "Email is probably just a draft (TRASH)");
chat_id = Some(DC_CHAT_ID_TRASH);
}
@@ -845,14 +863,14 @@ async fn add_parts(
if fetching_existing_messages && mime_parser.decrypting_failed {
chat_id = Some(DC_CHAT_ID_TRASH);
// We are only gathering old messages on first start. We do not want to add loads of non-decryptable messages to the chats.
info!("Existing non-decipherable message (TRASH).");
info!(context, "Existing non-decipherable message. (TRASH)");
}
if mime_parser.webxdc_status_update.is_some() && mime_parser.parts.len() == 1 {
if let Some(part) = mime_parser.parts.first() {
if part.typ == Viewtype::Text && part.msg.is_empty() {
chat_id = Some(DC_CHAT_ID_TRASH);
info!("Message is a status update only (TRASH).");
info!(context, "Message is a status update only (TRASH)");
}
}
}
@@ -862,7 +880,7 @@ async fn add_parts(
DC_CHAT_ID_TRASH
} else {
chat_id.unwrap_or_else(|| {
info!("No chat id for message (TRASH).");
info!(context, "No chat id for message (TRASH)");
DC_CHAT_ID_TRASH
})
};
@@ -874,7 +892,10 @@ async fn add_parts(
match value.parse::<EphemeralTimer>() {
Ok(timer) => timer,
Err(err) => {
warn!("Can't parse ephemeral timer \"{value}\": {err:#}.");
warn!(
context,
"can't parse ephemeral timer \"{}\": {:#}", value, err
);
EphemeralTimer::Disabled
}
}
@@ -894,7 +915,12 @@ async fn add_parts(
&& !mime_parser.parts.is_empty()
&& chat_id.get_ephemeral_timer(context).await? != ephemeral_timer
{
info!("Received new ephemeral timer value {ephemeral_timer:?} for chat {chat_id}, checking if it should be applied.");
info!(
context,
"received new ephemeral timer value {:?} for chat {}, checking if it should be applied",
ephemeral_timer,
chat_id
);
if is_dc_message == MessengerMessage::Yes
&& get_previous_message(context, mime_parser)
.await?
@@ -909,7 +935,10 @@ async fn add_parts(
// value is different, it means the sender has not received some timer update that we
// have seen or sent ourselves, so we ignore incoming timer to prevent a rollback.
warn!(
"Ignoring ephemeral timer change to {ephemeral_timer:?} for chat {chat_id} to avoid rollback.",
context,
"ignoring ephemeral timer change to {:?} for chat {} to avoid rollback",
ephemeral_timer,
chat_id
);
} else if chat_id
.update_timestamp(context, Param::EphemeralSettingsTimestamp, sent_timestamp)
@@ -919,9 +948,15 @@ async fn add_parts(
.inner_set_ephemeral_timer(context, ephemeral_timer)
.await
{
warn!("Failed to modify timer for chat {chat_id}: {err:#}.");
warn!(
context,
"failed to modify timer for chat {}: {:#}", chat_id, err
);
} else {
info!("Updated ephemeral timer to {ephemeral_timer:?} for chat {chat_id}.");
info!(
context,
"updated ephemeral timer to {:?} for chat {}", ephemeral_timer, chat_id
);
if mime_parser.is_system_message != SystemMessage::EphemeralTimerChanged {
chat::add_info_msg(
context,
@@ -933,7 +968,10 @@ async fn add_parts(
}
}
} else {
warn!("Ignoring ephemeral timer change to {ephemeral_timer:?} because it is outdated.");
warn!(
context,
"ignoring ephemeral timer change to {:?} because it's outdated", ephemeral_timer
);
}
}
@@ -961,7 +999,7 @@ async fn add_parts(
if chat.is_protected() || new_status.is_some() {
if let Err(err) = check_verified_properties(context, mime_parser, from_id, to_ids).await
{
warn!("Verification problem: {err:#}.");
warn!(context, "verification problem: {:#}", err);
let s = format!("{err}. See 'Info' for more details");
mime_parser.repl_msg_by_error(&s);
} else {
@@ -1218,7 +1256,10 @@ SET rfc724_mid=excluded.rfc724_mid, chat_id=excluded.chat_id,
chat_id.unarchive_if_not_muted(context, state).await?;
info!("Message has {icnt} parts and is assigned to chat #{chat_id}.");
info!(
context,
"Message has {icnt} parts and is assigned to chat #{chat_id}."
);
// new outgoing message from another device marks the chat as noticed.
if !incoming && !chat_id.is_special() {
@@ -1299,6 +1340,7 @@ async fn save_locations(
}
} else {
warn!(
context,
"Address in location.kml {:?} is not the same as the sender address {:?}.",
addr,
contact.get_addr()
@@ -1386,8 +1428,8 @@ async fn lookup_chat_by_reply(
}
info!(
"Assigning message to {} as it's a reply to {}.",
parent_chat.id, parent.rfc724_mid
context,
"Assigning message to {} as it's a reply to {}", parent_chat.id, parent.rfc724_mid
);
return Ok(Some((parent_chat.id, parent_chat.blocked)));
}
@@ -1457,7 +1499,7 @@ async fn create_or_lookup_group(
.map(|chat_id| (chat_id, create_blocked));
return Ok(res);
} else {
info!("Creating ad-hoc group prevented from caller.");
info!(context, "creating ad-hoc group prevented from caller");
return Ok(None);
};
@@ -1483,7 +1525,7 @@ async fn create_or_lookup_group(
let create_protected = if mime_parser.get_header(HeaderDef::ChatVerified).is_some() {
if let Err(err) = check_verified_properties(context, mime_parser, from_id, to_ids).await {
warn!("Verification problem: {err:#}.");
warn!(context, "verification problem: {:#}", err);
let s = format!("{err}. See 'Info' for more details");
mime_parser.repl_msg_by_error(&s);
}
@@ -1515,7 +1557,7 @@ async fn create_or_lookup_group(
{
// Group does not exist but should be created.
if !allow_creation {
info!("Creating group forbidden by caller.");
info!(context, "creating group forbidden by caller");
return Ok(None);
}
@@ -1560,7 +1602,6 @@ async fn create_or_lookup_group(
// .await?;
//}
info!("Chat {} is created.", new_chat_id);
context.emit_event(EventType::ChatModified(new_chat_id));
}
@@ -1577,7 +1618,7 @@ async fn create_or_lookup_group(
} else {
// The message was decrypted successfully, but contains a late "quit" or otherwise
// unwanted message.
info!("Message belongs to unwanted group (TRASH).");
info!(context, "message belongs to unwanted group (TRASH)");
Ok(Some((DC_CHAT_ID_TRASH, Blocked::Not)))
}
}
@@ -1617,7 +1658,7 @@ async fn apply_group_changes(
Some(stock_str::msg_del_member(context, &removed_addr, from_id).await)
};
}
None => warn!("Removed {removed_addr:?} has no contact_id."),
None => warn!(context, "removed {:?} has no contact_id", removed_addr),
}
} else {
removed_id = None;
@@ -1642,7 +1683,7 @@ async fn apply_group_changes(
.update_timestamp(context, Param::GroupNameTimestamp, sent_timestamp)
.await?
{
info!("Updating grpname for chat {chat_id}.");
info!(context, "updating grpname for chat {}", chat_id);
context
.sql
.execute(
@@ -1682,7 +1723,7 @@ async fn apply_group_changes(
if mime_parser.get_header(HeaderDef::ChatVerified).is_some() {
if let Err(err) = check_verified_properties(context, mime_parser, from_id, to_ids).await {
warn!("Verification problem: {err:#}.");
warn!(context, "verification problem: {:#}", err);
let s = format!("{err}. See 'Info' for more details");
mime_parser.repl_msg_by_error(&s);
}
@@ -1701,7 +1742,10 @@ async fn apply_group_changes(
&& !chat::is_contact_in_chat(context, chat_id, from_id).await?
{
warn!(
"Contact {from_id} attempts to modify group chat {chat_id} member list without being a member."
context,
"Contact {} attempts to modify group chat {} member list without being a member.",
from_id,
chat_id
);
} else if chat_id
.update_timestamp(context, Param::MemberListTimestamp, sent_timestamp)
@@ -1734,7 +1778,10 @@ async fn apply_group_changes(
}
members_to_add.dedup();
info!("Adding {members_to_add:?} to chat id={chat_id}.");
info!(
context,
"adding {:?} to chat id={}", members_to_add, chat_id
);
chat::add_to_chat_contacts_table(context, chat_id, &members_to_add).await?;
send_event_chat_modified = true;
}
@@ -1742,13 +1789,19 @@ async fn apply_group_changes(
if let Some(avatar_action) = &mime_parser.group_avatar {
if !chat::is_contact_in_chat(context, chat_id, ContactId::SELF).await? {
warn!("Received group avatar update for group chat {chat_id} we are not a member of.");
warn!(
context,
"Received group avatar update for group chat {} we are not a member of.", chat_id
);
} else if !chat::is_contact_in_chat(context, chat_id, from_id).await? {
warn!(
"Contact {from_id} attempts to modify group chat {chat_id} avatar without being a member.",
context,
"Contact {} attempts to modify group chat {} avatar without being a member.",
from_id,
chat_id
);
} else {
info!("Group-avatar change for {chat_id}.");
info!(context, "group-avatar change for {}", chat_id);
if chat
.param
.update_timestamp(Param::AvatarTimestamp, sent_timestamp)?
@@ -1882,7 +1935,7 @@ async fn create_or_lookup_mailinglist(
.await
.with_context(|| {
format!(
"failed to create mailinglist '{}' for grpid={}",
"Failed to create mailinglist '{}' for grpid={}",
&name, &listid
)
})?;
@@ -1890,7 +1943,7 @@ async fn create_or_lookup_mailinglist(
chat::add_to_chat_contacts_table(context, chat_id, &[ContactId::SELF]).await?;
Ok(Some((chat_id, blocked)))
} else {
info!("Creating list forbidden by caller.");
info!(context, "creating list forbidden by caller");
Ok(None)
}
}
@@ -1913,7 +1966,7 @@ async fn apply_mailinglist_changes(
let list_post = match ContactAddress::new(list_post) {
Ok(list_post) => list_post,
Err(err) => {
warn!("Invalid List-Post: {:#}.", err);
warn!(context, "Invalid List-Post: {:#}.", err);
return Ok(());
}
};
@@ -1980,7 +2033,10 @@ async fn create_adhoc_group(
member_ids: &[ContactId],
) -> Result<Option<ChatId>> {
if mime_parser.is_mailinglist_message() {
info!("Not creating ad-hoc group for mailing list message.");
info!(
context,
"not creating ad-hoc group for mailing list message"
);
return Ok(None);
}
@@ -1994,12 +2050,15 @@ async fn create_adhoc_group(
// Chat-Group-ID and incompatible Message-ID format.
//
// Instead, assign the message to 1:1 chat with the sender.
warn!("Not creating ad-hoc group for message that cannot be decrypted.");
warn!(
context,
"not creating ad-hoc group for message that cannot be decrypted"
);
return Ok(None);
}
if member_ids.len() < 3 {
info!("Not creating ad-hoc group: too few contacts.");
info!(context, "not creating ad-hoc group: too few contacts");
return Ok(None);
}
@@ -2039,7 +2098,11 @@ async fn check_verified_properties(
// we do not fail here currently, this would exclude (a) non-deltas
// and (b) deltas with different protection views across multiple devices.
// for group creation or protection enabled/disabled, however, Chat-Verified is respected.
warn!("{} did not mark message as protected.", contact.get_addr());
warn!(
context,
"{} did not mark message as protected.",
contact.get_addr()
);
}
// ensure, the contact is verified
@@ -2101,7 +2164,8 @@ async fn check_verified_properties(
for (to_addr, mut is_verified) in rows {
info!(
"check_verified_properties: {:?} self={:?}.",
context,
"check_verified_properties: {:?} self={:?}",
to_addr,
context.is_self_addr(&to_addr).await
);
@@ -2119,7 +2183,7 @@ async fn check_verified_properties(
|| peerstate.verified_key_fingerprint != peerstate.public_key_fingerprint
&& peerstate.verified_key_fingerprint != peerstate.gossip_key_fingerprint
{
info!("{} has verified {}.", contact.get_addr(), to_addr);
info!(context, "{} has verified {}.", contact.get_addr(), to_addr);
let fp = peerstate.gossip_key_fingerprint.clone();
if let Some(fp) = fp {
peerstate.set_verified(
@@ -2251,7 +2315,7 @@ async fn add_or_lookup_contacts_by_address_list(
add_or_lookup_contact_by_addr(context, display_name, addr, origin).await?;
contact_ids.insert(contact_id);
} else {
warn!("Contact with address {:?} cannot exist.", addr);
warn!(context, "Contact with address {:?} cannot exist.", addr);
}
}

View File

@@ -1,6 +1,4 @@
use tokio::fs;
use tracing_futures::WithSubscriber;
use tracing_subscriber::{prelude::*, registry::Registry};
use super::*;
use crate::aheader::EncryptPreference;
@@ -1342,12 +1340,8 @@ async fn test_apply_mailinglist_changes_assigned_by_reply() {
let chat = Chat::load_from_db(&t, chat_id).await.unwrap();
assert!(chat.can_send(&t).await.unwrap());
let subscriber = Registry::default().with(t.to_layer());
let imf_raw = format!("In-Reply-To: 3333@example.org\n{GH_MAILINGLIST2}");
receive_imf(&t, imf_raw.as_bytes(), false)
.with_subscriber(subscriber)
.await
.unwrap();
receive_imf(&t, imf_raw.as_bytes(), false).await.unwrap();
assert_eq!(
t.get_last_msg().await.in_reply_to.unwrap(),

View File

@@ -5,19 +5,14 @@ use anyhow::{bail, Context as _, Result};
use async_channel::{self as channel, Receiver, Sender};
use futures::future::try_join_all;
use futures_lite::FutureExt;
use tokio::sync::{oneshot, RwLock, RwLockWriteGuard};
use tokio::sync::{RwLock, RwLockWriteGuard};
use tokio::task;
use tracing::instrument::Instrument;
use tracing::{error, info, warn};
use tracing_futures::WithSubscriber;
use self::connectivity::ConnectivityStore;
use crate::config::Config;
use crate::contact::{ContactId, RecentlySeenLoop};
use crate::context::future::ContextIdFutureExt;
use crate::context::Context;
use crate::ephemeral::{self, delete_expired_imap_messages};
use crate::events::EventType;
use crate::imap::{FolderMeaning, Imap};
use crate::job;
use crate::location;
@@ -61,10 +56,11 @@ impl SchedulerState {
/// Starts the scheduler if it is not yet started.
async fn do_start(mut inner: RwLockWriteGuard<'_, InnerSchedulerState>, context: Context) {
info!("Starting IO.");
info!(context, "starting IO");
let ctx = context.clone();
match Scheduler::start(context).await {
Ok(scheduler) => inner.scheduler = Some(scheduler),
Err(err) => error!("Failed to start IO: {err:#}."),
Err(err) => error!(&ctx, "Failed to start IO: {:#}", err),
}
}
@@ -82,7 +78,7 @@ impl SchedulerState {
// For this, the caller needs to instruct the event poller
// to terminate on receiving the next event and then call stop_io()
// which will emit the below event(s)
context.emit_event(EventType::Info("Stopping IO.".to_string()));
info!(context, "stopping IO");
if let Some(debug_logging) = context.debug_logging.read().await.as_ref() {
debug_logging.loop_handle.abort();
}
@@ -93,33 +89,25 @@ impl SchedulerState {
/// Pauses the IO scheduler.
///
/// If it is currently running the scheduler will be stopped. When the
/// [`IoPausedGuard`] is dropped the scheduler is started again.
/// If it is currently running the scheduler will be stopped. When
/// [`IoPausedGuard::resume`] is called the scheduler is started again.
///
/// If in the meantime [`SchedulerState::start`] or [`SchedulerState::stop`] is called
/// resume will do the right thing and restore the scheduler to the state requested by
/// the last call.
pub(crate) async fn pause<'a>(&'_ self, context: Context) -> IoPausedGuard {
{
let mut inner = self.inner.write().await;
inner.paused = true;
Self::do_stop(inner, &context).await;
let mut inner = self.inner.write().await;
inner.paused = true;
Self::do_stop(inner, &context).await;
IoPausedGuard {
context,
done: false,
}
let (tx, rx) = oneshot::channel();
tokio::spawn(async move {
rx.await.ok();
let mut inner = context.scheduler.inner.write().await;
inner.paused = false;
if inner.started && inner.scheduler.is_none() {
SchedulerState::do_start(inner, context.clone()).await;
}
});
IoPausedGuard { sender: Some(tx) }
}
/// Restarts the scheduler, only if it is running.
pub(crate) async fn restart(&self, context: &Context) {
info!("Restarting IO.");
info!(context, "restarting IO");
if self.is_running().await {
self.stop(context).await;
self.start(context.clone()).await;
@@ -206,21 +194,31 @@ struct InnerSchedulerState {
paused: bool,
}
/// Guard to make sure the IO Scheduler is resumed.
///
/// Returned by [`SchedulerState::pause`]. To resume the IO scheduler simply drop this
/// guard.
#[derive(Debug)]
pub(crate) struct IoPausedGuard {
sender: Option<oneshot::Sender<()>>,
context: Context,
done: bool,
}
impl IoPausedGuard {
pub(crate) async fn resume(&mut self) {
self.done = true;
let mut inner = self.context.scheduler.inner.write().await;
inner.paused = false;
if inner.started && inner.scheduler.is_none() {
SchedulerState::do_start(inner, self.context.clone()).await;
}
}
}
impl Drop for IoPausedGuard {
fn drop(&mut self) {
if let Some(sender) = self.sender.take() {
// Can only fail if receiver is dropped, but then we're already resumed.
sender.send(()).ok();
if self.done {
return;
}
// Async .resume() should be called manually due to lack of async drop.
error!(self.context, "Pause guard dropped without resuming.");
}
}
@@ -252,7 +250,7 @@ pub(crate) struct Scheduler {
async fn inbox_loop(ctx: Context, started: Sender<()>, inbox_handlers: ImapConnectionHandlers) {
use futures::future::FutureExt;
info!("starting inbox loop");
info!(ctx, "starting inbox loop");
let ImapConnectionHandlers {
mut connection,
stop_receiver,
@@ -262,7 +260,7 @@ async fn inbox_loop(ctx: Context, started: Sender<()>, inbox_handlers: ImapConne
let fut = async move {
let ctx = ctx1;
if let Err(err) = started.send(()).await {
warn!("Inbox loop, missing started receiver: {err:#}.");
warn!(ctx, "inbox loop, missing started receiver: {}", err);
return;
};
@@ -270,7 +268,7 @@ async fn inbox_loop(ctx: Context, started: Sender<()>, inbox_handlers: ImapConne
loop {
let job = match job::load_next(&ctx, &info).await {
Err(err) => {
error!("Failed loading job from the database: {err:#}.");
error!(ctx, "Failed loading job from the database: {:#}.", err);
None
}
Ok(job) => job,
@@ -285,14 +283,14 @@ async fn inbox_loop(ctx: Context, started: Sender<()>, inbox_handlers: ImapConne
let quota_requested = ctx.quota_update_request.swap(false, Ordering::Relaxed);
if quota_requested {
if let Err(err) = ctx.update_recent_quota(&mut connection).await {
warn!("Failed to update quota: {err:#}.");
warn!(ctx, "Failed to update quota: {:#}.", err);
}
}
let resync_requested = ctx.resync_request.swap(false, Ordering::Relaxed);
if resync_requested {
if let Err(err) = connection.resync_folders(&ctx).await {
warn!("Failed to resync folders: {err:#}.");
warn!(ctx, "Failed to resync folders: {:#}.", err);
ctx.resync_request.store(true, Ordering::Relaxed);
}
}
@@ -308,7 +306,7 @@ async fn inbox_loop(ctx: Context, started: Sender<()>, inbox_handlers: ImapConne
}
}
Err(err) => {
warn!("Failed to get last housekeeping time: {err:#}.");
warn!(ctx, "Failed to get last housekeeping time: {}", err);
}
};
@@ -322,17 +320,17 @@ async fn inbox_loop(ctx: Context, started: Sender<()>, inbox_handlers: ImapConne
if let Err(err) =
ctx.set_config_bool(Config::FetchedExistingMsgs, true).await
{
warn!("Can't set Config::FetchedExistingMsgs: {:#}", err);
warn!(ctx, "Can't set Config::FetchedExistingMsgs: {:#}", err);
}
if let Err(err) = connection.fetch_existing_msgs(&ctx).await {
warn!("Failed to fetch existing messages: {:#}", err);
connection.trigger_reconnect();
warn!(ctx, "Failed to fetch existing messages: {:#}", err);
connection.trigger_reconnect(&ctx);
}
}
}
Err(err) => {
warn!("Can't get Config::FetchedExistingMsgs: {:#}", err);
warn!(ctx, "Can't get Config::FetchedExistingMsgs: {:#}", err);
}
}
@@ -345,7 +343,7 @@ async fn inbox_loop(ctx: Context, started: Sender<()>, inbox_handlers: ImapConne
stop_receiver
.recv()
.map(|_| {
info!("shutting down inbox loop");
info!(ctx, "shutting down inbox loop");
})
.race(fut)
.await;
@@ -365,7 +363,7 @@ async fn fetch_idle(
let folder_config = match folder_meaning.to_config() {
Some(c) => c,
None => {
error!("Bad folder meaning: {}", folder_meaning);
error!(ctx, "Bad folder meaning: {}", folder_meaning);
return connection
.fake_idle(ctx, None, FolderMeaning::Unknown)
.await;
@@ -375,8 +373,8 @@ async fn fetch_idle(
Ok(folder) => folder,
Err(err) => {
warn!(
"Can not watch {} folder, failed to retrieve config: {:#}",
folder_config, err
ctx,
"Can not watch {} folder, failed to retrieve config: {:#}", folder_config, err
);
return connection
.fake_idle(ctx, None, FolderMeaning::Unknown)
@@ -388,7 +386,7 @@ async fn fetch_idle(
watch_folder
} else {
connection.connectivity.set_not_configured(ctx).await;
info!("Can not watch {folder_config} folder, not set");
info!(ctx, "Can not watch {} folder, not set", folder_config);
return connection
.fake_idle(ctx, None, FolderMeaning::Unknown)
.await;
@@ -400,8 +398,8 @@ async fn fetch_idle(
.await
.context("prepare IMAP connection")
{
warn!("{err:#}");
connection.trigger_reconnect();
warn!(ctx, "{:#}", err);
connection.trigger_reconnect(ctx);
return connection
.fake_idle(ctx, Some(watch_folder), folder_meaning)
.await;
@@ -415,7 +413,7 @@ async fn fetch_idle(
.context("store_seen_flags_on_imap")
.ok_or_log(ctx);
} else {
warn!("No session even though we just prepared it");
warn!(ctx, "No session even though we just prepared it");
}
}
@@ -425,8 +423,8 @@ async fn fetch_idle(
.await
.context("fetch_move_delete")
{
connection.trigger_reconnect();
warn!("{:#}", err);
connection.trigger_reconnect(ctx);
warn!(ctx, "{:#}", err);
return InterruptInfo::new(false);
}
@@ -449,7 +447,7 @@ async fn fetch_idle(
Err(err) => {
// Don't reconnect, if there is a problem with the connection we will realize this when IDLEing
// but maybe just one folder can't be selected or something
warn!("{:#}", err);
warn!(ctx, "{:#}", err);
}
Ok(true) => {
// Fetch the watched folder again in case scanning other folder moved messages
@@ -463,8 +461,8 @@ async fn fetch_idle(
.await
.context("fetch_move_delete after scan_folders")
{
connection.trigger_reconnect();
warn!("{:#}", err);
connection.trigger_reconnect(ctx);
warn!(ctx, "{:#}", err);
return InterruptInfo::new(false);
}
}
@@ -481,18 +479,21 @@ async fn fetch_idle(
connection.connectivity.set_connected(ctx).await;
ctx.emit_event(EventType::ImapInboxIdle);
if let Some(session) = connection.session.take() {
if !session.can_idle() {
info!("IMAP session does not support IDLE, going to fake idle.");
info!(
ctx,
"IMAP session does not support IDLE, going to fake idle."
);
return connection
.fake_idle(ctx, Some(watch_folder), folder_meaning)
.await;
}
info!("IMAP session supports IDLE, using it.");
info!(ctx, "IMAP session supports IDLE, using it.");
match session
.idle(
ctx,
connection.idle_interrupt_receiver.clone(),
Some(watch_folder),
)
@@ -504,13 +505,13 @@ async fn fetch_idle(
info
}
Err(err) => {
connection.trigger_reconnect();
warn!("{:#}", err);
connection.trigger_reconnect(ctx);
warn!(ctx, "{:#}", err);
InterruptInfo::new(false)
}
}
} else {
warn!("No IMAP session, going to fake idle.");
warn!(ctx, "No IMAP session, going to fake idle.");
connection
.fake_idle(ctx, Some(watch_folder), folder_meaning)
.await
@@ -525,7 +526,7 @@ async fn simple_imap_loop(
) {
use futures::future::FutureExt;
info!("starting simple loop for {folder_meaning}");
info!(ctx, "starting simple loop for {}", folder_meaning);
let ImapConnectionHandlers {
mut connection,
stop_receiver,
@@ -536,7 +537,7 @@ async fn simple_imap_loop(
let fut = async move {
let ctx = ctx1;
if let Err(err) = started.send(()).await {
warn!("simple imap loop, missing started receiver: {}", err);
warn!(&ctx, "simple imap loop, missing started receiver: {}", err);
return;
}
@@ -548,7 +549,7 @@ async fn simple_imap_loop(
stop_receiver
.recv()
.map(|_| {
info!("shutting down simple loop");
info!(ctx, "shutting down simple loop");
})
.race(fut)
.await;
@@ -557,7 +558,7 @@ async fn simple_imap_loop(
async fn smtp_loop(ctx: Context, started: Sender<()>, smtp_handlers: SmtpConnectionHandlers) {
use futures::future::FutureExt;
info!("starting smtp loop");
info!(ctx, "starting smtp loop");
let SmtpConnectionHandlers {
mut connection,
stop_receiver,
@@ -568,19 +569,20 @@ async fn smtp_loop(ctx: Context, started: Sender<()>, smtp_handlers: SmtpConnect
let fut = async move {
let ctx = ctx1;
if let Err(err) = started.send(()).await {
warn!("smtp loop, missing started receiver: {}", err);
warn!(&ctx, "smtp loop, missing started receiver: {}", err);
return;
}
let mut timeout = None;
loop {
if let Err(err) = send_smtp_messages(&ctx, &mut connection).await {
warn!("send_smtp_messages failed: {:#}", err);
warn!(ctx, "send_smtp_messages failed: {:#}", err);
timeout = Some(timeout.map_or(30, |timeout: u64| timeout.saturating_mul(3)))
} else {
let duration_until_can_send = ctx.ratelimit.read().await.until_can_send();
if !duration_until_can_send.is_zero() {
info!(
ctx,
"smtp got rate limited, waiting for {} until can send again",
duration_to_str(duration_until_can_send)
);
@@ -595,7 +597,7 @@ async fn smtp_loop(ctx: Context, started: Sender<()>, smtp_handlers: SmtpConnect
}
// Fake Idle
info!("smtp fake idle - started");
info!(ctx, "smtp fake idle - started");
match &connection.last_send_error {
None => connection.connectivity.set_connected(&ctx).await,
Some(err) => connection.connectivity.set_err(&ctx, err).await,
@@ -607,8 +609,8 @@ async fn smtp_loop(ctx: Context, started: Sender<()>, smtp_handlers: SmtpConnect
// unnecessary retries.
if let Some(timeout) = timeout {
info!(
"smtp has messages to retry, planning to retry {} seconds later",
timeout
ctx,
"smtp has messages to retry, planning to retry {} seconds later", timeout
);
let duration = std::time::Duration::from_secs(timeout);
tokio::time::timeout(duration, async {
@@ -617,18 +619,18 @@ async fn smtp_loop(ctx: Context, started: Sender<()>, smtp_handlers: SmtpConnect
.await
.unwrap_or_default();
} else {
info!("smtp has no messages to retry, waiting for interrupt");
info!(ctx, "smtp has no messages to retry, waiting for interrupt");
idle_interrupt_receiver.recv().await.unwrap_or_default();
};
info!("smtp fake idle - interrupted")
info!(ctx, "smtp fake idle - interrupted")
}
};
stop_receiver
.recv()
.map(|_| {
info!("shutting down smtp loop");
info!(ctx, "shutting down smtp loop");
})
.race(fut)
.await;
@@ -650,12 +652,7 @@ impl Scheduler {
let (inbox_start_send, inbox_start_recv) = channel::bounded(1);
let handle = {
let ctx = ctx.clone();
task::spawn(
inbox_loop(ctx, inbox_start_send, inbox_handlers)
.with_current_subscriber()
.bind_current_context_id()
.in_current_span(),
)
task::spawn(inbox_loop(ctx, inbox_start_send, inbox_handlers))
};
let inbox = SchedBox {
meaning: FolderMeaning::Inbox,
@@ -675,12 +672,7 @@ impl Scheduler {
let (conn_state, handlers) = ImapConnectionState::new(&ctx).await?;
let (start_send, start_recv) = channel::bounded(1);
let ctx = ctx.clone();
let handle = task::spawn(
simple_imap_loop(ctx, start_send, handlers, meaning)
.with_current_subscriber()
.bind_current_context_id()
.in_current_span(),
);
let handle = task::spawn(simple_imap_loop(ctx, start_send, handlers, meaning));
oboxes.push(SchedBox {
meaning,
conn_state,
@@ -692,37 +684,22 @@ impl Scheduler {
let smtp_handle = {
let ctx = ctx.clone();
task::spawn(
smtp_loop(ctx, smtp_start_send, smtp_handlers)
.with_current_subscriber()
.bind_current_context_id()
.in_current_span(),
)
task::spawn(smtp_loop(ctx, smtp_start_send, smtp_handlers))
};
start_recvs.push(smtp_start_recv);
let ephemeral_handle = {
let ctx = ctx.clone();
task::spawn(
async move {
ephemeral::ephemeral_loop(&ctx, ephemeral_interrupt_recv).await;
}
.with_current_subscriber()
.bind_current_context_id()
.in_current_span(),
)
task::spawn(async move {
ephemeral::ephemeral_loop(&ctx, ephemeral_interrupt_recv).await;
})
};
let location_handle = {
let ctx = ctx.clone();
task::spawn(
async move {
location::location_loop(&ctx, location_interrupt_recv).await;
}
.with_current_subscriber()
.bind_current_context_id()
.in_current_span(),
)
task::spawn(async move {
location::location_loop(&ctx, location_interrupt_recv).await;
})
};
let recently_seen_loop = RecentlySeenLoop::new(ctx.clone());
@@ -744,7 +721,7 @@ impl Scheduler {
bail!("failed to start scheduler: {}", err);
}
info!("scheduler is running");
info!(ctx, "scheduler is running");
Ok(res)
}

View File

@@ -4,7 +4,6 @@ use std::{iter::once, ops::Deref, sync::Arc};
use anyhow::{anyhow, Result};
use humansize::{format_size, BINARY};
use tokio::sync::Mutex;
use tracing::info;
use crate::events::EventType;
use crate::imap::{scan_folders::get_watched_folder_configs, FolderMeaning};
@@ -409,7 +408,7 @@ impl Context {
ret +=
&format!("<b>{}:</b> ", &*escaper::encode_minimal(root_name));
} else {
info!("connectivity: root name hidden: \"{}\"", root_name);
info!(self, "connectivity: root name hidden: \"{}\"", root_name);
}
let messages = stock_str::messages(self).await;

View File

@@ -4,7 +4,6 @@ use std::convert::TryFrom;
use anyhow::{bail, Context as _, Error, Result};
use percent_encoding::{utf8_percent_encode, AsciiSet, NON_ALPHANUMERIC};
use tracing::{error, info, warn};
use crate::aheader::EncryptPreference;
use crate::chat::{self, Chat, ChatId, ChatIdBlocked};
@@ -125,7 +124,7 @@ pub async fn get_securejoin_qr(context: &Context, group: Option<ChatId>) -> Resu
)
};
info!("Generated QR code: {}", qr);
info!(context, "Generated QR code: {}", qr);
Ok(qr)
}
@@ -134,7 +133,7 @@ async fn get_self_fingerprint(context: &Context) -> Option<Fingerprint> {
match SignedPublicKey::load_self(context).await {
Ok(key) => Some(key.fingerprint()),
Err(_) => {
warn!("get_self_fingerprint(): failed to load key");
warn!(context, "get_self_fingerprint(): failed to load key");
None
}
}
@@ -148,9 +147,9 @@ async fn get_self_fingerprint(context: &Context) -> Option<Fingerprint> {
/// The function returns immediately and the handshake will run in background.
pub async fn join_securejoin(context: &Context, qr: &str) -> Result<ChatId> {
securejoin(context, qr).await.map_err(|err| {
warn!("Fatal joiner error: {:#}", err);
warn!(context, "Fatal joiner error: {:#}", err);
// The user just scanned this QR code so has context on what failed.
error!("QR process failed");
error!(context, "QR process failed");
err
})
}
@@ -161,7 +160,7 @@ async fn securejoin(context: &Context, qr: &str) -> Result<ChatId> {
==== Step 2 in "Setup verified contact" protocol =====
========================================================*/
info!("Requesting secure-join ...",);
info!(context, "Requesting secure-join ...",);
let qr_scan = check_qr(context, qr).await?;
let invite = QrInvite::try_from(qr_scan)?;
@@ -216,6 +215,7 @@ async fn fingerprint_equals_sender(
Ok(peerstate) => peerstate,
Err(err) => {
warn!(
context,
"Failed to sender peerstate for {}: {}",
contact.get_addr(),
err
@@ -288,8 +288,8 @@ pub(crate) async fn handle_securejoin_handshake(
.context("Not a Secure-Join message")?;
info!(
">>>>>>>>>>>>>>>>>>>>>>>>> secure-join message \'{}\' received",
step,
context,
">>>>>>>>>>>>>>>>>>>>>>>>> secure-join message \'{}\' received", step,
);
let join_vg = step.starts_with("vg-");
@@ -308,15 +308,15 @@ pub(crate) async fn handle_securejoin_handshake(
let invitenumber = match mime_message.get_header(HeaderDef::SecureJoinInvitenumber) {
Some(n) => n,
None => {
warn!("Secure-join denied (invitenumber missing)");
warn!(context, "Secure-join denied (invitenumber missing)");
return Ok(HandshakeMessage::Ignore);
}
};
if !token::exists(context, token::Namespace::InviteNumber, invitenumber).await {
warn!("Secure-join denied (bad invitenumber).");
warn!(context, "Secure-join denied (bad invitenumber).");
return Ok(HandshakeMessage::Ignore);
}
info!("Secure-join requested.",);
info!(context, "Secure-join requested.",);
inviter_progress!(context, contact_id, 300);
@@ -366,7 +366,7 @@ pub(crate) async fn handle_securejoin_handshake(
return Ok(HandshakeMessage::Ignore);
}
};
if !encrypted_and_signed(mime_message, Some(&fingerprint)) {
if !encrypted_and_signed(context, mime_message, Some(&fingerprint)) {
could_not_establish_secure_connection(
context,
contact_id,
@@ -386,7 +386,7 @@ pub(crate) async fn handle_securejoin_handshake(
.await?;
return Ok(HandshakeMessage::Ignore);
}
info!("Fingerprint verified.",);
info!(context, "Fingerprint verified.",);
// verify that the `Secure-Join-Auth:`-header matches the secret written to the QR code
let auth_0 = match mime_message.get_header(HeaderDef::SecureJoinAuth) {
Some(auth) => auth,
@@ -429,7 +429,7 @@ pub(crate) async fn handle_securejoin_handshake(
return Ok(HandshakeMessage::Ignore);
}
Contact::scaleup_origin_by_id(context, contact_id, Origin::SecurejoinInvited).await?;
info!("Auth verified.",);
info!(context, "Auth verified.",);
context.emit_event(EventType::ContactsChanged(Some(contact_id)));
inviter_progress!(context, contact_id, 600);
if join_vg {
@@ -439,7 +439,7 @@ pub(crate) async fn handle_securejoin_handshake(
let field_grpid = match mime_message.get_header(HeaderDef::SecureJoinGroup) {
Some(s) => s.as_str(),
None => {
warn!("Missing Secure-Join-Group header");
warn!(context, "Missing Secure-Join-Group header");
return Ok(HandshakeMessage::Ignore);
}
};
@@ -450,7 +450,7 @@ pub(crate) async fn handle_securejoin_handshake(
chat::add_contact_to_chat_ex(context, group_chat_id, contact_id, true)
.await
{
error!("failed to add contact: {}", err);
error!(context, "failed to add contact: {}", err);
}
}
None => bail!("Chat {} not found", &field_grpid),
@@ -501,7 +501,7 @@ pub(crate) async fn handle_securejoin_handshake(
if let Ok(contact) = Contact::get_by_id(context, contact_id).await {
if contact.is_verified(context).await? == VerifiedStatus::Unverified {
warn!("{} invalid.", step);
warn!(context, "{} invalid.", step);
return Ok(HandshakeMessage::Ignore);
}
if join_vg {
@@ -510,7 +510,7 @@ pub(crate) async fn handle_securejoin_handshake(
.map(|s| s.as_str())
.unwrap_or_else(|| "");
if let Err(err) = chat::get_chat_id_by_grpid(context, field_grpid).await {
warn!("Failed to lookup chat_id from grpid: {}", err);
warn!(context, "Failed to lookup chat_id from grpid: {}", err);
return Err(
err.context(format!("Chat for group {} not found", &field_grpid))
);
@@ -518,12 +518,12 @@ pub(crate) async fn handle_securejoin_handshake(
}
Ok(HandshakeMessage::Ignore) // "Done" deletes the message and breaks multi-device
} else {
warn!("{} invalid.", step);
warn!(context, "{} invalid.", step);
Ok(HandshakeMessage::Ignore)
}
}
_ => {
warn!("invalid step: {}", step);
warn!(context, "invalid step: {}", step);
Ok(HandshakeMessage::Ignore)
}
}
@@ -557,7 +557,7 @@ pub(crate) async fn observe_securejoin_on_other_device(
let step = mime_message
.get_header(HeaderDef::SecureJoin)
.context("Not a Secure-Join message")?;
info!("observing secure-join message \'{}\'", step);
info!(context, "observing secure-join message \'{}\'", step);
match step.as_str() {
"vg-request-with-auth"
@@ -566,7 +566,11 @@ pub(crate) async fn observe_securejoin_on_other_device(
| "vc-contact-confirm"
| "vg-member-added-received"
| "vc-contact-confirm-received" => {
if !encrypted_and_signed(mime_message, get_self_fingerprint(context).await.as_ref()) {
if !encrypted_and_signed(
context,
mime_message,
get_self_fingerprint(context).await.as_ref(),
) {
could_not_establish_secure_connection(
context,
contact_id,
@@ -711,8 +715,8 @@ async fn could_not_establish_secure_connection(
let msg = stock_str::contact_not_verified(context, &contact).await;
chat::add_info_msg(context, chat_id, &msg, time()).await?;
warn!(
"StockMessage::ContactNotVerified posted to 1:1 chat ({})",
details
context,
"StockMessage::ContactNotVerified posted to 1:1 chat ({})", details
);
Ok(())
}
@@ -729,7 +733,7 @@ async fn mark_peer_as_verified(
PeerstateVerifiedStatus::BidirectVerified,
verifier,
) {
error!("Could not mark peer as verified: {}", err);
error!(context, "Could not mark peer as verified: {}", err);
return Err(err);
}
peerstate.prefer_encrypt = EncryptPreference::Mutual;
@@ -745,24 +749,25 @@ async fn mark_peer_as_verified(
******************************************************************************/
fn encrypted_and_signed(
context: &Context,
mimeparser: &MimeMessage,
expected_fingerprint: Option<&Fingerprint>,
) -> bool {
if !mimeparser.was_encrypted() {
warn!("Message not encrypted.",);
warn!(context, "Message not encrypted.",);
false
} else if let Some(expected_fingerprint) = expected_fingerprint {
if !mimeparser.signatures.contains(expected_fingerprint) {
warn!(
"Message does not match expected fingerprint {}.",
expected_fingerprint,
context,
"Message does not match expected fingerprint {}.", expected_fingerprint,
);
false
} else {
true
}
} else {
warn!("Fingerprint for comparison missing.");
warn!(context, "Fingerprint for comparison missing.");
false
}
}

View File

@@ -4,7 +4,6 @@
//! the required user interactions.
use anyhow::{Context as _, Result};
use tracing::{error, warn};
use super::bobstate::{BobHandshakeStage, BobState};
use super::qrinvite::QrInvite;
@@ -46,7 +45,7 @@ pub(super) async fn start_protocol(context: &Context, invite: QrInvite) -> Resul
let (state, stage, aborted_states) =
BobState::start_protocol(context, invite.clone(), chat_id).await?;
for state in aborted_states {
error!("Aborting previously unfinished QR Join process.");
error!(context, "Aborting previously unfinished QR Join process.");
state.notify_aborted(context, "new QR scanned").await?;
state.emit_progress(context, JoinerProgress::Error);
}
@@ -134,7 +133,10 @@ pub(super) async fn handle_contact_confirm(
Ok(retval)
}
Some(_) => {
warn!("Impossible state returned from handling handshake message");
warn!(
context,
"Impossible state returned from handling handshake message"
);
Ok(retval)
}
None => Ok(retval),
@@ -206,8 +208,8 @@ impl BobState {
let chat_id = self.joining_chat_id(context).await?;
chat::add_info_msg(context, chat_id, &msg, time()).await?;
warn!(
"StockMessage::ContactNotVerified posted to joining chat ({})",
why
context,
"StockMessage::ContactNotVerified posted to joining chat ({})", why
);
Ok(())
}

View File

@@ -9,7 +9,6 @@
use anyhow::{Error, Result};
use rusqlite::Connection;
use tracing::{info, warn};
use super::qrinvite::QrInvite;
use super::{encrypted_and_signed, fingerprint_equals_sender, mark_peer_as_verified};
@@ -97,7 +96,7 @@ impl BobState {
if fingerprint_equals_sender(context, invite.fingerprint(), invite.contact_id()).await?
{
// The scanned fingerprint matches Alice's key, we can proceed to step 4b.
info!("Taking securejoin protocol shortcut");
info!(context, "Taking securejoin protocol shortcut");
send_handshake_message(context, &invite, chat_id, BobHandshakeMsg::RequestWithAuth)
.await?;
(
@@ -246,14 +245,15 @@ impl BobState {
Some(step) => step,
None => {
warn!(
context,
"Message has no Secure-Join header: {}",
mime_message.get_rfc724_mid().unwrap_or_default()
);
return Ok(None);
}
};
if !self.is_msg_expected(step.as_str()) {
info!("{} message out of sync for BobState", step);
if !self.is_msg_expected(context, step.as_str()) {
info!(context, "{} message out of sync for BobState", step);
return Ok(None);
}
match step.as_str() {
@@ -264,19 +264,19 @@ impl BobState {
self.step_contact_confirm(context, mime_message).await
}
_ => {
warn!("Invalid step for BobState: {}", step);
warn!(context, "Invalid step for BobState: {}", step);
Ok(None)
}
}
}
/// Returns `true` if the message is expected according to the protocol.
fn is_msg_expected(&self, step: &str) -> bool {
fn is_msg_expected(&self, context: &Context, step: &str) -> bool {
let variant_matches = match self.invite {
QrInvite::Contact { .. } => step.starts_with("vc-"),
QrInvite::Group { .. } => step.starts_with("vg-"),
};
let step_matches = self.next.matches(step);
let step_matches = self.next.matches(context, step);
variant_matches && step_matches
}
@@ -289,8 +289,11 @@ impl BobState {
context: &Context,
mime_message: &MimeMessage,
) -> Result<Option<BobHandshakeStage>> {
info!("Bob Step 4 - handling vc-auth-require/vg-auth-required message");
if !encrypted_and_signed(mime_message, Some(self.invite.fingerprint())) {
info!(
context,
"Bob Step 4 - handling vc-auth-require/vg-auth-required message"
);
if !encrypted_and_signed(context, mime_message, Some(self.invite.fingerprint())) {
let reason = if mime_message.was_encrypted() {
"Valid signature missing"
} else {
@@ -307,7 +310,7 @@ impl BobState {
.await?;
return Ok(Some(BobHandshakeStage::Terminated("Fingerprint mismatch")));
}
info!("Fingerprint verified.",);
info!(context, "Fingerprint verified.",);
self.update_next(&context.sql, SecureJoinStep::ContactConfirm)
.await?;
self.send_handshake_message(context, BobHandshakeMsg::RequestWithAuth)
@@ -328,7 +331,10 @@ impl BobState {
context: &Context,
mime_message: &MimeMessage,
) -> Result<Option<BobHandshakeStage>> {
info!("Bob Step 7 - handling vc-contact-confirm/vg-member-added message");
info!(
context,
"Bob Step 7 - handling vc-contact-confirm/vg-member-added message"
);
let vg_expect_encrypted = match self.invite {
QrInvite::Contact { .. } => {
// setup-contact is always encrypted
@@ -351,7 +357,7 @@ impl BobState {
}
};
if vg_expect_encrypted
&& !encrypted_and_signed(mime_message, Some(self.invite.fingerprint()))
&& !encrypted_and_signed(context, mime_message, Some(self.invite.fingerprint()))
{
self.update_next(&context.sql, SecureJoinStep::Terminated)
.await?;
@@ -375,7 +381,7 @@ impl BobState {
.map(|s| s.as_str())
.ok_or_else(|| Error::msg("Missing Chat-Group-Member-Added header"))?;
if !context.is_self_addr(member_added).await? {
info!( "Message belongs to a different handshake (scaled up contact anyway to allow creation of group).");
info!(context, "Message belongs to a different handshake (scaled up contact anyway to allow creation of group).");
return Ok(None);
}
}
@@ -383,7 +389,10 @@ impl BobState {
self.send_handshake_message(context, BobHandshakeMsg::ContactConfirmReceived)
.await
.map_err(|_| {
warn!("Failed to send vc-contact-confirm-received/vg-member-added-received");
warn!(
context,
"Failed to send vc-contact-confirm-received/vg-member-added-received"
);
})
// This is not an error affecting the protocol outcome.
.ok();
@@ -521,16 +530,16 @@ pub enum SecureJoinStep {
impl SecureJoinStep {
/// Compares the legacy string representation of a step to a [`SecureJoinStep`] variant.
fn matches(&self, step: &str) -> bool {
fn matches(&self, context: &Context, step: &str) -> bool {
match self {
Self::AuthRequired => step == "vc-auth-required" || step == "vg-auth-required",
Self::ContactConfirm => step == "vc-contact-confirm" || step == "vg-member-added",
SecureJoinStep::Terminated => {
warn!("Terminated state for next securejoin step");
warn!(context, "Terminated state for next securejoin step");
false
}
SecureJoinStep::Completed => {
warn!("Completed state for next securejoin step");
warn!(context, "Completed state for next securejoin step");
false
}
}

View File

@@ -9,7 +9,6 @@ use async_smtp::response::{Category, Code, Detail};
use async_smtp::{self as smtp, EmailAddress, SmtpTransport};
use tokio::io::BufStream;
use tokio::task;
use tracing::{error, info, warn};
use crate::config::Config;
use crate::contact::{Contact, ContactId};
@@ -88,7 +87,7 @@ impl Smtp {
/// Connect using configured parameters.
pub async fn connect_configured(&mut self, context: &Context) -> Result<()> {
if self.has_maybe_stale_connection() {
info!("Closing stale connection");
info!(context, "Closing stale connection");
self.disconnect().await;
}
@@ -235,7 +234,7 @@ impl Smtp {
provider_strict_tls: bool,
) -> Result<()> {
if self.is_connected() {
warn!("SMTP already connected.");
warn!(context, "SMTP already connected.");
return Ok(());
}
@@ -365,7 +364,7 @@ pub(crate) async fn smtp_send(
msg_id: MsgId,
) -> SendResult {
if std::env::var(crate::DCC_MIME_DEBUG).is_ok() {
info!("smtp-sending out mime message:");
info!(context, "smtp-sending out mime message:");
println!("{message}");
}
@@ -386,7 +385,7 @@ pub(crate) async fn smtp_send(
let status = match send_result {
Err(crate::smtp::send::Error::SmtpSend(err)) => {
// Remote error, retry later.
info!("SMTP failed to send: {:?}", &err);
info!(context, "SMTP failed to send: {:?}", &err);
let res = match err {
async_smtp::error::Error::Permanent(ref response) => {
@@ -413,10 +412,10 @@ pub(crate) async fn smtp_send(
};
if maybe_transient {
info!( "Permanent error that is likely to actually be transient, postponing retry for later");
info!(context, "Permanent error that is likely to actually be transient, postponing retry for later");
SendResult::Retry
} else {
info!("Permanent error, message sending failed");
info!(context, "Permanent error, message sending failed");
// If we do not retry, add an info message to the chat.
// Yandex error "554 5.7.1 [2] Message rejected under suspicion of SPAM; https://ya.cc/..."
// should definitely go here, because user has to open the link to
@@ -437,22 +436,27 @@ pub(crate) async fn smtp_send(
// Any extended smtp status codes like x.1.1, x.1.2 or x.1.3 that we
// receive as a transient error are misconfigurations of the smtp server.
// See <https://tools.ietf.org/html/rfc3463#section-3.2>
info!("Received extended status code {} for a transient error. This looks like a misconfigured SMTP server, let's fail immediately", first_word);
info!(context, "Received extended status code {} for a transient error. This looks like a misconfigured SMTP server, let's fail immediately", first_word);
SendResult::Failure(format_err!("Permanent SMTP error: {}", err))
} else {
info!(
context,
"Transient error with status code {}, postponing retry for later",
first_word
);
SendResult::Retry
}
} else {
info!("Transient error without status code, postponing retry for later");
info!(
context,
"Transient error without status code, postponing retry for later"
);
SendResult::Retry
}
}
_ => {
info!(
context,
"Message sending failed without error returned by the server, retry later"
);
SendResult::Retry
@@ -460,7 +464,7 @@ pub(crate) async fn smtp_send(
};
// this clears last_success info
info!("Failed to send message over SMTP, disconnecting");
info!(context, "Failed to send message over SMTP, disconnecting");
smtp.disconnect().await;
res
@@ -468,19 +472,19 @@ pub(crate) async fn smtp_send(
Err(crate::smtp::send::Error::Envelope(err)) => {
// Local error, job is invalid, do not retry.
smtp.disconnect().await;
warn!("SMTP job is invalid: {}", err);
warn!(context, "SMTP job is invalid: {}", err);
SendResult::Failure(err)
}
Err(crate::smtp::send::Error::NoTransport) => {
// Should never happen.
// It does not even make sense to disconnect here.
error!("SMTP job failed because SMTP has no transport");
error!(context, "SMTP job failed because SMTP has no transport");
SendResult::Failure(format_err!("SMTP has not transport"))
}
Err(crate::smtp::send::Error::Other(err)) => {
// Local error, job is invalid, do not retry.
smtp.disconnect().await;
warn!("unable to load job: {}", err);
warn!(context, "unable to load job: {}", err);
SendResult::Failure(err)
}
Ok(()) => SendResult::Success,
@@ -547,8 +551,8 @@ pub(crate) async fn send_msg_to_smtp(
bail!("Number of retries exceeded the limit");
}
info!(
"Try number {} to send message {} over SMTP",
retries, msg_id
context,
"Try number {} to send message {} over SMTP", retries, msg_id
);
let recipients_list = recipients
@@ -557,7 +561,7 @@ pub(crate) async fn send_msg_to_smtp(
|addr| match async_smtp::EmailAddress::new(addr.to_string()) {
Ok(addr) => Some(addr),
Err(err) => {
warn!("invalid recipient: {} {:?}", addr, err);
warn!(context, "invalid recipient: {} {:?}", addr, err);
None
}
},
@@ -570,7 +574,10 @@ pub(crate) async fn send_msg_to_smtp(
.await
.with_context(|| format!("failed to check message {msg_id} existence"))?
{
info!("Sending of message {} was cancelled by the user.", msg_id);
info!(
context,
"Sending of message {} was cancelled by the user.", msg_id
);
return Ok(());
}
@@ -600,7 +607,7 @@ pub(crate) async fn send_msg_to_smtp(
async fn send_mdns(context: &Context, connection: &mut Smtp) -> Result<()> {
loop {
if !context.ratelimit.read().await.can_send() {
info!("Ratelimiter does not allow sending MDNs now");
info!(context, "Ratelimiter does not allow sending MDNs now");
return Ok(());
}
@@ -708,7 +715,7 @@ async fn send_mdn_msg_id(
match smtp_send(context, &recipients, &body, smtp, msg_id).await {
SendResult::Success => {
info!("Successfully sent MDN for {}", msg_id);
info!(context, "Successfully sent MDN for {}", msg_id);
context
.sql
.execute("DELETE FROM smtp_mdns WHERE msg_id = ?", paramsv![msg_id])
@@ -726,7 +733,10 @@ async fn send_mdn_msg_id(
Ok(())
}
SendResult::Retry => {
info!("Temporary SMTP failure while sending an MDN for {}", msg_id);
info!(
context,
"Temporary SMTP failure while sending an MDN for {}", msg_id
);
Ok(())
}
SendResult::Failure(err) => Err(err),
@@ -741,7 +751,7 @@ async fn send_mdn(context: &Context, smtp: &mut Smtp) -> Result<bool> {
context.sql.execute("DELETE FROM smtp_mdns", []).await?;
return Ok(false);
}
info!("Sending MDNs");
info!(context, "Sending MDNs");
context
.sql

View File

@@ -1,7 +1,6 @@
//! # SMTP message sending
use async_smtp::{EmailAddress, Envelope, SendableEmail};
use tracing::warn;
use super::Smtp;
use crate::config::Config;
@@ -71,8 +70,8 @@ impl Smtp {
self.last_success = Some(std::time::SystemTime::now());
} else {
warn!(
"uh? SMTP has no transport, failed to send to {}",
recipients_display
context,
"uh? SMTP has no transport, failed to send to {}", recipients_display
);
return Err(Error::NoTransport);
}

View File

@@ -22,7 +22,6 @@ use crate::param::{Param, Params};
use crate::peerstate::{deduplicate_peerstates, Peerstate};
use crate::stock_str;
use crate::tools::{delete_file, time};
use tracing::{error, info, warn};
#[allow(missing_docs)]
#[macro_export]
@@ -217,7 +216,7 @@ impl Sql {
// the structure is complete now and all objects are usable
if recalc_fingerprints {
info!("[migration] recalc fingerprints");
info!(context, "[migration] recalc fingerprints");
let addrs = self
.query_map(
"SELECT addr FROM acpeerstates;",
@@ -266,7 +265,7 @@ impl Sql {
.await?
}
Err(e) => {
warn!("Migrations can't recode avatar, removing. {:#}", e);
warn!(context, "Migrations can't recode avatar, removing. {:#}", e);
context.set_config(Config::Selfavatar, None).await?
}
}
@@ -281,8 +280,8 @@ impl Sql {
pub async fn open(&self, context: &Context, passphrase: String) -> Result<()> {
if self.is_open().await {
error!(
"Cannot open, database \"{:?}\" already opened.",
self.dbfile,
context,
"Cannot open, database \"{:?}\" already opened.", self.dbfile,
);
bail!("SQL database is already opened.");
}
@@ -292,7 +291,7 @@ impl Sql {
self.close().await;
Err(err)
} else {
info!("Opened database {:?}.", self.dbfile);
info!(context, "Opened database {:?}.", self.dbfile);
*self.is_encrypted.write().await = Some(passphrase_nonempty);
// setup debug logging if there is an entry containing its id
@@ -698,19 +697,28 @@ fn new_connection(path: &Path, passphrase: &str) -> Result<Connection> {
/// Cleanup the account to restore some storage and optimize the database.
pub async fn housekeeping(context: &Context) -> Result<()> {
if let Err(err) = remove_unused_files(context).await {
warn!("Housekeeping: cannot remove unused files: {err:#}.");
warn!(
context,
"Housekeeping: cannot remove unused files: {:#}", err
);
}
if let Err(err) = start_ephemeral_timers(context).await {
warn!("Housekeeping: cannot start ephemeral timers: {err:#}.");
warn!(
context,
"Housekeeping: cannot start ephemeral timers: {:#}", err
);
}
if let Err(err) = prune_tombstones(&context.sql).await {
warn!("Housekeeping: Cannot prune message tombstones: {err:#}.");
warn!(
context,
"Housekeeping: Cannot prune message tombstones: {:#}", err
);
}
if let Err(err) = deduplicate_peerstates(&context.sql).await {
warn!("Failed to deduplicate peerstates: {err:#}.")
warn!(context, "Failed to deduplicate peerstates: {:#}", err)
}
context.schedule_quota_update().await?;
@@ -723,11 +731,11 @@ pub async fn housekeeping(context: &Context) -> Result<()> {
.await
{
Err(err) => {
warn!("Failed to run incremental vacuum: {err:#}.");
warn!(context, "Failed to run incremental vacuum: {err:#}");
}
Ok(Some(())) => {
// Incremental vacuum returns a zero-column result if it did anything.
info!("Successfully ran incremental vacuum.");
info!(context, "Successfully ran incremental vacuum.");
}
Ok(None) => {
// Incremental vacuum returned `SQLITE_DONE` immediately,
@@ -739,7 +747,7 @@ pub async fn housekeeping(context: &Context) -> Result<()> {
.set_config(Config::LastHousekeeping, Some(&time().to_string()))
.await
{
warn!("Can't set config: {e:#}.");
warn!(context, "Can't set config: {}", e);
}
context
@@ -751,7 +759,7 @@ pub async fn housekeeping(context: &Context) -> Result<()> {
.await
.ok_or_log_msg(context, "failed to remove old MDNs");
info!("Housekeeping done.");
info!(context, "Housekeeping done.");
Ok(())
}
@@ -760,7 +768,7 @@ pub async fn remove_unused_files(context: &Context) -> Result<()> {
let mut files_in_use = HashSet::new();
let mut unreferenced_count = 0;
info!("Start housekeeping...");
info!(context, "Start housekeeping...");
maybe_add_from_param(
&context.sql,
&mut files_in_use,
@@ -806,7 +814,7 @@ pub async fn remove_unused_files(context: &Context) -> Result<()> {
.await
.context("housekeeping: failed to SELECT value FROM config")?;
info!("{} files in use.", files_in_use.len());
info!(context, "{} files in use.", files_in_use.len(),);
/* go through directories and delete unused files */
let blobdir = context.get_blobdir();
for p in [&blobdir.join(BLOBS_BACKUP_NAME), blobdir] {
@@ -837,7 +845,8 @@ pub async fn remove_unused_files(context: &Context) -> Result<()> {
// The dir could be created not by a user, but by a desktop
// environment f.e. So, no warning.
info!(
"Housekeeping: Cannot rmdir {}: {:#}.",
context,
"Housekeeping: Cannot rmdir {}: {:#}",
entry.path().display(),
e
);
@@ -858,7 +867,8 @@ pub async fn remove_unused_files(context: &Context) -> Result<()> {
&& (recently_created || recently_modified || recently_accessed)
{
info!(
"Housekeeping: Keeping new unreferenced file #{}: {:?}.",
context,
"Housekeeping: Keeping new unreferenced file #{}: {:?}",
unreferenced_count,
entry.file_name(),
);
@@ -868,13 +878,15 @@ pub async fn remove_unused_files(context: &Context) -> Result<()> {
unreferenced_count += 1;
}
info!(
"Housekeeping: Deleting unreferenced file #{}: {:?}.",
context,
"Housekeeping: Deleting unreferenced file #{}: {:?}",
unreferenced_count,
entry.file_name()
);
let path = entry.path();
if let Err(err) = delete_file(context, &path).await {
error!(
context,
"Failed to delete unused file {}: {:#}.",
path.display(),
err
@@ -883,7 +895,12 @@ pub async fn remove_unused_files(context: &Context) -> Result<()> {
}
}
Err(err) => {
warn!("Housekeeping: Cannot read dir {}: {:#}.", p.display(), err);
warn!(
context,
"Housekeeping: Cannot read dir {}: {:#}",
p.display(),
err
);
}
}
}
@@ -1145,6 +1162,7 @@ mod tests {
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn test_migration_flags() -> Result<()> {
let t = TestContext::new().await;
t.evtracker.get_info_contains("Opened database").await;
// as migrations::run() was already executed on context creation,
// another call should not result in any action needed.
@@ -1156,9 +1174,7 @@ mod tests {
assert!(!disable_server_delete);
assert!(!recode_avatar);
t.emit_event(crate::EventType::Info(
"test_migration_flags: XXX END MARKER".to_string(),
));
info!(&t, "test_migration_flags: XXX END MARKER");
loop {
let evt = t

View File

@@ -1,7 +1,6 @@
//! Migrations module.
use anyhow::{Context as _, Result};
use tracing::{info, warn};
use crate::config::Config;
use crate::constants::ShowEmails;
@@ -373,7 +372,7 @@ UPDATE chats SET protected=1, type=120 WHERE type=130;"#,
)
.await?;
} else {
warn!("Can't parse configured address: {:?}", addr);
warn!(context, "Can't parse configured address: {:?}", addr);
}
}
@@ -716,7 +715,10 @@ CREATE INDEX smtp_messageid ON imap(rfc724_mid);
} else {
"Created new database; "
};
info!("{}[migration] v{}-v{}", created_db, dbversion, new_version);
info!(
context,
"{}[migration] v{}-v{}", created_db, dbversion, new_version
);
}
Ok((

View File

@@ -4,7 +4,6 @@ use anyhow::Result;
use lettre_email::mime::{self};
use lettre_email::PartBuilder;
use serde::{Deserialize, Serialize};
use tracing::{info, warn};
use crate::chat::{Chat, ChatId};
use crate::config::Config;
@@ -91,8 +90,8 @@ impl Context {
let chat = Chat::load_from_db(self, chat_id).await?;
if !chat.is_promoted() {
info!(
"group '{}' not yet promoted, do not sync tokens yet.",
chat.grpid
self,
"group '{}' not yet promoted, do not sync tokens yet.", chat.grpid
);
return Ok(());
}
@@ -227,7 +226,7 @@ impl Context {
/// If eg. just an item cannot be deleted,
/// that should not hold off the other items to be executed.
pub(crate) async fn execute_sync_items(&self, items: &SyncItems) -> Result<()> {
info!("executing {} sync item(s)", items.items.len());
info!(self, "executing {} sync item(s)", items.items.len());
for item in &items.items {
match &item.data {
AddQrToken(token) => {
@@ -237,7 +236,10 @@ impl Context {
{
Some(chat_id)
} else {
warn!("Ignoring token for nonexistent/deleted group '{}'.", grpid);
warn!(
self,
"Ignoring token for nonexistent/deleted group '{}'.", grpid
);
continue;
}
} else {

View File

@@ -17,7 +17,6 @@ use tempfile::{tempdir, TempDir};
use tokio::runtime::Handle;
use tokio::sync::RwLock;
use tokio::task;
use tracing::warn;
use crate::chat::{self, Chat, ChatId, MessageListOptions};
use crate::chatlist::Chatlist;
@@ -537,8 +536,8 @@ impl TestContext {
.expect("add_or_lookup");
match modified {
Modifier::None => (),
Modifier::Modified => warn!("Contact {} modified by TestContext", &addr),
Modifier::Created => warn!("Contact {} created by TestContext", &addr),
Modifier::Modified => warn!(&self.ctx, "Contact {} modified by TestContext", &addr),
Modifier::Created => warn!(&self.ctx, "Contact {} created by TestContext", &addr),
}
Contact::load_from_db(&self.ctx, contact_id).await.unwrap()
}

View File

@@ -19,7 +19,6 @@ use mailparse::headers::Headers;
use mailparse::MailHeaderMap;
use rand::{thread_rng, Rng};
use tokio::{fs, io};
use tracing::warn;
use crate::chat::{add_device_msg, add_device_msg_with_importance};
use crate::constants::{DC_ELLIPSIS, DC_OUTDATED_WARNING_DAYS};
@@ -201,7 +200,7 @@ async fn maybe_warn_on_bad_time(context: &Context, now: i64, known_past_timestam
.await
.ok();
} else {
warn!("Can't convert current timestamp");
warn!(context, "Can't convert current timestamp");
}
return true;
}
@@ -337,7 +336,7 @@ pub(crate) async fn delete_file(context: &Context, path: impl AsRef<Path>) -> Re
bail!("path {} does not exist", path_abs.display());
}
if !path_abs.is_file() {
warn!("refusing to delete non-file \"{}\".", path.display());
warn!(context, "refusing to delete non-file {}.", path.display());
bail!("not a file: \"{}\"", path.display());
}
@@ -359,7 +358,7 @@ pub async fn delete_files_in_dir(context: &Context, path: impl AsRef<Path>) -> R
Ok(file) => {
delete_file(context, file.file_name()).await?;
}
Err(e) => warn!("Could not read file to delete: {}", e),
Err(e) => warn!(context, "Could not read file to delete: {}", e),
}
}
Ok(())
@@ -375,6 +374,7 @@ pub(crate) async fn create_folder(
Ok(_) => Ok(()),
Err(err) => {
warn!(
context,
"Cannot create directory \"{}\": {}",
path.as_ref().display(),
err
@@ -396,6 +396,7 @@ pub(crate) async fn write_file(
let path_abs = get_abs_path(context, &path);
fs::write(&path_abs, buf).await.map_err(|err| {
warn!(
context,
"Cannot write {} bytes to \"{}\": {}",
buf.len(),
path.as_ref().display(),
@@ -413,6 +414,7 @@ pub async fn read_file(context: &Context, path: impl AsRef<Path>) -> Result<Vec<
Ok(bytes) => Ok(bytes),
Err(err) => {
warn!(
context,
"Cannot read \"{}\" or file is empty: {}",
path.as_ref().display(),
err
@@ -429,6 +431,7 @@ pub async fn open_file(context: &Context, path: impl AsRef<Path>) -> Result<fs::
Ok(bytes) => Ok(bytes),
Err(err) => {
warn!(
context,
"Cannot read \"{}\" or file is empty: {}",
path.as_ref().display(),
err
@@ -449,6 +452,7 @@ pub fn open_file_std<P: AsRef<std::path::Path>>(
Ok(bytes) => Ok(bytes),
Err(err) => {
warn!(
context,
"Cannot read \"{}\" or file is empty: {}",
path.as_ref().display(),
err

View File

@@ -10,7 +10,6 @@ use lettre_email::PartBuilder;
use serde::{Deserialize, Serialize};
use serde_json::Value;
use tokio::io::AsyncReadExt;
use tracing::{info, warn};
use crate::chat::Chat;
use crate::contact::ContactId;
@@ -166,19 +165,6 @@ pub(crate) struct StatusUpdateItemAndSerial {
max_serial: StatusUpdateSerial,
}
/// Returns an entry index and a reference.
fn find_zip_entry<'a>(
file: &'a async_zip::ZipFile,
name: &str,
) -> Option<(usize, &'a async_zip::StoredZipEntry)> {
for (i, ent) in file.entries().iter().enumerate() {
if ent.entry().filename() == name {
return Some((i, ent));
}
}
None
}
impl Context {
/// check if a file is an acceptable webxdc for sending or receiving.
pub(crate) async fn is_webxdc_file(&self, filename: &str, file: &[u8]) -> Result<bool> {
@@ -188,22 +174,22 @@ impl Context {
if file.len() as u64 > WEBXDC_RECEIVING_LIMIT {
info!(
"{} exceeds receiving limit of {} bytes",
&filename, WEBXDC_RECEIVING_LIMIT
self,
"{} exceeds receiving limit of {} bytes", &filename, WEBXDC_RECEIVING_LIMIT
);
return Ok(false);
}
let archive = match async_zip::read::mem::ZipFileReader::new(file.to_vec()).await {
let archive = match async_zip::read::mem::ZipFileReader::new(file).await {
Ok(archive) => archive,
Err(_) => {
info!("{} cannot be opened as zip-file", &filename);
info!(self, "{} cannot be opened as zip-file", &filename);
return Ok(false);
}
};
if find_zip_entry(archive.file(), "index.html").is_none() {
info!("{} misses index.html", &filename);
if archive.entry("index.html").is_none() {
info!(self, "{} misses index.html", &filename);
return Ok(false);
}
@@ -229,15 +215,15 @@ impl Context {
let valid = match async_zip::read::fs::ZipFileReader::new(path).await {
Ok(archive) => {
if find_zip_entry(archive.file(), "index.html").is_none() {
info!("{} misses index.html", filename);
if archive.entry("index.html").is_none() {
info!(self, "{} misses index.html", filename);
false
} else {
true
}
}
Err(_) => {
info!("{} cannot be opened as zip-file", filename);
info!(self, "{} cannot be opened as zip-file", filename);
false
}
};
@@ -663,9 +649,10 @@ fn parse_webxdc_manifest(bytes: &[u8]) -> Result<WebxdcManifest> {
}
async fn get_blob(archive: &mut async_zip::read::fs::ZipFileReader, name: &str) -> Result<Vec<u8>> {
let (i, _) = find_zip_entry(archive.file(), name)
let (i, _) = archive
.entry(name)
.ok_or_else(|| anyhow!("no entry found for {}", name))?;
let mut reader = archive.entry(i).await?;
let mut reader = archive.entry_reader(i).await?;
let mut buf = Vec::new();
reader.read_to_end(&mut buf).await?;
Ok(buf)
@@ -752,7 +739,7 @@ impl Message {
if let Some(ref name) = manifest.name {
let name = name.trim();
if name.is_empty() {
warn!("empty name given in manifest");
warn!(context, "empty name given in manifest");
manifest.name = None;
}
}
@@ -767,9 +754,9 @@ impl Message {
} else {
self.get_filename().unwrap_or_default()
},
icon: if find_zip_entry(archive.file(), "icon.png").is_some() {
icon: if archive.entry("icon.png").is_some() {
"icon.png".to_string()
} else if find_zip_entry(archive.file(), "icon.jpg").is_some() {
} else if archive.entry("icon.jpg").is_some() {
"icon.jpg".to_string()
} else {
WEBXDC_DEFAULT_ICON.to_string()