From 6ee165fddc2e85b361a137bfad1cfeb65327e185 Mon Sep 17 00:00:00 2001 From: link2xt Date: Thu, 16 Feb 2023 15:34:53 +0000 Subject: [PATCH 01/24] python: type annotations for testplugin.py --- python/src/deltachat/account.py | 8 +++++++- python/src/deltachat/testplugin.py | 26 +++++++++++++++++--------- 2 files changed, 24 insertions(+), 10 deletions(-) diff --git a/python/src/deltachat/account.py b/python/src/deltachat/account.py index 61d74d793..e6749c7ba 100644 --- a/python/src/deltachat/account.py +++ b/python/src/deltachat/account.py @@ -6,7 +6,7 @@ from array import array from contextlib import contextmanager from email.utils import parseaddr from threading import Event -from typing import Any, Dict, Generator, List, Optional, Union +from typing import Any, Dict, Generator, List, Optional, Union, TYPE_CHECKING from . import const, hookspec from .capi import ffi, lib @@ -22,6 +22,9 @@ from .cutil import ( from .message import Message from .tracker import ConfigureTracker, ImexTracker +if TYPE_CHECKING: + from .events import FFIEventTracker + class MissingCredentials(ValueError): """Account is missing `addr` and `mail_pw` config values.""" @@ -60,6 +63,9 @@ class Account: MissingCredentials = MissingCredentials + _logid: str + _evtracker: "FFIEventTracker" + def __init__(self, db_path, os_name=None, logging=True, closed=False) -> None: from .events import EventThread diff --git a/python/src/deltachat/testplugin.py b/python/src/deltachat/testplugin.py index d9d3e4ebb..08ae8935a 100644 --- a/python/src/deltachat/testplugin.py +++ b/python/src/deltachat/testplugin.py @@ -275,6 +275,8 @@ class ACSetup: CONFIGURED = "CONFIGURED" IDLEREADY = "IDLEREADY" + _configured_events: Queue + def __init__(self, testprocess, init_time): self._configured_events = Queue() self._account2state = {} @@ -376,8 +378,13 @@ class ACSetup: class ACFactory: + """Account factory""" + + init_time: float _finalizers: List[Callable[[], None]] _accounts: List[Account] + _acsetup: ACSetup + 
_preconfigured_keys: List[str] def __init__(self, request, testprocess, tmpdir, data) -> None: self.init_time = time.time() @@ -429,14 +436,15 @@ class ACFactory: assert "addr" in configdict and "mail_pw" in configdict return configdict - def _get_cached_account(self, addr): + def _get_cached_account(self, addr) -> Optional[Account]: if addr in self.testprocess._addr2files: return self._getaccount(addr) + return None - def get_unconfigured_account(self, closed=False): + def get_unconfigured_account(self, closed=False) -> Account: return self._getaccount(closed=closed) - def _getaccount(self, try_cache_addr=None, closed=False): + def _getaccount(self, try_cache_addr=None, closed=False) -> Account: logid = f"ac{len(self._accounts) + 1}" # we need to use fixed database basename for maybe_cache_* functions to work path = self.tmpdir.mkdir(logid).join("dc.db") @@ -450,10 +458,10 @@ class ACFactory: self._accounts.append(ac) return ac - def set_logging_default(self, logging): + def set_logging_default(self, logging) -> None: self._logging = bool(logging) - def remove_preconfigured_keys(self): + def remove_preconfigured_keys(self) -> None: self._preconfigured_keys = [] def _preconfigure_key(self, account, addr): @@ -491,7 +499,7 @@ class ACFactory: self._acsetup.init_logging(ac) return ac - def new_online_configuring_account(self, cloned_from=None, cache=False, **kwargs): + def new_online_configuring_account(self, cloned_from=None, cache=False, **kwargs) -> Account: if cloned_from is None: configdict = self.get_next_liveconfig() else: @@ -513,7 +521,7 @@ class ACFactory: self._acsetup.start_configure(ac) return ac - def prepare_account_from_liveconfig(self, configdict): + def prepare_account_from_liveconfig(self, configdict) -> Account: ac = self.get_unconfigured_account() assert "addr" in configdict and "mail_pw" in configdict, configdict configdict.setdefault("bcc_self", False) @@ -523,11 +531,11 @@ class ACFactory: self._preconfigure_key(ac, configdict["addr"]) return 
ac - def wait_configured(self, account): + def wait_configured(self, account) -> None: """Wait until the specified account has successfully completed configure.""" self._acsetup.wait_one_configured(account) - def bring_accounts_online(self): + def bring_accounts_online(self) -> None: print("bringing accounts online") self._acsetup.bring_online() print("all accounts online") From a34aeb240ab233aaa5783f33d95f2fc8082df4ee Mon Sep 17 00:00:00 2001 From: link2xt Date: Thu, 16 Feb 2023 16:58:42 +0000 Subject: [PATCH 02/24] Increase database timeout to 60 seconds --- src/sql.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/sql.rs b/src/sql.rs index 6b967de17..f103ace39 100644 --- a/src/sql.rs +++ b/src/sql.rs @@ -213,7 +213,7 @@ impl Sql { PRAGMA temp_store=memory; -- Avoid SQLITE_IOERR_GETTEMPPATH errors on Android PRAGMA foreign_keys=on; ", - Duration::from_secs(10).as_millis() + Duration::from_secs(60).as_millis() ))?; c.pragma_update(None, "key", passphrase.clone())?; // Try to enable auto_vacuum. This will only be From 870527de1ef4fe0f5abcd36923d341c3e49430d5 Mon Sep 17 00:00:00 2001 From: link2xt Date: Fri, 17 Feb 2023 10:35:05 +0000 Subject: [PATCH 03/24] Remove r2d2_sqlite dependency --- CHANGELOG.md | 1 + Cargo.lock | 11 ------ Cargo.toml | 1 - src/sql.rs | 48 ++++------------------- src/sql/connection_manager.rs | 73 +++++++++++++++++++++++++++++++++++ 5 files changed, 82 insertions(+), 52 deletions(-) create mode 100644 src/sql/connection_manager.rs diff --git a/CHANGELOG.md b/CHANGELOG.md index f3781b5d3..be08622ee 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -6,6 +6,7 @@ - deltachat-rpc-client: use `dataclass` for `Account`, `Chat`, `Contact` and `Message` #4042 - python: mark bindings as supporting typing according to PEP 561 #4045 - retry filesystem operations during account migration #4043 +- remove `r2d2_sqlite` dependency #4050 ### Fixes - deltachat-rpc-server: do not block stdin while processing the request. 
#4041 diff --git a/Cargo.lock b/Cargo.lock index 097a27059..90f72d4f5 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -935,7 +935,6 @@ dependencies = [ "qrcodegen", "quick-xml", "r2d2", - "r2d2_sqlite", "rand 0.8.5", "ratelimit", "regex", @@ -2816,16 +2815,6 @@ dependencies = [ "scheduled-thread-pool", ] -[[package]] -name = "r2d2_sqlite" -version = "0.20.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6fdc8e4da70586127893be32b7adf21326a4c6b1aba907611edf467d13ffe895" -dependencies = [ - "r2d2", - "rusqlite", -] - [[package]] name = "radix_trie" version = "0.2.1" diff --git a/Cargo.toml b/Cargo.toml index e74796f5f..459ee7c38 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -59,7 +59,6 @@ pgp = { version = "0.9", default-features = false } pretty_env_logger = { version = "0.4", optional = true } quick-xml = "0.27" r2d2 = "0.8" -r2d2_sqlite = "0.20" rand = "0.8" regex = "1.7" rusqlite = { version = "0.27", features = ["sqlcipher"] } diff --git a/src/sql.rs b/src/sql.rs index f103ace39..c6315f680 100644 --- a/src/sql.rs +++ b/src/sql.rs @@ -7,7 +7,7 @@ use std::path::PathBuf; use std::time::Duration; use anyhow::{bail, Context as _, Result}; -use rusqlite::{config::DbConfig, Connection, OpenFlags}; +use rusqlite::{self, config::DbConfig, Connection}; use tokio::sync::RwLock; use crate::blob::BlobObject; @@ -47,8 +47,11 @@ pub(crate) fn params_iter(iter: &[impl crate::ToSql]) -> impl Iterator>>, + pool: RwLock>>, /// None if the database is not open, true if it is open with passphrase and false if it is /// open without a passphrase. @@ -192,44 +195,11 @@ impl Sql { }) } - fn new_pool( - dbfile: &Path, - passphrase: String, - ) -> Result> { - let mut open_flags = OpenFlags::SQLITE_OPEN_NO_MUTEX; - open_flags.insert(OpenFlags::SQLITE_OPEN_READ_WRITE); - open_flags.insert(OpenFlags::SQLITE_OPEN_CREATE); - + fn new_pool(dbfile: &Path, passphrase: String) -> Result> { // this actually creates min_idle database handles just now. 
// therefore, with_init() must not try to modify the database as otherwise // we easily get busy-errors (eg. table-creation, journal_mode etc. should be done on only one handle) - let mgr = r2d2_sqlite::SqliteConnectionManager::file(dbfile) - .with_flags(open_flags) - .with_init(move |c| { - c.execute_batch(&format!( - "PRAGMA cipher_memory_security = OFF; -- Too slow on Android - PRAGMA secure_delete=on; - PRAGMA busy_timeout = {}; - PRAGMA temp_store=memory; -- Avoid SQLITE_IOERR_GETTEMPPATH errors on Android - PRAGMA foreign_keys=on; - ", - Duration::from_secs(60).as_millis() - ))?; - c.pragma_update(None, "key", passphrase.clone())?; - // Try to enable auto_vacuum. This will only be - // applied if the database is new or after successful - // VACUUM, which usually happens before backup export. - // When auto_vacuum is INCREMENTAL, it is possible to - // use PRAGMA incremental_vacuum to return unused - // database pages to the filesystem. - c.pragma_update(None, "auto_vacuum", "INCREMENTAL".to_string())?; - - c.pragma_update(None, "journal_mode", "WAL".to_string())?; - // Default synchronous=FULL is much slower. NORMAL is sufficient for WAL mode. - c.pragma_update(None, "synchronous", "NORMAL".to_string())?; - Ok(()) - }); - + let mgr = ConnectionManager::new(dbfile.to_path_buf(), passphrase); let pool = r2d2::Pool::builder() .min_idle(Some(2)) .max_size(10) @@ -393,9 +363,7 @@ impl Sql { } /// Allocates a connection from the connection pool and returns it. 
- pub async fn get_conn( - &self, - ) -> Result> { + pub(crate) async fn get_conn(&self) -> Result> { let lock = self.pool.read().await; let pool = lock.as_ref().context("no SQL connection")?; let conn = pool.get()?; diff --git a/src/sql/connection_manager.rs b/src/sql/connection_manager.rs new file mode 100644 index 000000000..f0d556c79 --- /dev/null +++ b/src/sql/connection_manager.rs @@ -0,0 +1,73 @@ +use std::path::PathBuf; +use std::time::Duration; + +use r2d2::ManageConnection; +use rusqlite::{Connection, Error, OpenFlags}; + +#[derive(Debug)] +pub struct ConnectionManager { + /// Database file path. + path: PathBuf, + + /// SQLite open flags. + flags: rusqlite::OpenFlags, + + /// SQLCipher database passphrase. + /// Empty string if database is not encrypted. + passphrase: String, +} + +impl ConnectionManager { + /// Creates new connection manager. + pub fn new(path: PathBuf, passphrase: String) -> Self { + let mut flags = OpenFlags::SQLITE_OPEN_NO_MUTEX; + flags.insert(OpenFlags::SQLITE_OPEN_READ_WRITE); + flags.insert(OpenFlags::SQLITE_OPEN_CREATE); + + Self { + path, + flags, + passphrase, + } + } +} + +impl ManageConnection for ConnectionManager { + type Connection = Connection; + type Error = Error; + + fn connect(&self) -> Result { + let conn = Connection::open_with_flags(&self.path, self.flags)?; + conn.execute_batch(&format!( + "PRAGMA cipher_memory_security = OFF; -- Too slow on Android + PRAGMA secure_delete=on; + PRAGMA busy_timeout = {}; + PRAGMA temp_store=memory; -- Avoid SQLITE_IOERR_GETTEMPPATH errors on Android + PRAGMA foreign_keys=on; + ", + Duration::from_secs(60).as_millis() + ))?; + conn.pragma_update(None, "key", &self.passphrase)?; + // Try to enable auto_vacuum. This will only be + // applied if the database is new or after successful + // VACUUM, which usually happens before backup export. + // When auto_vacuum is INCREMENTAL, it is possible to + // use PRAGMA incremental_vacuum to return unused + // database pages to the filesystem. 
+ conn.pragma_update(None, "auto_vacuum", "INCREMENTAL".to_string())?; + + conn.pragma_update(None, "journal_mode", "WAL".to_string())?; + // Default synchronous=FULL is much slower. NORMAL is sufficient for WAL mode. + conn.pragma_update(None, "synchronous", "NORMAL".to_string())?; + + Ok(conn) + } + + fn is_valid(&self, _conn: &mut Connection) -> Result<(), Error> { + Ok(()) + } + + fn has_broken(&self, _conn: &mut Connection) -> bool { + false + } +} From 7586bcf45e963c0045464a2232a2c75e9371c096 Mon Sep 17 00:00:00 2001 From: link2xt Date: Fri, 17 Feb 2023 14:30:24 +0000 Subject: [PATCH 04/24] Update rusqlite to 0.28 --- Cargo.lock | 28 +++++++++------------------- Cargo.toml | 2 +- deltachat-repl/Cargo.toml | 2 +- 3 files changed, 11 insertions(+), 21 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 90f72d4f5..3fcf0d9eb 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1666,15 +1666,6 @@ version = "1.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "eabb4a44450da02c90444cf74558da904edde8fb4e9035a9a6a4e15445af0bd7" -[[package]] -name = "hashbrown" -version = "0.11.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ab5ef0d4909ef3724cc8cce6ccc8572c5c817592e9285f5464f8e86f8bd3726e" -dependencies = [ - "ahash", -] - [[package]] name = "hashbrown" version = "0.12.3" @@ -1686,11 +1677,11 @@ dependencies = [ [[package]] name = "hashlink" -version = "0.7.0" +version = "0.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7249a3129cbc1ffccd74857f81464a323a152173cdb134e0fd81bc803b29facf" +checksum = "69fe1fcf8b4278d860ad0548329f892a3631fb63f82574df68275f34cdbe0ffa" dependencies = [ - "hashbrown 0.11.2", + "hashbrown", ] [[package]] @@ -1923,7 +1914,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "10a35a97730320ffe8e2d410b5d3b69279b98d2c14bdb8b70ea89ecf7888d41e" dependencies = [ "autocfg", - "hashbrown 0.12.3", + "hashbrown", ] [[package]] @@ 
-2082,9 +2073,9 @@ checksum = "292a948cd991e376cf75541fe5b97a1081d713c618b4f1b9500f8844e49eb565" [[package]] name = "libsqlite3-sys" -version = "0.24.2" +version = "0.25.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "898745e570c7d0453cc1fbc4a701eb6c662ed54e8fec8b7d14be137ebeeb9d14" +checksum = "29f835d03d717946d28b1d1ed632eb6f0e24a299388ee623d0c23118d3e8a7fa" dependencies = [ "cc", "openssl-sys", @@ -3058,16 +3049,15 @@ dependencies = [ [[package]] name = "rusqlite" -version = "0.27.0" +version = "0.28.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "85127183a999f7db96d1a976a309eebbfb6ea3b0b400ddd8340190129de6eb7a" +checksum = "01e213bc3ecb39ac32e81e51ebe31fd888a940515173e3a18a35f8c6e896422a" dependencies = [ "bitflags", "fallible-iterator", "fallible-streaming-iterator", "hashlink", "libsqlite3-sys", - "memchr", "smallvec", ] @@ -3966,7 +3956,7 @@ version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c5faade31a542b8b35855fff6e8def199853b2da8da256da52f52f1316ee3137" dependencies = [ - "hashbrown 0.12.3", + "hashbrown", "regex", ] diff --git a/Cargo.toml b/Cargo.toml index 459ee7c38..71c493284 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -61,7 +61,7 @@ quick-xml = "0.27" r2d2 = "0.8" rand = "0.8" regex = "1.7" -rusqlite = { version = "0.27", features = ["sqlcipher"] } +rusqlite = { version = "0.28", features = ["sqlcipher"] } rust-hsluv = "0.1" sanitize-filename = "0.4" serde_json = "1.0" diff --git a/deltachat-repl/Cargo.toml b/deltachat-repl/Cargo.toml index 9dca2a2ae..0b132762f 100644 --- a/deltachat-repl/Cargo.toml +++ b/deltachat-repl/Cargo.toml @@ -10,7 +10,7 @@ deltachat = { path = "..", features = ["internals"]} dirs = "4" log = "0.4.16" pretty_env_logger = "0.4" -rusqlite = "0.27" +rusqlite = "0.28" rustyline = "10" tokio = { version = "1", features = ["fs", "rt-multi-thread", "macros"] } From 48fee4fc92d30c5f4688c535fc315b2fd6f6b87b Mon Sep 17 00:00:00 
2001 From: link2xt Date: Sat, 18 Feb 2023 11:31:07 +0000 Subject: [PATCH 05/24] python: replace "while 1" with "while True" This makes pyright recognize that the function never returns implicitly. --- python/src/deltachat/direct_imap.py | 4 ++-- python/src/deltachat/events.py | 16 ++++++++-------- python/src/deltachat/testplugin.py | 6 +++--- python/src/deltachat/tracker.py | 2 +- 4 files changed, 14 insertions(+), 14 deletions(-) diff --git a/python/src/deltachat/direct_imap.py b/python/src/deltachat/direct_imap.py index 193924055..d95bd29ab 100644 --- a/python/src/deltachat/direct_imap.py +++ b/python/src/deltachat/direct_imap.py @@ -207,14 +207,14 @@ class IdleManager: return res def wait_for_new_message(self, timeout=None) -> bytes: - while 1: + while True: for item in self.check(timeout=timeout): if b"EXISTS" in item or b"RECENT" in item: return item def wait_for_seen(self, timeout=None) -> int: """Return first message with SEEN flag from a running idle-stream.""" - while 1: + while True: for item in self.check(timeout=timeout): if FETCH in item: self.log(str(item)) diff --git a/python/src/deltachat/events.py b/python/src/deltachat/events.py index d88876004..006095da0 100644 --- a/python/src/deltachat/events.py +++ b/python/src/deltachat/events.py @@ -108,7 +108,7 @@ class FFIEventTracker: return ev def iter_events(self, timeout=None, check_error=True): - while 1: + while True: yield self.get(timeout=timeout, check_error=check_error) def get_matching(self, event_name_regex, check_error=True, timeout=None): @@ -119,14 +119,14 @@ class FFIEventTracker: def get_info_contains(self, regex: str) -> FFIEvent: rex = re.compile(regex) - while 1: + while True: ev = self.get_matching("DC_EVENT_INFO") if rex.search(ev.data2): return ev def get_info_regex_groups(self, regex, check_error=True): rex = re.compile(regex) - while 1: + while True: ev = self.get_matching("DC_EVENT_INFO", check_error=check_error) m = rex.match(ev.data2) if m is not None: @@ -137,7 +137,7 @@ class 
FFIEventTracker: This only works reliably if the connectivity doesn't change again too quickly, otherwise we might miss it. """ - while 1: + while True: if self.account.get_connectivity() == connectivity: return self.get_matching("DC_EVENT_CONNECTIVITY_CHANGED") @@ -146,7 +146,7 @@ class FFIEventTracker: """Wait until the connectivity changes to `expected_next`. Fails the test if it changes to something else. """ - while 1: + while True: current = self.account.get_connectivity() if current == expected_next: return @@ -156,7 +156,7 @@ class FFIEventTracker: self.get_matching("DC_EVENT_CONNECTIVITY_CHANGED") def wait_for_all_work_done(self): - while 1: + while True: if self.account.all_work_done(): return self.get_matching("DC_EVENT_CONNECTIVITY_CHANGED") @@ -164,7 +164,7 @@ class FFIEventTracker: def ensure_event_not_queued(self, event_name_regex): __tracebackhide__ = True rex = re.compile(f"(?:{event_name_regex}).*") - while 1: + while True: try: ev = self._event_queue.get(False) except Empty: @@ -173,7 +173,7 @@ class FFIEventTracker: assert not rex.match(ev.name), f"event found {ev}" def wait_securejoin_inviter_progress(self, target): - while 1: + while True: event = self.get_matching("DC_EVENT_SECUREJOIN_INVITER_PROGRESS") if event.data2 >= target: print(f"** SECUREJOINT-INVITER PROGRESS {target}", self.account) diff --git a/python/src/deltachat/testplugin.py b/python/src/deltachat/testplugin.py index 08ae8935a..ebbb13aaf 100644 --- a/python/src/deltachat/testplugin.py +++ b/python/src/deltachat/testplugin.py @@ -309,7 +309,7 @@ class ACSetup: def wait_one_configured(self, account): """wait until this account has successfully configured.""" if self._account2state[account] == self.CONFIGURING: - while 1: + while True: acc = self._pop_config_success() if acc == account: break @@ -638,7 +638,7 @@ class BotProcess: def _run_stdout_thread(self) -> None: try: - while 1: + while True: line = self.popen.stdout.readline() if not line: break @@ -659,7 +659,7 @@ class 
BotProcess: for next_pattern in patterns: print("+++FNMATCH:", next_pattern) ignored = [] - while 1: + while True: line = self.stdout_queue.get() if line is None: if ignored: diff --git a/python/src/deltachat/tracker.py b/python/src/deltachat/tracker.py index 24665ea64..2dc2ad41b 100644 --- a/python/src/deltachat/tracker.py +++ b/python/src/deltachat/tracker.py @@ -85,7 +85,7 @@ class ConfigureTracker: self._imap_finished.wait() def wait_progress(self, data1=None): - while 1: + while True: evdata = self._progress.get() if data1 is None or evdata == data1: break From ed8e2c48185eea7273f5da620caf2eacc9e7ac40 Mon Sep 17 00:00:00 2001 From: link2xt Date: Fri, 17 Feb 2023 15:54:32 +0000 Subject: [PATCH 06/24] Replace r2d2 with an own connection pool New connection pool does not use threads and does not remove idle connections or create new connections at runtime. --- CHANGELOG.md | 2 +- Cargo.lock | 22 +------ Cargo.toml | 2 +- src/sql.rs | 69 ++++++++++++++++------ src/sql/connection_manager.rs | 73 ----------------------- src/sql/pool.rs | 105 ++++++++++++++++++++++++++++++++++ 6 files changed, 159 insertions(+), 114 deletions(-) delete mode 100644 src/sql/connection_manager.rs create mode 100644 src/sql/pool.rs diff --git a/CHANGELOG.md b/CHANGELOG.md index be08622ee..c645f3c8c 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -6,7 +6,7 @@ - deltachat-rpc-client: use `dataclass` for `Account`, `Chat`, `Contact` and `Message` #4042 - python: mark bindings as supporting typing according to PEP 561 #4045 - retry filesystem operations during account migration #4043 -- remove `r2d2_sqlite` dependency #4050 +- replace `r2d2` and `r2d2_sqlite` dependencies with an own connection pool #4050 #4053 ### Fixes - deltachat-rpc-server: do not block stdin while processing the request. 
#4041 diff --git a/Cargo.lock b/Cargo.lock index 3fcf0d9eb..b18e6b955 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -928,13 +928,13 @@ dependencies = [ "num-traits", "num_cpus", "once_cell", + "parking_lot", "percent-encoding", "pgp", "pretty_env_logger", "proptest", "qrcodegen", "quick-xml", - "r2d2", "rand 0.8.5", "ratelimit", "regex", @@ -2795,17 +2795,6 @@ version = "0.4.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "20f14e071918cbeefc5edc986a7aa92c425dae244e003a35e1cdddb5ca39b5cb" -[[package]] -name = "r2d2" -version = "0.8.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "51de85fb3fb6524929c8a2eb85e6b6d363de4e8c48f9e2c2eac4944abc181c93" -dependencies = [ - "log", - "parking_lot", - "scheduled-thread-pool", -] - [[package]] name = "radix_trie" version = "0.2.1" @@ -3171,15 +3160,6 @@ dependencies = [ "windows-sys 0.36.1", ] -[[package]] -name = "scheduled-thread-pool" -version = "0.2.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "977a7519bff143a44f842fd07e80ad1329295bd71686457f18e496736f4bf9bf" -dependencies = [ - "parking_lot", -] - [[package]] name = "scopeguard" version = "1.1.0" diff --git a/Cargo.toml b/Cargo.toml index 71c493284..916aa8eaa 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -54,11 +54,11 @@ num_cpus = "1.15" num-derive = "0.3" num-traits = "0.2" once_cell = "1.17.0" +parking_lot = "0.12" percent-encoding = "2.2" pgp = { version = "0.9", default-features = false } pretty_env_logger = { version = "0.4", optional = true } quick-xml = "0.27" -r2d2 = "0.8" rand = "0.8" regex = "1.7" rusqlite = { version = "0.28", features = ["sqlcipher"] } diff --git a/src/sql.rs b/src/sql.rs index c6315f680..718fa5cee 100644 --- a/src/sql.rs +++ b/src/sql.rs @@ -4,10 +4,9 @@ use std::collections::{HashMap, HashSet}; use std::convert::TryFrom; use std::path::Path; use std::path::PathBuf; -use std::time::Duration; use anyhow::{bail, Context as _, Result}; -use 
rusqlite::{self, config::DbConfig, Connection}; +use rusqlite::{self, config::DbConfig, Connection, OpenFlags}; use tokio::sync::RwLock; use crate::blob::BlobObject; @@ -47,10 +46,10 @@ pub(crate) fn params_iter(iter: &[impl crate::ToSql]) -> impl Iterator>>, + pool: RwLock>, /// None if the database is not open, true if it is open with passphrase and false if it is /// open without a passphrase. @@ -195,17 +194,15 @@ impl Sql { }) } - fn new_pool(dbfile: &Path, passphrase: String) -> Result> { - // this actually creates min_idle database handles just now. - // therefore, with_init() must not try to modify the database as otherwise - // we easily get busy-errors (eg. table-creation, journal_mode etc. should be done on only one handle) - let mgr = ConnectionManager::new(dbfile.to_path_buf(), passphrase); - let pool = r2d2::Pool::builder() - .min_idle(Some(2)) - .max_size(10) - .connection_timeout(Duration::from_secs(60)) - .build(mgr) - .context("Can't build SQL connection pool")?; + /// Creates a new connection pool. + fn new_pool(dbfile: &Path, passphrase: String) -> Result { + let mut connections = Vec::new(); + for _ in 0..3 { + let connection = new_connection(dbfile, &passphrase)?; + connections.push(connection); + } + + let pool = Pool::new(connections); Ok(pool) } @@ -363,10 +360,10 @@ impl Sql { } /// Allocates a connection from the connection pool and returns it. - pub(crate) async fn get_conn(&self) -> Result> { + pub(crate) async fn get_conn(&self) -> Result { let lock = self.pool.read().await; let pool = lock.as_ref().context("no SQL connection")?; - let conn = pool.get()?; + let conn = pool.get(); Ok(conn) } @@ -610,6 +607,42 @@ impl Sql { } } +/// Creates a new SQLite connection. +/// +/// `path` is the database path. +/// +/// `passphrase` is the SQLCipher database passphrase. +/// Empty string if database is not encrypted. 
+fn new_connection(path: &Path, passphrase: &str) -> Result { + let mut flags = OpenFlags::SQLITE_OPEN_NO_MUTEX; + flags.insert(OpenFlags::SQLITE_OPEN_READ_WRITE); + flags.insert(OpenFlags::SQLITE_OPEN_CREATE); + + let conn = Connection::open_with_flags(path, flags)?; + conn.execute_batch( + "PRAGMA cipher_memory_security = OFF; -- Too slow on Android + PRAGMA secure_delete=on; + PRAGMA busy_timeout = 60000; -- 60 seconds + PRAGMA temp_store=memory; -- Avoid SQLITE_IOERR_GETTEMPPATH errors on Android + PRAGMA foreign_keys=on; + ", + )?; + conn.pragma_update(None, "key", passphrase)?; + // Try to enable auto_vacuum. This will only be + // applied if the database is new or after successful + // VACUUM, which usually happens before backup export. + // When auto_vacuum is INCREMENTAL, it is possible to + // use PRAGMA incremental_vacuum to return unused + // database pages to the filesystem. + conn.pragma_update(None, "auto_vacuum", "INCREMENTAL".to_string())?; + + conn.pragma_update(None, "journal_mode", "WAL".to_string())?; + // Default synchronous=FULL is much slower. NORMAL is sufficient for WAL mode. + conn.pragma_update(None, "synchronous", "NORMAL".to_string())?; + + Ok(conn) +} + /// Cleanup the account to restore some storage and optimize the database. pub async fn housekeeping(context: &Context) -> Result<()> { if let Err(err) = remove_unused_files(context).await { diff --git a/src/sql/connection_manager.rs b/src/sql/connection_manager.rs deleted file mode 100644 index f0d556c79..000000000 --- a/src/sql/connection_manager.rs +++ /dev/null @@ -1,73 +0,0 @@ -use std::path::PathBuf; -use std::time::Duration; - -use r2d2::ManageConnection; -use rusqlite::{Connection, Error, OpenFlags}; - -#[derive(Debug)] -pub struct ConnectionManager { - /// Database file path. - path: PathBuf, - - /// SQLite open flags. - flags: rusqlite::OpenFlags, - - /// SQLCipher database passphrase. - /// Empty string if database is not encrypted. 
- passphrase: String, -} - -impl ConnectionManager { - /// Creates new connection manager. - pub fn new(path: PathBuf, passphrase: String) -> Self { - let mut flags = OpenFlags::SQLITE_OPEN_NO_MUTEX; - flags.insert(OpenFlags::SQLITE_OPEN_READ_WRITE); - flags.insert(OpenFlags::SQLITE_OPEN_CREATE); - - Self { - path, - flags, - passphrase, - } - } -} - -impl ManageConnection for ConnectionManager { - type Connection = Connection; - type Error = Error; - - fn connect(&self) -> Result { - let conn = Connection::open_with_flags(&self.path, self.flags)?; - conn.execute_batch(&format!( - "PRAGMA cipher_memory_security = OFF; -- Too slow on Android - PRAGMA secure_delete=on; - PRAGMA busy_timeout = {}; - PRAGMA temp_store=memory; -- Avoid SQLITE_IOERR_GETTEMPPATH errors on Android - PRAGMA foreign_keys=on; - ", - Duration::from_secs(60).as_millis() - ))?; - conn.pragma_update(None, "key", &self.passphrase)?; - // Try to enable auto_vacuum. This will only be - // applied if the database is new or after successful - // VACUUM, which usually happens before backup export. - // When auto_vacuum is INCREMENTAL, it is possible to - // use PRAGMA incremental_vacuum to return unused - // database pages to the filesystem. - conn.pragma_update(None, "auto_vacuum", "INCREMENTAL".to_string())?; - - conn.pragma_update(None, "journal_mode", "WAL".to_string())?; - // Default synchronous=FULL is much slower. NORMAL is sufficient for WAL mode. - conn.pragma_update(None, "synchronous", "NORMAL".to_string())?; - - Ok(conn) - } - - fn is_valid(&self, _conn: &mut Connection) -> Result<(), Error> { - Ok(()) - } - - fn has_broken(&self, _conn: &mut Connection) -> bool { - false - } -} diff --git a/src/sql/pool.rs b/src/sql/pool.rs new file mode 100644 index 000000000..477b7d57f --- /dev/null +++ b/src/sql/pool.rs @@ -0,0 +1,105 @@ +//! Connection pool. 
+ +use std::fmt; +use std::ops::{Deref, DerefMut}; +use std::sync::{Arc, Weak}; + +use parking_lot::{Condvar, Mutex}; +use rusqlite::Connection; + +/// Inner connection pool. +struct InnerPool { + /// Available connections. + connections: Mutex>, + + /// Conditional variable to notify about added connections. + /// + /// Used to wait for available connection when the pool is empty. + cond: Condvar, +} + +impl InnerPool { + /// Puts a connection into the pool. + /// + /// The connection could be new or returned back. + fn put(&self, connection: Connection) { + let mut connections = self.connections.lock(); + connections.push(connection); + drop(connections); + self.cond.notify_one(); + } +} + +/// Pooled connection. +pub struct PooledConnection { + /// Weak reference to the pool used to return the connection back. + pool: Weak, + + /// Only `None` right after moving the connection back to the pool. + conn: Option, +} + +impl Drop for PooledConnection { + fn drop(&mut self) { + // Put the connection back unless the pool is already dropped. + if let Some(pool) = self.pool.upgrade() { + if let Some(conn) = self.conn.take() { + pool.put(conn); + } + } + } +} + +impl Deref for PooledConnection { + type Target = Connection; + + fn deref(&self) -> &Connection { + self.conn.as_ref().unwrap() + } +} + +impl DerefMut for PooledConnection { + fn deref_mut(&mut self) -> &mut Connection { + self.conn.as_mut().unwrap() + } +} + +/// Connection pool. +#[derive(Clone)] +pub struct Pool { + /// Reference to the actual connection pool. + inner: Arc, +} + +impl fmt::Debug for Pool { + fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { + write!(fmt, "Pool") + } +} + +impl Pool { + /// Creates a new connection pool. + pub fn new(connections: Vec) -> Self { + let inner = Arc::new(InnerPool { + connections: Mutex::new(connections), + cond: Condvar::new(), + }); + Pool { inner } + } + + /// Retrieves a connection from the pool. 
+ pub fn get(&self) -> PooledConnection { + let mut connections = self.inner.connections.lock(); + + loop { + if let Some(conn) = connections.pop() { + return PooledConnection { + pool: Arc::downgrade(&self.inner), + conn: Some(conn), + }; + } + + self.inner.cond.wait(&mut connections); + } + } +} From e88f21c010fada6c8f59acce1e73055881bfc46d Mon Sep 17 00:00:00 2001 From: link2xt Date: Sat, 18 Feb 2023 01:57:19 +0000 Subject: [PATCH 07/24] Remove `try_many_times` r2d2 bug workaround --- src/accounts.rs | 36 ++++-------------------------------- 1 file changed, 4 insertions(+), 32 deletions(-) diff --git a/src/accounts.rs b/src/accounts.rs index e01cc49c2..8ec3a34c8 100644 --- a/src/accounts.rs +++ b/src/accounts.rs @@ -1,7 +1,6 @@ //! # Account manager module. use std::collections::BTreeMap; -use std::future::Future; use std::path::{Path, PathBuf}; use anyhow::{ensure, Context as _, Result}; @@ -151,7 +150,7 @@ impl Accounts { if let Some(cfg) = self.config.get_account(id) { let account_path = self.dir.join(cfg.dir); - try_many_times(|| fs::remove_dir_all(&account_path)) + fs::remove_dir_all(&account_path) .await .context("failed to remove account data")?; } @@ -187,10 +186,10 @@ impl Accounts { fs::create_dir_all(self.dir.join(&account_config.dir)) .await .context("failed to create dir")?; - try_many_times(|| fs::rename(&dbfile, &new_dbfile)) + fs::rename(&dbfile, &new_dbfile) .await .context("failed to rename dbfile")?; - try_many_times(|| fs::rename(&blobdir, &new_blobdir)) + fs::rename(&blobdir, &new_blobdir) .await .context("failed to rename blobdir")?; if walfile.exists() { @@ -215,7 +214,7 @@ impl Accounts { } Err(err) => { let account_path = std::path::PathBuf::from(&account_config.dir); - try_many_times(|| fs::remove_dir_all(&account_path)) + fs::remove_dir_all(&account_path) .await .context("failed to remove account data")?; self.config.remove_account(account_config.id).await?; @@ -472,33 +471,6 @@ impl Config { } } -/// Spend up to 1 minute trying to do 
the operation. -/// -/// Files may remain locked up to 30 seconds due to r2d2 bug: -/// -async fn try_many_times(f: F) -> std::result::Result<(), T> -where - F: Fn() -> Fut, - Fut: Future>, -{ - let mut counter = 0; - loop { - counter += 1; - - if let Err(err) = f().await { - if counter > 60 { - return Err(err); - } - - // Wait 1 second and try again. - tokio::time::sleep(std::time::Duration::from_millis(1000)).await; - } else { - break; - } - } - Ok(()) -} - /// Configuration of a single account. #[derive(Serialize, Deserialize, Debug, Clone, PartialEq)] struct AccountConfig { From f2b05ccc291b8518e32f32f45e9dec018ece345b Mon Sep 17 00:00:00 2001 From: link2xt Date: Sun, 19 Feb 2023 01:08:17 +0000 Subject: [PATCH 08/24] Reimplement connection pool on top of crossbeam --- Cargo.lock | 2 +- Cargo.toml | 2 +- src/sql.rs | 2 +- src/sql/pool.rs | 30 +++++++++++++++--------------- 4 files changed, 18 insertions(+), 18 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index b18e6b955..7da76f42e 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -907,6 +907,7 @@ dependencies = [ "bitflags", "chrono", "criterion", + "crossbeam-queue", "deltachat_derive", "email", "encoded-words", @@ -928,7 +929,6 @@ dependencies = [ "num-traits", "num_cpus", "once_cell", - "parking_lot", "percent-encoding", "pgp", "pretty_env_logger", diff --git a/Cargo.toml b/Cargo.toml index 916aa8eaa..5f928ff98 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -39,6 +39,7 @@ backtrace = "0.3" base64 = "0.21" bitflags = "1.3" chrono = { version = "0.4", default-features=false, features = ["clock", "std"] } +crossbeam-queue = "0.3" email = { git = "https://github.com/deltachat/rust-email", branch = "master" } encoded-words = { git = "https://github.com/async-email/encoded-words", branch = "master" } escaper = "0.1" @@ -54,7 +55,6 @@ num_cpus = "1.15" num-derive = "0.3" num-traits = "0.2" once_cell = "1.17.0" -parking_lot = "0.12" percent-encoding = "2.2" pgp = { version = "0.9", default-features = false } 
pretty_env_logger = { version = "0.4", optional = true } diff --git a/src/sql.rs b/src/sql.rs index 718fa5cee..32f1cdff2 100644 --- a/src/sql.rs +++ b/src/sql.rs @@ -363,7 +363,7 @@ impl Sql { pub(crate) async fn get_conn(&self) -> Result { let lock = self.pool.read().await; let pool = lock.as_ref().context("no SQL connection")?; - let conn = pool.get(); + let conn = pool.get().await; Ok(conn) } diff --git a/src/sql/pool.rs b/src/sql/pool.rs index 477b7d57f..8e8f9602c 100644 --- a/src/sql/pool.rs +++ b/src/sql/pool.rs @@ -4,18 +4,19 @@ use std::fmt; use std::ops::{Deref, DerefMut}; use std::sync::{Arc, Weak}; -use parking_lot::{Condvar, Mutex}; +use crossbeam_queue::ArrayQueue; use rusqlite::Connection; +use tokio::sync::Notify; /// Inner connection pool. struct InnerPool { /// Available connections. - connections: Mutex>, + connections: ArrayQueue, - /// Conditional variable to notify about added connections. + /// Notifies about added connections. /// /// Used to wait for available connection when the pool is empty. - cond: Condvar, + notify: Notify, } impl InnerPool { @@ -23,10 +24,8 @@ impl InnerPool { /// /// The connection could be new or returned back. fn put(&self, connection: Connection) { - let mut connections = self.connections.lock(); - connections.push(connection); - drop(connections); - self.cond.notify_one(); + self.connections.force_push(connection); + self.notify.notify_one(); } } @@ -81,25 +80,26 @@ impl Pool { /// Creates a new connection pool. pub fn new(connections: Vec) -> Self { let inner = Arc::new(InnerPool { - connections: Mutex::new(connections), - cond: Condvar::new(), + connections: ArrayQueue::new(connections.len()), + notify: Notify::new(), }); + for connection in connections { + inner.connections.force_push(connection); + } Pool { inner } } /// Retrieves a connection from the pool. 
- pub fn get(&self) -> PooledConnection { - let mut connections = self.inner.connections.lock(); - + pub async fn get(&self) -> PooledConnection { loop { - if let Some(conn) = connections.pop() { + if let Some(conn) = self.inner.connections.pop() { return PooledConnection { pool: Arc::downgrade(&self.inner), conn: Some(conn), }; } - self.inner.cond.wait(&mut connections); + self.inner.notify.notified().await; } } } From 75f65b06e868355ae5e61c9621d0a829c9ff4ad3 Mon Sep 17 00:00:00 2001 From: link2xt Date: Sun, 19 Feb 2023 02:07:56 +0000 Subject: [PATCH 09/24] Run VACUUM on the same connection as sqlcipher_export Otherwise export_and_import_backup test fails due to SQLITE_SCHEMA error. --- src/imex.rs | 40 +++++++++++++++++++++++++++------------- src/sql.rs | 25 ------------------------- 2 files changed, 27 insertions(+), 38 deletions(-) diff --git a/src/imex.rs b/src/imex.rs index cbe91658d..f23496753 100644 --- a/src/imex.rs +++ b/src/imex.rs @@ -5,7 +5,7 @@ use std::ffi::OsStr; use std::path::{Path, PathBuf}; use ::pgp::types::KeyTrait; -use anyhow::{bail, ensure, format_err, Context as _, Result}; +use anyhow::{bail, ensure, format_err, Context as _, Error, Result}; use futures::StreamExt; use futures_lite::FutureExt; use rand::{thread_rng, Rng}; @@ -508,6 +508,9 @@ fn get_next_backup_path(folder: &Path, backup_time: i64) -> Result<(PathBuf, Pat bail!("could not create backup file, disk full?"); } +/// Exports the database to a separate file with the given passphrase. +/// +/// Set passphrase to empty string to export the database unencrypted. 
async fn export_backup(context: &Context, dir: &Path, passphrase: String) -> Result<()> { // get a fine backup file name (the name includes the date so that multiple backup instances are possible) let now = time(); @@ -521,13 +524,6 @@ async fn export_backup(context: &Context, dir: &Path, passphrase: String) -> Res .await?; sql::housekeeping(context).await.ok_or_log(context); - context - .sql - .execute("VACUUM;", paramsv![]) - .await - .map_err(|e| warn!(context, "Vacuum failed, exporting anyway {}", e)) - .ok(); - ensure!( context.scheduler.read().await.is_none(), "cannot export backup, IO is running" @@ -540,11 +536,29 @@ async fn export_backup(context: &Context, dir: &Path, passphrase: String) -> Res dest_path.display(), ); - context - .sql - .export(&temp_db_path, passphrase) - .await - .with_context(|| format!("failed to backup plaintext database to {temp_db_path:?}"))?; + let path_str = temp_db_path + .to_str() + .with_context(|| format!("path {temp_db_path:?} is not valid unicode"))?; + + let conn = context.sql.get_conn().await?; + tokio::task::block_in_place(move || { + if let Err(err) = conn.execute("VACUUM", params![]) { + info!(context, "Vacuum failed, exporting anyway: {:#}.", err); + } + conn.execute( + "ATTACH DATABASE ? AS backup KEY ?", + paramsv![path_str, passphrase], + ) + .context("failed to attach backup database")?; + let res = conn + .query_row("SELECT sqlcipher_export('backup')", [], |_row| Ok(())) + .context("failed to export to attached backup database"); + conn.execute("DETACH DATABASE backup", []) + .context("failed to detach backup database")?; + res?; + + Ok::<_, Error>(()) + })?; let res = export_backup_inner(context, &temp_db_path, &temp_path).await; diff --git a/src/sql.rs b/src/sql.rs index 32f1cdff2..f6fe25418 100644 --- a/src/sql.rs +++ b/src/sql.rs @@ -124,31 +124,6 @@ impl Sql { // drop closes the connection } - /// Exports the database to a separate file with the given passphrase. 
- /// - /// Set passphrase to empty string to export the database unencrypted. - pub(crate) async fn export(&self, path: &Path, passphrase: String) -> Result<()> { - let path_str = path - .to_str() - .with_context(|| format!("path {path:?} is not valid unicode"))?; - let conn = self.get_conn().await?; - tokio::task::block_in_place(move || { - conn.execute( - "ATTACH DATABASE ? AS backup KEY ?", - paramsv![path_str, passphrase], - ) - .context("failed to attach backup database")?; - let res = conn - .query_row("SELECT sqlcipher_export('backup')", [], |_row| Ok(())) - .context("failed to export to attached backup database"); - conn.execute("DETACH DATABASE backup", []) - .context("failed to detach backup database")?; - res?; - - Ok(()) - }) - } - /// Imports the database from a separate file with the given passphrase. pub(crate) async fn import(&self, path: &Path, passphrase: String) -> Result<()> { let path_str = path From 85517abf58fa850cafff9f62f821b37b15ae776b Mon Sep 17 00:00:00 2001 From: link2xt Date: Sun, 19 Feb 2023 02:18:45 +0000 Subject: [PATCH 10/24] Derive Debug for Pool --- src/sql/pool.rs | 10 ++-------- 1 file changed, 2 insertions(+), 8 deletions(-) diff --git a/src/sql/pool.rs b/src/sql/pool.rs index 8e8f9602c..133c72ae2 100644 --- a/src/sql/pool.rs +++ b/src/sql/pool.rs @@ -1,6 +1,5 @@ //! Connection pool. -use std::fmt; use std::ops::{Deref, DerefMut}; use std::sync::{Arc, Weak}; @@ -9,6 +8,7 @@ use rusqlite::Connection; use tokio::sync::Notify; /// Inner connection pool. +#[derive(Debug)] struct InnerPool { /// Available connections. connections: ArrayQueue, @@ -64,18 +64,12 @@ impl DerefMut for PooledConnection { } /// Connection pool. -#[derive(Clone)] +#[derive(Clone, Debug)] pub struct Pool { /// Reference to the actual connection pool. inner: Arc, } -impl fmt::Debug for Pool { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - write!(fmt, "Pool") - } -} - impl Pool { /// Creates a new connection pool. 
pub fn new(connections: Vec) -> Self { From 626ec5e793f8ff978432cddd95ee8219378ed637 Mon Sep 17 00:00:00 2001 From: link2xt Date: Sun, 19 Feb 2023 13:05:39 +0000 Subject: [PATCH 11/24] Document the meaning of empty Message.subject --- src/chat.rs | 2 +- src/message.rs | 2 ++ 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/src/chat.rs b/src/chat.rs index cc217af15..dd88cdd30 100644 --- a/src/chat.rs +++ b/src/chat.rs @@ -3397,7 +3397,7 @@ pub async fn forward_msgs(context: &Context, msg_ids: &[MsgId], chat_id: ChatId) msg.param.remove(Param::WebxdcSummaryTimestamp); msg.in_reply_to = None; - // do not leak data as group names; a default subject is generated by mimfactory + // do not leak data as group names; a default subject is generated by mimefactory msg.subject = "".to_string(); let new_msg_id: MsgId; diff --git a/src/message.rs b/src/message.rs index 44d6c1194..688676253 100644 --- a/src/message.rs +++ b/src/message.rs @@ -265,6 +265,8 @@ pub struct Message { pub(crate) text: Option, /// Message subject. + /// + /// If empty, a default subject will be generated when sending. pub(crate) subject: String, /// `Message-ID` header value. 
From f65e1c1587c168f89f96188734228d4f0aae6144 Mon Sep 17 00:00:00 2001 From: link2xt Date: Sun, 19 Feb 2023 13:18:58 +0000 Subject: [PATCH 12/24] Sort dependencies in Cargo.toml --- Cargo.toml | 30 +++++++++++++++--------------- 1 file changed, 15 insertions(+), 15 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index 5f928ff98..b312ab9a3 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -29,12 +29,11 @@ format-flowed = { path = "./format-flowed" } ratelimit = { path = "./deltachat-ratelimit" } anyhow = "1" +async-channel = "1.8.0" async-imap = { git = "https://github.com/async-email/async-imap", branch = "master", default-features = false, features = ["runtime-tokio"] } async-native-tls = { version = "0.4", default-features = false, features = ["runtime-tokio"] } async-smtp = { version = "0.8", default-features = false, features = ["runtime-tokio"] } -trust-dns-resolver = "0.22" -tokio = { version = "1", features = ["fs", "rt-multi-thread", "macros"] } -tokio-tar = { version = "0.3" } # TODO: integrate tokio into async-tar +async_zip = { version = "0.0.9", default-features = false, features = ["deflate"] } backtrace = "0.3" base64 = "0.21" bitflags = "1.3" @@ -43,8 +42,11 @@ crossbeam-queue = "0.3" email = { git = "https://github.com/deltachat/rust-email", branch = "master" } encoded-words = { git = "https://github.com/async-email/encoded-words", branch = "master" } escaper = "0.1" +fast-socks5 = "0.8" futures = "0.3" +futures-lite = "1.12.0" hex = "0.4.0" +humansize = "2" image = { version = "0.24.5", default-features=false, features = ["gif", "jpeg", "ico", "png", "pnm", "webp", "bmp"] } kamadak-exif = "0.5" lettre_email = { git = "https://github.com/deltachat/lettre", branch = "master" } @@ -58,9 +60,11 @@ once_cell = "1.17.0" percent-encoding = "2.2" pgp = { version = "0.9", default-features = false } pretty_env_logger = { version = "0.4", optional = true } +qrcodegen = "1.7.0" quick-xml = "0.27" rand = "0.8" regex = "1.7" +reqwest = { version = "0.11.14", 
features = ["json"] } rusqlite = { version = "0.28", features = ["sqlcipher"] } rust-hsluv = "0.1" sanitize-filename = "0.4" @@ -71,21 +75,17 @@ sha2 = "0.10" smallvec = "1" strum = "0.24" strum_macros = "0.24" -thiserror = "1" -toml = "0.7" -url = "2" -uuid = { version = "1", features = ["serde", "v4"] } -fast-socks5 = "0.8" -humansize = "2" -qrcodegen = "1.7.0" tagger = "4.3.4" textwrap = "0.16.0" -async-channel = "1.8.0" -futures-lite = "1.12.0" -tokio-stream = { version = "0.1.11", features = ["fs"] } +thiserror = "1" tokio-io-timeout = "1.2.0" -reqwest = { version = "0.11.14", features = ["json"] } -async_zip = { version = "0.0.9", default-features = false, features = ["deflate"] } +tokio-stream = { version = "0.1.11", features = ["fs"] } +tokio-tar = { version = "0.3" } # TODO: integrate tokio into async-tar +tokio = { version = "1", features = ["fs", "rt-multi-thread", "macros"] } +toml = "0.7" +trust-dns-resolver = "0.22" +url = "2" +uuid = { version = "1", features = ["serde", "v4"] } [dev-dependencies] ansi_term = "0.12.0" From 641d102abad02d8e4bdc00a7c9fe1f7f2e9ac8bc Mon Sep 17 00:00:00 2001 From: Hocuri Date: Sun, 19 Feb 2023 14:42:39 +0100 Subject: [PATCH 13/24] Add dc_msg_set_subject() C FFI (#4057) --- CHANGELOG.md | 1 + deltachat-ffi/deltachat.h | 12 ++++++++++++ deltachat-ffi/src/lib.rs | 10 ++++++++++ src/message.rs | 6 ++++++ src/mimefactory.rs | 16 ++++++++++++++++ 5 files changed, 45 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index c645f3c8c..5eabb8177 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -17,6 +17,7 @@ - Remove `MimeMessage::from_bytes()` public interface. #4033 - BREAKING Types: jsonrpc: `get_messages` now returns a map with `MessageLoadResult` instead of failing completely if one of the requested messages could not be loaded. 
#4038 +- Add dc_msg_set_subject() C-FFI #4057 ## 1.108.0 diff --git a/deltachat-ffi/deltachat.h b/deltachat-ffi/deltachat.h index a24b500df..6148d4198 100644 --- a/deltachat-ffi/deltachat.h +++ b/deltachat-ffi/deltachat.h @@ -4327,6 +4327,18 @@ void dc_msg_set_text (dc_msg_t* msg, const char* text); void dc_msg_set_html (dc_msg_t* msg, const char* html); +/** + * Sets the email's subject. If it's empty, a default subject + * will be used (e.g. `Message from Alice` or `Re: `). + * This does not alter any information in the database. + * + * @memberof dc_msg_t + * @param msg The message object. + * @param subject The new subject. + */ +void dc_msg_set_subject (dc_msg_t* msg, const char* subject); + + /** * Set different sender name for a message. * This overrides the name set by the dc_set_config()-option `displayname`. diff --git a/deltachat-ffi/src/lib.rs b/deltachat-ffi/src/lib.rs index c7cd23bd2..1866a367f 100644 --- a/deltachat-ffi/src/lib.rs +++ b/deltachat-ffi/src/lib.rs @@ -3598,6 +3598,16 @@ pub unsafe extern "C" fn dc_msg_set_html(msg: *mut dc_msg_t, html: *const libc:: ffi_msg.message.set_html(to_opt_string_lossy(html)) } +#[no_mangle] +pub unsafe extern "C" fn dc_msg_set_subject(msg: *mut dc_msg_t, subject: *const libc::c_char) { + if msg.is_null() { + eprintln!("ignoring careless call to dc_msg_get_subject()"); + return; + } + let ffi_msg = &mut *msg; + ffi_msg.message.set_subject(to_string_lossy(subject)); +} + #[no_mangle] pub unsafe extern "C" fn dc_msg_set_override_sender_name( msg: *mut dc_msg_t, diff --git a/src/message.rs b/src/message.rs index 688676253..e4bae9a67 100644 --- a/src/message.rs +++ b/src/message.rs @@ -797,6 +797,12 @@ impl Message { self.text = text; } + /// Sets the email's subject. If it's empty, a default subject + /// will be used (e.g. `Message from Alice` or `Re: `). + pub fn set_subject(&mut self, subject: String) { + self.subject = subject; + } + /// Sets the file associated with a message. 
/// /// This function does not use the file or check if it exists, diff --git a/src/mimefactory.rs b/src/mimefactory.rs index 2672c203d..43978c112 100644 --- a/src/mimefactory.rs +++ b/src/mimefactory.rs @@ -1612,6 +1612,22 @@ mod tests { assert_eq!(maybe_encode_words("äöü"), "=?utf-8?b?w6TDtsO8?="); } + #[tokio::test(flavor = "multi_thread", worker_threads = 2)] + async fn test_manually_set_subject() -> Result<()> { + let t = TestContext::new_alice().await; + let chat = t.create_chat_with_contact("bob", "bob@example.org").await; + + let mut msg = Message::new(Viewtype::Text); + msg.set_subject("Subjeeeeect".to_string()); + + let sent_msg = t.send_msg(chat.id, &mut msg).await; + let payload = sent_msg.payload(); + + assert_eq!(payload.match_indices("Subject: Subjeeeeect").count(), 1); + + Ok(()) + } + #[tokio::test(flavor = "multi_thread", worker_threads = 2)] async fn test_subject_from_mua() { // 1.: Receive a mail from an MUA From 609fc67f0db89c92d8d1cfa7b20f272813dfa9b7 Mon Sep 17 00:00:00 2001 From: Simon Laux Date: Sun, 19 Feb 2023 14:57:00 +0100 Subject: [PATCH 14/24] fix websocket server & add ci test for building it (#4047) the axum update broke the websocket server, because yerpc still uses a the old 5.9 version. So I needed to downgrade the dependency until https://github.com/deltachat/yerpc/pull/35 is merged. I want to retry the kaiOS client experiment, for this I need a working websocket server binary. 
--- .github/workflows/jsonrpc.yml | 4 ++ Cargo.lock | 117 ++-------------------------------- deltachat-jsonrpc/Cargo.toml | 2 +- 3 files changed, 11 insertions(+), 112 deletions(-) diff --git a/.github/workflows/jsonrpc.yml b/.github/workflows/jsonrpc.yml index de05f50d6..692150cd7 100644 --- a/.github/workflows/jsonrpc.yml +++ b/.github/workflows/jsonrpc.yml @@ -35,6 +35,10 @@ jobs: npm run test env: DCC_NEW_TMP_EMAIL: ${{ secrets.DCC_NEW_TMP_EMAIL }} + - name: make sure websocket server version still builds + run: | + cd deltachat-jsonrpc + cargo build --bin deltachat-jsonrpc-server --features webserver - name: Run linter run: | cd deltachat-jsonrpc/typescript diff --git a/Cargo.lock b/Cargo.lock index 7da76f42e..be011af3a 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -239,7 +239,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "acee9fd5073ab6b045a275b3e709c163dd36c90685219cb21804a147b58dba43" dependencies = [ "async-trait", - "axum-core 0.2.9", + "axum-core", "base64 0.13.1", "bitflags", "bytes", @@ -248,7 +248,7 @@ dependencies = [ "http-body", "hyper", "itoa", - "matchit 0.5.0", + "matchit", "memchr", "mime", "percent-encoding", @@ -259,43 +259,7 @@ dependencies = [ "sha-1", "sync_wrapper", "tokio", - "tokio-tungstenite 0.17.2", - "tower", - "tower-http", - "tower-layer", - "tower-service", -] - -[[package]] -name = "axum" -version = "0.6.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e5694b64066a2459918d8074c2ce0d5a88f409431994c2356617c8ae0c4721fc" -dependencies = [ - "async-trait", - "axum-core 0.3.2", - "base64 0.20.0", - "bitflags", - "bytes", - "futures-util", - "http", - "http-body", - "hyper", - "itoa", - "matchit 0.7.0", - "memchr", - "mime", - "percent-encoding", - "pin-project-lite", - "rustversion", - "serde", - "serde_json", - "serde_path_to_error", - "serde_urlencoded", - "sha1", - "sync_wrapper", - "tokio", - "tokio-tungstenite 0.18.0", + "tokio-tungstenite", "tower", "tower-http", 
"tower-layer", @@ -318,23 +282,6 @@ dependencies = [ "tower-service", ] -[[package]] -name = "axum-core" -version = "0.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1cae3e661676ffbacb30f1a824089a8c9150e71017f7e1e38f2aa32009188d34" -dependencies = [ - "async-trait", - "bytes", - "futures-util", - "http", - "http-body", - "mime", - "rustversion", - "tower-layer", - "tower-service", -] - [[package]] name = "backtrace" version = "0.3.67" @@ -368,12 +315,6 @@ version = "0.13.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9e1b586273c5702936fe7b7d6896644d8be71e6314cfe09d3167c95f712589e8" -[[package]] -name = "base64" -version = "0.20.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0ea22880d78093b0cbe17c89f64a7d457941e65759157ec6cb31a31d652b05e5" - [[package]] name = "base64" version = "0.21.0" @@ -969,7 +910,7 @@ version = "1.108.0" dependencies = [ "anyhow", "async-channel", - "axum 0.6.4", + "axum", "deltachat", "env_logger 0.10.0", "futures", @@ -2158,12 +2099,6 @@ version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "73cbba799671b762df5a175adf59ce145165747bb891505c43d09aefbbf38beb" -[[package]] -name = "matchit" -version = "0.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b87248edafb776e59e6ee64a79086f65890d3510f2c656c000bf2a7e8a0aea40" - [[package]] name = "md-5" version = "0.10.5" @@ -3220,15 +3155,6 @@ dependencies = [ "serde", ] -[[package]] -name = "serde_path_to_error" -version = "0.1.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "184c643044780f7ceb59104cef98a5a6f12cb2288a7bc701ab93a362b49fd47d" -dependencies = [ - "serde", -] - [[package]] name = "serde_spanned" version = "0.6.1" @@ -3638,19 +3564,7 @@ dependencies = [ "futures-util", "log", "tokio", - "tungstenite 0.17.3", -] - -[[package]] -name = "tokio-tungstenite" -version = "0.18.0" -source 
= "registry+https://github.com/rust-lang/crates.io-index" -checksum = "54319c93411147bced34cb5609a80e0a8e44c5999c93903a81cd866630ec0bfd" -dependencies = [ - "futures-util", - "log", - "tokio", - "tungstenite 0.18.0", + "tungstenite", ] [[package]] @@ -3860,25 +3774,6 @@ dependencies = [ "utf-8", ] -[[package]] -name = "tungstenite" -version = "0.18.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "30ee6ab729cd4cf0fd55218530c4522ed30b7b6081752839b68fcec8d0960788" -dependencies = [ - "base64 0.13.1", - "byteorder", - "bytes", - "http", - "httparse", - "log", - "rand 0.8.5", - "sha1", - "thiserror", - "url", - "utf-8", -] - [[package]] name = "twofish" version = "0.7.1" @@ -4339,7 +4234,7 @@ dependencies = [ "async-channel", "async-mutex", "async-trait", - "axum 0.5.17", + "axum", "futures", "futures-util", "log", diff --git a/deltachat-jsonrpc/Cargo.toml b/deltachat-jsonrpc/Cargo.toml index 0ca66a9e5..27c4c1a95 100644 --- a/deltachat-jsonrpc/Cargo.toml +++ b/deltachat-jsonrpc/Cargo.toml @@ -28,7 +28,7 @@ sanitize-filename = "0.4" walkdir = "2.3.2" # optional dependencies -axum = { version = "0.6.4", optional = true, features = ["ws"] } +axum = { version = "0.5.9", optional = true, features = ["ws"] } env_logger = { version = "0.10.0", optional = true } [dev-dependencies] From 10066b2bc79ec0a902e41280e85048a8f59d569d Mon Sep 17 00:00:00 2001 From: link2xt Date: Sun, 19 Feb 2023 17:04:18 +0000 Subject: [PATCH 15/24] sql: use semaphore to limit access to the connection pool This ensures that if multiple connections are returned to the pool at the same time, waiters get them in the order they were placed in the queue. 
--- CHANGELOG.md | 1 + src/sql.rs | 2 +- src/sql/pool.rs | 39 +++++++++++++++++++++------------------ 3 files changed, 23 insertions(+), 19 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 5eabb8177..a3b24e038 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -12,6 +12,7 @@ - deltachat-rpc-server: do not block stdin while processing the request. #4041 deltachat-rpc-server now reads the next request as soon as previous request handler is spawned. - enable `auto_vacuum` on all SQL connections #2955 +- use semaphore for connection pool #4061 ### API-Changes diff --git a/src/sql.rs b/src/sql.rs index f6fe25418..6d6a53fec 100644 --- a/src/sql.rs +++ b/src/sql.rs @@ -338,7 +338,7 @@ impl Sql { pub(crate) async fn get_conn(&self) -> Result { let lock = self.pool.read().await; let pool = lock.as_ref().context("no SQL connection")?; - let conn = pool.get().await; + let conn = pool.get().await?; Ok(conn) } diff --git a/src/sql/pool.rs b/src/sql/pool.rs index 133c72ae2..fc7bf05bf 100644 --- a/src/sql/pool.rs +++ b/src/sql/pool.rs @@ -3,9 +3,10 @@ use std::ops::{Deref, DerefMut}; use std::sync::{Arc, Weak}; +use anyhow::{Context, Result}; use crossbeam_queue::ArrayQueue; use rusqlite::Connection; -use tokio::sync::Notify; +use tokio::sync::{OwnedSemaphorePermit, Semaphore}; /// Inner connection pool. #[derive(Debug)] @@ -13,10 +14,8 @@ struct InnerPool { /// Available connections. connections: ArrayQueue, - /// Notifies about added connections. - /// - /// Used to wait for available connection when the pool is empty. - notify: Notify, + /// Counts the number of available connections. + semaphore: Arc, } impl InnerPool { @@ -25,7 +24,6 @@ impl InnerPool { /// The connection could be new or returned back. fn put(&self, connection: Connection) { self.connections.force_push(connection); - self.notify.notify_one(); } } @@ -36,6 +34,9 @@ pub struct PooledConnection { /// Only `None` right after moving the connection back to the pool. 
conn: Option, + + /// Semaphore permit, dropped after returning the connection to the pool. + _permit: OwnedSemaphorePermit, } impl Drop for PooledConnection { @@ -75,7 +76,7 @@ impl Pool { pub fn new(connections: Vec) -> Self { let inner = Arc::new(InnerPool { connections: ArrayQueue::new(connections.len()), - notify: Notify::new(), + semaphore: Arc::new(Semaphore::new(connections.len())), }); for connection in connections { inner.connections.force_push(connection); @@ -84,16 +85,18 @@ impl Pool { } /// Retrieves a connection from the pool. - pub async fn get(&self) -> PooledConnection { - loop { - if let Some(conn) = self.inner.connections.pop() { - return PooledConnection { - pool: Arc::downgrade(&self.inner), - conn: Some(conn), - }; - } - - self.inner.notify.notified().await; - } + pub async fn get(&self) -> Result { + let permit = self.inner.semaphore.clone().acquire_owned().await?; + let conn = self + .inner + .connections + .pop() + .context("got a permit when there are no connections in the pool")?; + let conn = PooledConnection { + pool: Arc::downgrade(&self.inner), + conn: Some(conn), + _permit: permit, + }; + Ok(conn) } } From 44953d6bcc13476400d3c2de71893f94d3df8983 Mon Sep 17 00:00:00 2001 From: link2xt Date: Sun, 19 Feb 2023 16:19:05 +0000 Subject: [PATCH 16/24] Release 1.109.0 --- CHANGELOG.md | 14 +++++++------- Cargo.lock | 10 +++++----- Cargo.toml | 2 +- deltachat-ffi/Cargo.toml | 2 +- deltachat-jsonrpc/Cargo.toml | 2 +- deltachat-jsonrpc/typescript/package.json | 2 +- deltachat-repl/Cargo.toml | 2 +- deltachat-rpc-server/Cargo.toml | 2 +- package.json | 2 +- 9 files changed, 19 insertions(+), 19 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index a3b24e038..a05e98442 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,24 +1,24 @@ # Changelog -## Unreleased +## 1.109.0 ### Changes - deltachat-rpc-client: use `dataclass` for `Account`, `Chat`, `Contact` and `Message` #4042 -- python: mark bindings as supporting typing according to PEP 561 
#4045 -- retry filesystem operations during account migration #4043 -- replace `r2d2` and `r2d2_sqlite` dependencies with an own connection pool #4050 #4053 ### Fixes - deltachat-rpc-server: do not block stdin while processing the request. #4041 deltachat-rpc-server now reads the next request as soon as previous request handler is spawned. -- enable `auto_vacuum` on all SQL connections #2955 -- use semaphore for connection pool #4061 +- Enable `auto_vacuum` on all SQL connections. #2955 +- Replace `r2d2` connection pool with an own implementation. #4050 #4053 #4043 #4061 + This change improves reliability + by closing all database connections immediately when the context is closed. ### API-Changes - Remove `MimeMessage::from_bytes()` public interface. #4033 - BREAKING Types: jsonrpc: `get_messages` now returns a map with `MessageLoadResult` instead of failing completely if one of the requested messages could not be loaded. #4038 -- Add dc_msg_set_subject() C-FFI #4057 +- Add `dc_msg_set_subject()`. 
C-FFI #4057 +- Mark python bindings as supporting typing according to PEP 561 #4045 ## 1.108.0 diff --git a/Cargo.lock b/Cargo.lock index be011af3a..39890839c 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -834,7 +834,7 @@ checksum = "23d8666cb01533c39dde32bcbab8e227b4ed6679b2c925eba05feabea39508fb" [[package]] name = "deltachat" -version = "1.108.0" +version = "1.109.0" dependencies = [ "ansi_term", "anyhow", @@ -906,7 +906,7 @@ dependencies = [ [[package]] name = "deltachat-jsonrpc" -version = "1.108.0" +version = "1.109.0" dependencies = [ "anyhow", "async-channel", @@ -928,7 +928,7 @@ dependencies = [ [[package]] name = "deltachat-repl" -version = "1.108.0" +version = "1.109.0" dependencies = [ "ansi_term", "anyhow", @@ -943,7 +943,7 @@ dependencies = [ [[package]] name = "deltachat-rpc-server" -version = "1.108.0" +version = "1.109.0" dependencies = [ "anyhow", "deltachat-jsonrpc", @@ -966,7 +966,7 @@ dependencies = [ [[package]] name = "deltachat_ffi" -version = "1.108.0" +version = "1.109.0" dependencies = [ "anyhow", "deltachat", diff --git a/Cargo.toml b/Cargo.toml index b312ab9a3..9f85cf1f1 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "deltachat" -version = "1.108.0" +version = "1.109.0" edition = "2021" license = "MPL-2.0" rust-version = "1.63" diff --git a/deltachat-ffi/Cargo.toml b/deltachat-ffi/Cargo.toml index 1e0891cea..2bbec1c1f 100644 --- a/deltachat-ffi/Cargo.toml +++ b/deltachat-ffi/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "deltachat_ffi" -version = "1.108.0" +version = "1.109.0" description = "Deltachat FFI" edition = "2018" readme = "README.md" diff --git a/deltachat-jsonrpc/Cargo.toml b/deltachat-jsonrpc/Cargo.toml index 27c4c1a95..2ad9f2b83 100644 --- a/deltachat-jsonrpc/Cargo.toml +++ b/deltachat-jsonrpc/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "deltachat-jsonrpc" -version = "1.108.0" +version = "1.109.0" description = "DeltaChat JSON-RPC API" edition = "2021" default-run = "deltachat-jsonrpc-server" diff 
--git a/deltachat-jsonrpc/typescript/package.json b/deltachat-jsonrpc/typescript/package.json index e93c1f6b3..039057017 100644 --- a/deltachat-jsonrpc/typescript/package.json +++ b/deltachat-jsonrpc/typescript/package.json @@ -48,5 +48,5 @@ }, "type": "module", "types": "dist/deltachat.d.ts", - "version": "1.108.0" + "version": "1.109.0" } diff --git a/deltachat-repl/Cargo.toml b/deltachat-repl/Cargo.toml index 0b132762f..59e3a3774 100644 --- a/deltachat-repl/Cargo.toml +++ b/deltachat-repl/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "deltachat-repl" -version = "1.108.0" +version = "1.109.0" edition = "2021" [dependencies] diff --git a/deltachat-rpc-server/Cargo.toml b/deltachat-rpc-server/Cargo.toml index f678e2cb0..619841fc9 100644 --- a/deltachat-rpc-server/Cargo.toml +++ b/deltachat-rpc-server/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "deltachat-rpc-server" -version = "1.108.0" +version = "1.109.0" description = "DeltaChat JSON-RPC server" edition = "2021" readme = "README.md" diff --git a/package.json b/package.json index e028cd28b..9045c36e2 100644 --- a/package.json +++ b/package.json @@ -60,5 +60,5 @@ "test:mocha": "mocha -r esm node/test/test.js --growl --reporter=spec --bail --exit" }, "types": "node/dist/index.d.ts", - "version": "1.108.0" + "version": "1.109.0" } \ No newline at end of file From 42a7e91f05414a137f40f99c401ae8d0513f1ffe Mon Sep 17 00:00:00 2001 From: link2xt Date: Thu, 16 Feb 2023 12:46:39 +0000 Subject: [PATCH 17/24] python: stop using pytest-rerunfailures --- python/tox.ini | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/python/tox.ini b/python/tox.ini index 17905738f..6a50005c5 100644 --- a/python/tox.ini +++ b/python/tox.ini @@ -8,7 +8,7 @@ envlist = [testenv] commands = - pytest -n6 --extra-info --reruns 2 --reruns-delay 5 -v -rsXx --ignored --strict-tls {posargs: tests examples} + pytest -n6 --extra-info -v -rsXx --ignored --strict-tls {posargs: tests examples} pip wheel . 
-w {toxworkdir}/wheelhouse --no-deps setenv = # Avoid stack overflow when Rust core is built without optimizations. @@ -21,7 +21,6 @@ passenv = RUSTC_WRAPPER deps = pytest - pytest-rerunfailures pytest-timeout pytest-xdist pdbpp From 5bdd49b45185105da599c55990a9d4934c82675a Mon Sep 17 00:00:00 2001 From: link2xt Date: Sun, 19 Feb 2023 21:57:36 +0000 Subject: [PATCH 18/24] Add Unreleased section to changelog --- CHANGELOG.md | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index a05e98442..e3ef2a58a 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,14 @@ # Changelog +## Unreleased + +### Changes + +### Fixes + +### API-Changes + + ## 1.109.0 ### Changes From 9389e110070c444058705d3751be803a2ab5b009 Mon Sep 17 00:00:00 2001 From: link2xt Date: Sun, 19 Feb 2023 23:07:23 +0000 Subject: [PATCH 19/24] ffi: log create_contact() errors --- deltachat-ffi/src/lib.rs | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/deltachat-ffi/src/lib.rs b/deltachat-ffi/src/lib.rs index 1866a367f..cd9b2521a 100644 --- a/deltachat-ffi/src/lib.rs +++ b/deltachat-ffi/src/lib.rs @@ -2003,12 +2003,10 @@ pub unsafe extern "C" fn dc_create_contact( let ctx = &*context; let name = to_string_lossy(name); - block_on(async move { - Contact::create(ctx, &name, &to_string_lossy(addr)) - .await - .map(|id| id.to_u32()) - .unwrap_or(0) - }) + block_on(Contact::create(ctx, &name, &to_string_lossy(addr))) + .log_err(ctx, "Cannot create contact") + .map(|id| id.to_u32()) + .unwrap_or(0) } #[no_mangle] From 446214fd7ba0cd8a4468ffe981bdef56f480e0c0 Mon Sep 17 00:00:00 2001 From: link2xt Date: Sun, 19 Feb 2023 15:01:33 +0000 Subject: [PATCH 20/24] sql: use transaction in Contact::add_or_lookup() --- CHANGELOG.md | 1 + src/contact.rs | 235 +++++++++++++++++++++++++------------------------ 2 files changed, 122 insertions(+), 114 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index e3ef2a58a..098244902 100644 --- a/CHANGELOG.md +++ 
b/CHANGELOG.md @@ -3,6 +3,7 @@ ## Unreleased ### Changes +- use transaction in `Contact::add_or_lookup()` #4059 ### Fixes diff --git a/src/contact.rs b/src/contact.rs index b86cbedb7..d27ccc361 100644 --- a/src/contact.rs +++ b/src/contact.rs @@ -13,6 +13,7 @@ use async_channel::{self as channel, Receiver, Sender}; use deltachat_derive::{FromSql, ToSql}; use once_cell::sync::Lazy; use regex::Regex; +use rusqlite::OptionalExtension; use serde::{Deserialize, Serialize}; use tokio::task; use tokio::time::{timeout, Duration}; @@ -520,8 +521,6 @@ impl Contact { /// Depending on the origin, both, "row_name" and "row_authname" are updated from "name". /// /// Returns the contact_id and a `Modifier` value indicating if a modification occurred. - /// - /// Returns None if the contact with such address cannot exist. pub(crate) async fn add_or_lookup( context: &Context, name: &str, @@ -566,14 +565,12 @@ impl Contact { ); let mut update_addr = false; - let mut row_id = 0; - if let Some((id, row_name, row_addr, row_origin, row_authname)) = context - .sql - .query_row_optional( - "SELECT id, name, addr, origin, authname \ - FROM contacts WHERE addr=? COLLATE NOCASE;", - paramsv![addr.to_string()], + let row_id = context.sql.transaction(|transaction| { + let row = transaction.query_row( + "SELECT id, name, addr, origin, authname + FROM contacts WHERE addr=? COLLATE NOCASE", + [addr.to_string()], |row| { let row_id: isize = row.get(0)?; let row_name: String = row.get(1)?; @@ -582,120 +579,130 @@ impl Contact { let row_authname: String = row.get(4)?; Ok((row_id, row_name, row_addr, row_origin, row_authname)) - }, - ) - .await? 
- { - let update_name = manual && name != row_name; - let update_authname = !manual - && name != row_authname - && !name.is_empty() - && (origin >= row_origin - || origin == Origin::IncomingUnknownFrom - || row_authname.is_empty()); + }).optional()?; - row_id = u32::try_from(id)?; - if origin >= row_origin && addr.as_ref() != row_addr { - update_addr = true; - } - if update_name || update_authname || update_addr || origin > row_origin { - let new_name = if update_name { - name.to_string() - } else { - row_name - }; + let row_id; + if let Some((id, row_name, row_addr, row_origin, row_authname)) = row { + let update_name = manual && name != row_name; + let update_authname = !manual + && name != row_authname + && !name.is_empty() + && (origin >= row_origin + || origin == Origin::IncomingUnknownFrom + || row_authname.is_empty()); - context - .sql - .execute( - "UPDATE contacts SET name=?, addr=?, origin=?, authname=? WHERE id=?;", - paramsv![ - new_name, - if update_addr { - addr.to_string() - } else { - row_addr - }, - if origin > row_origin { - origin - } else { - row_origin - }, - if update_authname { - name.to_string() - } else { - row_authname - }, - row_id - ], - ) - .await - .ok(); + row_id = u32::try_from(id)?; + if origin >= row_origin && addr.as_ref() != row_addr { + update_addr = true; + } + if update_name || update_authname || update_addr || origin > row_origin { + let new_name = if update_name { + name.to_string() + } else { + row_name + }; - if update_name || update_authname { - // Update the contact name also if it is used as a group name. - // This is one of the few duplicated data, however, getting the chat list is easier this way. - let chat_id: Option = context.sql.query_get_value( - "SELECT id FROM chats WHERE type=? AND id IN(SELECT chat_id FROM chats_contacts WHERE contact_id=?)", - paramsv![Chattype::Single, isize::try_from(row_id)?] 
- ).await?; - if let Some(chat_id) = chat_id { - let contact = Contact::get_by_id(context, ContactId::new(row_id)).await?; - let chat_name = contact.get_display_name(); - match context - .sql - .execute( - "UPDATE chats SET name=?1 WHERE id=?2 AND name!=?3", - paramsv![chat_name, chat_id, chat_name], - ) - .await - { - Err(err) => warn!(context, "Can't update chat name: {}", err), - Ok(count) => { - if count > 0 { - // Chat name updated - context.emit_event(EventType::ChatModified(ChatId::new( - chat_id.try_into()?, - ))); - } + transaction + .execute( + "UPDATE contacts SET name=?, addr=?, origin=?, authname=? WHERE id=?;", + paramsv![ + new_name, + if update_addr { + addr.to_string() + } else { + row_addr + }, + if origin > row_origin { + origin + } else { + row_origin + }, + if update_authname { + name.to_string() + } else { + row_authname + }, + row_id + ], + )?; + + if update_name || update_authname { + // Update the contact name also if it is used as a group name. + // This is one of the few duplicated data, however, getting the chat list is easier this way. + let chat_id: Option = transaction.query_row( + "SELECT id FROM chats WHERE type=? 
AND id IN(SELECT chat_id FROM chats_contacts WHERE contact_id=?)", + params![Chattype::Single, isize::try_from(row_id)?], + |row| { + let chat_id: ChatId = row.get(0)?; + Ok(chat_id) + } + ).optional()?; + + if let Some(chat_id) = chat_id { + let contact_id = ContactId::new(row_id); + let (addr, name, authname) = + transaction.query_row( + "SELECT addr, name, authname + FROM contacts + WHERE id=?", + params![contact_id], + |row| { + let addr: String = row.get(0)?; + let name: String = row.get(1)?; + let authname: String = row.get(2)?; + Ok((addr, name, authname)) + })?; + + let chat_name = if !name.is_empty() { + name + } else if !authname.is_empty() { + authname + } else { + addr + }; + + let count = transaction.execute( + "UPDATE chats SET name=?1 WHERE id=?2 AND name!=?1", + params![chat_name, chat_id])?; + + if count > 0 { + // Chat name updated + context.emit_event(EventType::ChatModified(chat_id)); } } } + sth_modified = Modifier::Modified; } - sth_modified = Modifier::Modified; - } - } else { - let update_name = manual; - let update_authname = !manual; - - if let Ok(new_row_id) = context - .sql - .insert( - "INSERT INTO contacts (name, addr, origin, authname) VALUES(?, ?, ?, ?);", - paramsv![ - if update_name { - name.to_string() - } else { - "".to_string() - }, - addr, - origin, - if update_authname { - name.to_string() - } else { - "".to_string() - } - ], - ) - .await - { - row_id = u32::try_from(new_row_id)?; - sth_modified = Modifier::Created; - info!(context, "added contact id={} addr={}", row_id, &addr); } else { - error!(context, "Cannot add contact."); + let update_name = manual; + let update_authname = !manual; + + transaction + .execute( + "INSERT INTO contacts (name, addr, origin, authname) + VALUES (?, ?, ?, ?);", + params![ + if update_name { + name.to_string() + } else { + "".to_string() + }, + addr, + origin, + if update_authname { + name.to_string() + } else { + "".to_string() + } + ], + )?; + + sth_modified = Modifier::Created; + row_id = 
u32::try_from(transaction.last_insert_rowid())?; + info!(context, "added contact id={} addr={}", row_id, &addr); } - } + Ok(row_id) + }).await?; Ok((ContactId::new(row_id), sth_modified)) } From e2151e26ee1ade962d56782920ffbccd7dc74289 Mon Sep 17 00:00:00 2001 From: link2xt Date: Sun, 19 Feb 2023 23:37:34 +0000 Subject: [PATCH 21/24] ci: pin rustfmt version --- .github/workflows/ci.yml | 22 ++++++++-------------- 1 file changed, 8 insertions(+), 14 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 4000e7296..4cb9e1532 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -10,26 +10,20 @@ env: RUSTFLAGS: -Dwarnings jobs: - - fmt: - name: Rustfmt + lint: + name: Rustfmt and Clippy runs-on: ubuntu-latest + env: + RUSTUP_TOOLCHAIN: 1.67.1 steps: - uses: actions/checkout@v3 - - run: cargo fmt --all -- --check - - run_clippy: - name: Clippy - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v3 - - name: Install clippy - run: rustup toolchain install 1.67.1 --component clippy + - name: Install rustfmt and clippy + run: rustup toolchain install $RUSTUP_TOOLCHAIN --component rustfmt --component clippy - name: Cache rust cargo artifacts uses: swatinem/rust-cache@v2 + - name: Run rustfmt + run: cargo fmt --all -- --check - name: Run clippy - env: - RUSTUP_TOOLCHAIN: 1.67.1 run: scripts/clippy.sh docs: From 0dd87b0bae241330def97b5a65074853d1e5501c Mon Sep 17 00:00:00 2001 From: link2xt Date: Mon, 20 Feb 2023 11:48:57 +0000 Subject: [PATCH 22/24] ci: format .yml with prettier --- .github/workflows/ci.yml | 114 +++++++++--------- .../workflows/jsonrpc-client-npm-package.yml | 17 ++- .github/workflows/node-delete-preview.yml | 39 +++--- .github/workflows/node-docs.yml | 40 +++--- .github/workflows/node-package.yml | 15 ++- .github/workflows/node-tests.yml | 8 +- .github/workflows/repl.yml | 16 +-- .github/workflows/upload-docs.yml | 26 ++-- .github/workflows/upload-ffi-docs.yml | 26 ++-- 9 files changed, 147 
insertions(+), 154 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 4cb9e1532..cf8185b22 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -8,7 +8,7 @@ on: env: RUSTFLAGS: -Dwarnings - + jobs: lint: name: Rustfmt and Clippy @@ -63,73 +63,73 @@ jobs: python: 3.7 runs-on: ${{ matrix.os }} steps: - - uses: actions/checkout@master + - uses: actions/checkout@master - - name: Install Rust ${{ matrix.rust }} - run: rustup toolchain install ${{ matrix.rust }} - - run: rustup override set ${{ matrix.rust }} + - name: Install Rust ${{ matrix.rust }} + run: rustup toolchain install ${{ matrix.rust }} + - run: rustup override set ${{ matrix.rust }} - - name: Cache rust cargo artifacts - uses: swatinem/rust-cache@v2 + - name: Cache rust cargo artifacts + uses: swatinem/rust-cache@v2 - - name: Check - run: cargo check --workspace --bins --examples --tests --benches + - name: Check + run: cargo check --workspace --bins --examples --tests --benches - - name: Tests - run: cargo test --workspace + - name: Tests + run: cargo test --workspace - - name: Test cargo vendor - run: cargo vendor + - name: Test cargo vendor + run: cargo vendor - - name: Install python - if: ${{ matrix.python }} - uses: actions/setup-python@v4 - with: - python-version: ${{ matrix.python }} + - name: Install python + if: ${{ matrix.python }} + uses: actions/setup-python@v4 + with: + python-version: ${{ matrix.python }} - - name: Install tox - if: ${{ matrix.python }} - run: pip install tox + - name: Install tox + if: ${{ matrix.python }} + run: pip install tox - - name: Build C library - if: ${{ matrix.python }} - run: cargo build -p deltachat_ffi --features jsonrpc + - name: Build C library + if: ${{ matrix.python }} + run: cargo build -p deltachat_ffi --features jsonrpc - - name: Run python tests - if: ${{ matrix.python }} - env: - DCC_NEW_TMP_EMAIL: ${{ secrets.DCC_NEW_TMP_EMAIL }} - DCC_RS_TARGET: debug - DCC_RS_DEV: ${{ github.workspace }} - 
working-directory: python - run: tox -e lint,mypy,doc,py3 + - name: Run python tests + if: ${{ matrix.python }} + env: + DCC_NEW_TMP_EMAIL: ${{ secrets.DCC_NEW_TMP_EMAIL }} + DCC_RS_TARGET: debug + DCC_RS_DEV: ${{ github.workspace }} + working-directory: python + run: tox -e lint,mypy,doc,py3 - - name: Build deltachat-rpc-server - if: ${{ matrix.python }} - run: cargo build -p deltachat-rpc-server + - name: Build deltachat-rpc-server + if: ${{ matrix.python }} + run: cargo build -p deltachat-rpc-server - - name: Add deltachat-rpc-server to path - if: ${{ matrix.python }} - run: echo ${{ github.workspace }}/target/debug >> $GITHUB_PATH + - name: Add deltachat-rpc-server to path + if: ${{ matrix.python }} + run: echo ${{ github.workspace }}/target/debug >> $GITHUB_PATH - - name: Run deltachat-rpc-client tests - if: ${{ matrix.python }} - env: - DCC_NEW_TMP_EMAIL: ${{ secrets.DCC_NEW_TMP_EMAIL }} - working-directory: deltachat-rpc-client - run: tox -e py3,lint + - name: Run deltachat-rpc-client tests + if: ${{ matrix.python }} + env: + DCC_NEW_TMP_EMAIL: ${{ secrets.DCC_NEW_TMP_EMAIL }} + working-directory: deltachat-rpc-client + run: tox -e py3,lint - - name: Install pypy - if: ${{ matrix.python }} - uses: actions/setup-python@v4 - with: - python-version: 'pypy${{ matrix.python }}' + - name: Install pypy + if: ${{ matrix.python }} + uses: actions/setup-python@v4 + with: + python-version: "pypy${{ matrix.python }}" - - name: Run pypy tests - if: ${{ matrix.python }} - env: - DCC_NEW_TMP_EMAIL: ${{ secrets.DCC_NEW_TMP_EMAIL }} - DCC_RS_TARGET: debug - DCC_RS_DEV: ${{ github.workspace }} - working-directory: python - run: tox -e pypy3 + - name: Run pypy tests + if: ${{ matrix.python }} + env: + DCC_NEW_TMP_EMAIL: ${{ secrets.DCC_NEW_TMP_EMAIL }} + DCC_RS_TARGET: debug + DCC_RS_DEV: ${{ github.workspace }} + working-directory: python + run: tox -e pypy3 diff --git a/.github/workflows/jsonrpc-client-npm-package.yml b/.github/workflows/jsonrpc-client-npm-package.yml index 
a1c3b4b82..7d7f0145e 100644 --- a/.github/workflows/jsonrpc-client-npm-package.yml +++ b/.github/workflows/jsonrpc-client-npm-package.yml @@ -1,15 +1,14 @@ -name: 'jsonrpc js client build' +name: "jsonrpc js client build" on: pull_request: push: tags: - - '*' - - '!py-*' - + - "*" + - "!py-*" jobs: pack-module: - name: 'Package @deltachat/jsonrpc-client and upload to download.delta.chat' + name: "Package @deltachat/jsonrpc-client and upload to download.delta.chat" runs-on: ubuntu-18.04 steps: - name: Install tree @@ -18,7 +17,7 @@ jobs: uses: actions/checkout@v3 - uses: actions/setup-node@v3 with: - node-version: '16' + node-version: "16" - name: Get tag id: tag uses: dawidd6/action-get-tag@v1 @@ -69,9 +68,9 @@ jobs: if: steps.upload-preview.outcome == 'success' run: node ./node/scripts/postLinksToDetails.js env: - URL: preview/${{ env.DELTACHAT_JSONRPC_TAR_GZ }} - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - MSG_CONTEXT: Download the deltachat-jsonrpc-client.tgz + URL: preview/${{ env.DELTACHAT_JSONRPC_TAR_GZ }} + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + MSG_CONTEXT: Download the deltachat-jsonrpc-client.tgz # Upload to download.delta.chat/node/ - name: Upload deltachat-jsonrpc-client build to download.delta.chat/node/ if: ${{ steps.tag.outputs.tag }} diff --git a/.github/workflows/node-delete-preview.yml b/.github/workflows/node-delete-preview.yml index ba698c846..447448cd1 100644 --- a/.github/workflows/node-delete-preview.yml +++ b/.github/workflows/node-delete-preview.yml @@ -7,26 +7,25 @@ on: jobs: delete: - runs-on: ubuntu-latest steps: - - name: Get Pullrequest ID - id: getid - run: | - export PULLREQUEST_ID=$(jq .number < $GITHUB_EVENT_PATH) - echo "prid=$PULLREQUEST_ID" >> $GITHUB_OUTPUT - - name: Renaming - run: | - # create empty file to copy it over the outdated deliverable on download.delta.chat - echo "This preview build is outdated and has been removed." 
> empty - cp empty deltachat-node-${{ steps.getid.outputs.prid }}.tar.gz - - name: Replace builds with dummy files - uses: horochx/deploy-via-scp@v1.0.1 - with: - user: ${{ secrets.USERNAME }} - key: ${{ secrets.SSH_KEY }} - host: "download.delta.chat" - port: 22 - local: "deltachat-node-${{ steps.getid.outputs.prid }}.tar.gz" - remote: "/var/www/html/download/node/preview/" + - name: Get Pullrequest ID + id: getid + run: | + export PULLREQUEST_ID=$(jq .number < $GITHUB_EVENT_PATH) + echo "prid=$PULLREQUEST_ID" >> $GITHUB_OUTPUT + - name: Renaming + run: | + # create empty file to copy it over the outdated deliverable on download.delta.chat + echo "This preview build is outdated and has been removed." > empty + cp empty deltachat-node-${{ steps.getid.outputs.prid }}.tar.gz + - name: Replace builds with dummy files + uses: horochx/deploy-via-scp@v1.0.1 + with: + user: ${{ secrets.USERNAME }} + key: ${{ secrets.SSH_KEY }} + host: "download.delta.chat" + port: 22 + local: "deltachat-node-${{ steps.getid.outputs.prid }}.tar.gz" + remote: "/var/www/html/download/node/preview/" diff --git a/.github/workflows/node-docs.yml b/.github/workflows/node-docs.yml index a7c07e0d9..1cd1a60d4 100644 --- a/.github/workflows/node-docs.yml +++ b/.github/workflows/node-docs.yml @@ -9,26 +9,26 @@ jobs: generate: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v3 - - name: Use Node.js 16.x - uses: actions/setup-node@v3 - with: - node-version: 16.x + - name: Use Node.js 16.x + uses: actions/setup-node@v3 + with: + node-version: 16.x - - name: npm install and generate documentation - run: | - cd node - npm i --ignore-scripts - npx typedoc - mv docs js + - name: npm install and generate documentation + run: | + cd node + npm i --ignore-scripts + npx typedoc + mv docs js - - name: Upload - uses: horochx/deploy-via-scp@v1.0.1 - with: - user: ${{ secrets.USERNAME }} - key: ${{ secrets.KEY }} - host: "delta.chat" - port: 22 - local: "node/js" - remote: 
"/var/www/html/" + - name: Upload + uses: horochx/deploy-via-scp@v1.0.1 + with: + user: ${{ secrets.USERNAME }} + key: ${{ secrets.KEY }} + host: "delta.chat" + port: 22 + local: "node/js" + remote: "/var/www/html/" diff --git a/.github/workflows/node-package.yml b/.github/workflows/node-package.yml index f0cb1cc09..b737ca00c 100644 --- a/.github/workflows/node-package.yml +++ b/.github/workflows/node-package.yml @@ -1,11 +1,10 @@ -name: 'node.js build' +name: "node.js build" on: pull_request: push: tags: - - '*' - - '!py-*' - + - "*" + - "!py-*" jobs: prebuild: @@ -19,7 +18,7 @@ jobs: uses: actions/checkout@v3 - uses: actions/setup-node@v3 with: - node-version: '16' + node-version: "16" - name: System info run: | rustc -vV @@ -74,7 +73,7 @@ jobs: uses: actions/checkout@v3 - uses: actions/setup-node@v2 with: - node-version: '16' + node-version: "16" - name: Get tag id: tag uses: dawidd6/action-get-tag@v1 @@ -152,8 +151,8 @@ jobs: if: steps.upload-preview.outcome == 'success' run: node ./node/scripts/postLinksToDetails.js env: - URL: preview/${{ env.DELTACHAT_NODE_TAR_GZ }} - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + URL: preview/${{ env.DELTACHAT_NODE_TAR_GZ }} + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} # Upload to download.delta.chat/node/ - name: Upload deltachat-node build to download.delta.chat/node/ if: ${{ steps.tag.outputs.tag }} diff --git a/.github/workflows/node-tests.yml b/.github/workflows/node-tests.yml index e0d557d42..73e8d0cc4 100644 --- a/.github/workflows/node-tests.yml +++ b/.github/workflows/node-tests.yml @@ -1,4 +1,4 @@ -name: 'node.js tests' +name: "node.js tests" on: pull_request: push: @@ -19,7 +19,7 @@ jobs: uses: actions/checkout@v3 - uses: actions/setup-node@v3 with: - node-version: '16' + node-version: "16" - name: System info run: | rustc -vV @@ -59,7 +59,7 @@ jobs: npm run test env: DCC_NEW_TMP_EMAIL: ${{ secrets.DCC_NEW_TMP_EMAIL }} - NODE_OPTIONS: '--force-node-api-uncaught-exceptions-policy=true' + NODE_OPTIONS: 
"--force-node-api-uncaught-exceptions-policy=true" - name: Run tests on Windows, except lint timeout-minutes: 10 if: runner.os == 'Windows' @@ -68,4 +68,4 @@ jobs: npm run test:mocha env: DCC_NEW_TMP_EMAIL: ${{ secrets.DCC_NEW_TMP_EMAIL }} - NODE_OPTIONS: '--force-node-api-uncaught-exceptions-policy=true' + NODE_OPTIONS: "--force-node-api-uncaught-exceptions-policy=true" diff --git a/.github/workflows/repl.yml b/.github/workflows/repl.yml index efad7558e..a3d391fe2 100644 --- a/.github/workflows/repl.yml +++ b/.github/workflows/repl.yml @@ -11,13 +11,13 @@ jobs: name: Build REPL example runs-on: windows-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v3 - - name: Build - run: cargo build -p deltachat-repl --features vendored + - name: Build + run: cargo build -p deltachat-repl --features vendored - - name: Upload binary - uses: actions/upload-artifact@v3 - with: - name: repl.exe - path: 'target/debug/deltachat-repl.exe' + - name: Upload binary + uses: actions/upload-artifact@v3 + with: + name: repl.exe + path: "target/debug/deltachat-repl.exe" diff --git a/.github/workflows/upload-docs.yml b/.github/workflows/upload-docs.yml index c624cb081..8f0ceb3c0 100644 --- a/.github/workflows/upload-docs.yml +++ b/.github/workflows/upload-docs.yml @@ -8,20 +8,18 @@ on: jobs: build: - runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 - - name: Build the documentation with cargo - run: | - cargo doc --package deltachat --no-deps - - name: Upload to rs.delta.chat - uses: up9cloud/action-rsync@v1.3 - env: - USER: ${{ secrets.USERNAME }} - KEY: ${{ secrets.KEY }} - HOST: "delta.chat" - SOURCE: "target/doc" - TARGET: "/var/www/html/rs/" - + - uses: actions/checkout@v3 + - name: Build the documentation with cargo + run: | + cargo doc --package deltachat --no-deps + - name: Upload to rs.delta.chat + uses: up9cloud/action-rsync@v1.3 + env: + USER: ${{ secrets.USERNAME }} + KEY: ${{ secrets.KEY }} + HOST: "delta.chat" + SOURCE: "target/doc" + TARGET: 
"/var/www/html/rs/" diff --git a/.github/workflows/upload-ffi-docs.yml b/.github/workflows/upload-ffi-docs.yml index 73fa07cb1..c230b715e 100644 --- a/.github/workflows/upload-ffi-docs.yml +++ b/.github/workflows/upload-ffi-docs.yml @@ -8,20 +8,18 @@ on: jobs: build: - runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 - - name: Build the documentation with cargo - run: | - cargo doc --package deltachat_ffi --no-deps - - name: Upload to cffi.delta.chat - uses: up9cloud/action-rsync@v1.3 - env: - USER: ${{ secrets.USERNAME }} - KEY: ${{ secrets.KEY }} - HOST: "delta.chat" - SOURCE: "target/doc" - TARGET: "/var/www/html/cffi/" - + - uses: actions/checkout@v3 + - name: Build the documentation with cargo + run: | + cargo doc --package deltachat_ffi --no-deps + - name: Upload to cffi.delta.chat + uses: up9cloud/action-rsync@v1.3 + env: + USER: ${{ secrets.USERNAME }} + KEY: ${{ secrets.KEY }} + HOST: "delta.chat" + SOURCE: "target/doc" + TARGET: "/var/www/html/cffi/" From 840497d3569698433dfb1527fde433f6999c87a5 Mon Sep 17 00:00:00 2001 From: link2xt Date: Mon, 20 Feb 2023 11:49:59 +0000 Subject: [PATCH 23/24] format mergeable.yml --- .github/mergeable.yml | 28 ++++++++++++++-------------- 1 file changed, 14 insertions(+), 14 deletions(-) diff --git a/.github/mergeable.yml b/.github/mergeable.yml index bf4c87862..9eeb3615c 100644 --- a/.github/mergeable.yml +++ b/.github/mergeable.yml @@ -5,22 +5,22 @@ mergeable: validate: - do: or validate: - - do: description - must_include: - regex: '#skip-changelog' - - do: and - validate: - - do: dependent - changed: - file: 'src/**' - required: ['CHANGELOG.md'] - - do: dependent - changed: - file: 'deltachat-ffi/src/**' - required: ['CHANGELOG.md'] + - do: description + must_include: + regex: "#skip-changelog" + - do: and + validate: + - do: dependent + changed: + file: "src/**" + required: ["CHANGELOG.md"] + - do: dependent + changed: + file: "deltachat-ffi/src/**" + required: ["CHANGELOG.md"] fail: - do: checks - 
status: 'action_required' + status: "action_required" payload: title: Changelog might need an update summary: "Check if CHANGELOG.md needs an update or add #skip-changelog to the PR description." From eaa2ef5a44068da89d1f3b6a17f9584ef2cfa91e Mon Sep 17 00:00:00 2001 From: link2xt Date: Mon, 20 Feb 2023 00:28:50 +0000 Subject: [PATCH 24/24] sql: start transactions as IMMEDIATE With the introduction of transactions in Contact::add_or_lookup(), python tests sometimes fail to create contacts with the following error: Cannot create contact: add_or_lookup: database is locked: Error code 5: The database file is locked `PRAGMA busy_timeout=60000` does not affect this case as the error is returned before 60 seconds pass. DEFERRED transactions with write operations need to be retried from scratch if they are rolled back due to a write operation on another connection. Using IMMEDIATE transactions for writing is an attempt to fix this problem without a retry loop. If we later need DEFERRED transactions, e.g. for reading a snapshot without locking the database, we may introduce another function for this. --- CHANGELOG.md | 1 + src/sql.rs | 10 ++++++++-- 2 files changed, 9 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 098244902..1dfa0c98d 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -6,6 +6,7 @@ - use transaction in `Contact::add_or_lookup()` #4059 ### Fixes +- Start SQL transactions with IMMEDIATE behaviour rather than default DEFERRED one.
#4063 ### API-Changes diff --git a/src/sql.rs b/src/sql.rs index 6d6a53fec..4b4b190eb 100644 --- a/src/sql.rs +++ b/src/sql.rs @@ -6,7 +6,7 @@ use std::path::Path; use std::path::PathBuf; use anyhow::{bail, Context as _, Result}; -use rusqlite::{self, config::DbConfig, Connection, OpenFlags}; +use rusqlite::{self, config::DbConfig, Connection, OpenFlags, TransactionBehavior}; use tokio::sync::RwLock; use crate::blob::BlobObject; @@ -377,6 +377,12 @@ impl Sql { /// /// If the function returns an error, the transaction will be rolled back. If it does not return an /// error, the transaction will be committed. + /// + /// Transactions started use IMMEDIATE behavior + /// rather than default DEFERRED behavior + /// to avoid "database is busy" errors + /// which may happen when DEFERRED transaction + /// is attempted to be promoted to a write transaction. pub async fn transaction(&self, callback: G) -> Result where H: Send + 'static, @@ -384,7 +390,7 @@ impl Sql { { let mut conn = self.get_conn().await?; tokio::task::block_in_place(move || { - let mut transaction = conn.transaction()?; + let mut transaction = conn.transaction_with_behavior(TransactionBehavior::Immediate)?; let ret = callback(&mut transaction); match ret {