Merge branch 'master' into flub/send-backup

This commit is contained in:
Floris Bruynooghe
2023-02-20 17:39:52 +01:00
36 changed files with 641 additions and 649 deletions

28
.github/mergeable.yml vendored
View File

@@ -5,22 +5,22 @@ mergeable:
validate: validate:
- do: or - do: or
validate: validate:
- do: description - do: description
must_include: must_include:
regex: '#skip-changelog' regex: "#skip-changelog"
- do: and - do: and
validate: validate:
- do: dependent - do: dependent
changed: changed:
file: 'src/**' file: "src/**"
required: ['CHANGELOG.md'] required: ["CHANGELOG.md"]
- do: dependent - do: dependent
changed: changed:
file: 'deltachat-ffi/src/**' file: "deltachat-ffi/src/**"
required: ['CHANGELOG.md'] required: ["CHANGELOG.md"]
fail: fail:
- do: checks - do: checks
status: 'action_required' status: "action_required"
payload: payload:
title: Changelog might need an update title: Changelog might need an update
summary: "Check if CHANGELOG.md needs an update or add #skip-changelog to the PR description." summary: "Check if CHANGELOG.md needs an update or add #skip-changelog to the PR description."

View File

@@ -8,28 +8,22 @@ on:
env: env:
RUSTFLAGS: -Dwarnings RUSTFLAGS: -Dwarnings
jobs: jobs:
lint:
fmt: name: Rustfmt and Clippy
name: Rustfmt
runs-on: ubuntu-latest runs-on: ubuntu-latest
env:
RUSTUP_TOOLCHAIN: 1.67.1
steps: steps:
- uses: actions/checkout@v3 - uses: actions/checkout@v3
- run: cargo fmt --all -- --check - name: Install rustfmt and clippy
run: rustup toolchain install $RUSTUP_TOOLCHAIN --component rustfmt --component clippy
run_clippy:
name: Clippy
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- name: Install clippy
run: rustup toolchain install 1.67.1 --component clippy
- name: Cache rust cargo artifacts - name: Cache rust cargo artifacts
uses: swatinem/rust-cache@v2 uses: swatinem/rust-cache@v2
- name: Run rustfmt
run: cargo fmt --all -- --check
- name: Run clippy - name: Run clippy
env:
RUSTUP_TOOLCHAIN: 1.67.1
run: scripts/clippy.sh run: scripts/clippy.sh
docs: docs:
@@ -69,73 +63,73 @@ jobs:
python: 3.7 python: 3.7
runs-on: ${{ matrix.os }} runs-on: ${{ matrix.os }}
steps: steps:
- uses: actions/checkout@master - uses: actions/checkout@master
- name: Install Rust ${{ matrix.rust }} - name: Install Rust ${{ matrix.rust }}
run: rustup toolchain install ${{ matrix.rust }} run: rustup toolchain install ${{ matrix.rust }}
- run: rustup override set ${{ matrix.rust }} - run: rustup override set ${{ matrix.rust }}
- name: Cache rust cargo artifacts - name: Cache rust cargo artifacts
uses: swatinem/rust-cache@v2 uses: swatinem/rust-cache@v2
- name: Check - name: Check
run: cargo check --workspace --bins --examples --tests --benches run: cargo check --workspace --bins --examples --tests --benches
- name: Tests - name: Tests
run: cargo test --workspace run: cargo test --workspace
- name: Test cargo vendor - name: Test cargo vendor
run: cargo vendor run: cargo vendor
- name: Install python - name: Install python
if: ${{ matrix.python }} if: ${{ matrix.python }}
uses: actions/setup-python@v4 uses: actions/setup-python@v4
with: with:
python-version: ${{ matrix.python }} python-version: ${{ matrix.python }}
- name: Install tox - name: Install tox
if: ${{ matrix.python }} if: ${{ matrix.python }}
run: pip install tox run: pip install tox
- name: Build C library - name: Build C library
if: ${{ matrix.python }} if: ${{ matrix.python }}
run: cargo build -p deltachat_ffi --features jsonrpc run: cargo build -p deltachat_ffi --features jsonrpc
- name: Run python tests - name: Run python tests
if: ${{ matrix.python }} if: ${{ matrix.python }}
env: env:
DCC_NEW_TMP_EMAIL: ${{ secrets.DCC_NEW_TMP_EMAIL }} DCC_NEW_TMP_EMAIL: ${{ secrets.DCC_NEW_TMP_EMAIL }}
DCC_RS_TARGET: debug DCC_RS_TARGET: debug
DCC_RS_DEV: ${{ github.workspace }} DCC_RS_DEV: ${{ github.workspace }}
working-directory: python working-directory: python
run: tox -e lint,mypy,doc,py3 run: tox -e lint,mypy,doc,py3
- name: Build deltachat-rpc-server - name: Build deltachat-rpc-server
if: ${{ matrix.python }} if: ${{ matrix.python }}
run: cargo build -p deltachat-rpc-server run: cargo build -p deltachat-rpc-server
- name: Add deltachat-rpc-server to path - name: Add deltachat-rpc-server to path
if: ${{ matrix.python }} if: ${{ matrix.python }}
run: echo ${{ github.workspace }}/target/debug >> $GITHUB_PATH run: echo ${{ github.workspace }}/target/debug >> $GITHUB_PATH
- name: Run deltachat-rpc-client tests - name: Run deltachat-rpc-client tests
if: ${{ matrix.python }} if: ${{ matrix.python }}
env: env:
DCC_NEW_TMP_EMAIL: ${{ secrets.DCC_NEW_TMP_EMAIL }} DCC_NEW_TMP_EMAIL: ${{ secrets.DCC_NEW_TMP_EMAIL }}
working-directory: deltachat-rpc-client working-directory: deltachat-rpc-client
run: tox -e py3,lint run: tox -e py3,lint
- name: Install pypy - name: Install pypy
if: ${{ matrix.python }} if: ${{ matrix.python }}
uses: actions/setup-python@v4 uses: actions/setup-python@v4
with: with:
python-version: 'pypy${{ matrix.python }}' python-version: "pypy${{ matrix.python }}"
- name: Run pypy tests - name: Run pypy tests
if: ${{ matrix.python }} if: ${{ matrix.python }}
env: env:
DCC_NEW_TMP_EMAIL: ${{ secrets.DCC_NEW_TMP_EMAIL }} DCC_NEW_TMP_EMAIL: ${{ secrets.DCC_NEW_TMP_EMAIL }}
DCC_RS_TARGET: debug DCC_RS_TARGET: debug
DCC_RS_DEV: ${{ github.workspace }} DCC_RS_DEV: ${{ github.workspace }}
working-directory: python working-directory: python
run: tox -e pypy3 run: tox -e pypy3

View File

@@ -1,15 +1,14 @@
name: 'jsonrpc js client build' name: "jsonrpc js client build"
on: on:
pull_request: pull_request:
push: push:
tags: tags:
- '*' - "*"
- '!py-*' - "!py-*"
jobs: jobs:
pack-module: pack-module:
name: 'Package @deltachat/jsonrpc-client and upload to download.delta.chat' name: "Package @deltachat/jsonrpc-client and upload to download.delta.chat"
runs-on: ubuntu-18.04 runs-on: ubuntu-18.04
steps: steps:
- name: Install tree - name: Install tree
@@ -18,7 +17,7 @@ jobs:
uses: actions/checkout@v3 uses: actions/checkout@v3
- uses: actions/setup-node@v3 - uses: actions/setup-node@v3
with: with:
node-version: '16' node-version: "16"
- name: Get tag - name: Get tag
id: tag id: tag
uses: dawidd6/action-get-tag@v1 uses: dawidd6/action-get-tag@v1
@@ -69,9 +68,9 @@ jobs:
if: steps.upload-preview.outcome == 'success' if: steps.upload-preview.outcome == 'success'
run: node ./node/scripts/postLinksToDetails.js run: node ./node/scripts/postLinksToDetails.js
env: env:
URL: preview/${{ env.DELTACHAT_JSONRPC_TAR_GZ }} URL: preview/${{ env.DELTACHAT_JSONRPC_TAR_GZ }}
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
MSG_CONTEXT: Download the deltachat-jsonrpc-client.tgz MSG_CONTEXT: Download the deltachat-jsonrpc-client.tgz
# Upload to download.delta.chat/node/ # Upload to download.delta.chat/node/
- name: Upload deltachat-jsonrpc-client build to download.delta.chat/node/ - name: Upload deltachat-jsonrpc-client build to download.delta.chat/node/
if: ${{ steps.tag.outputs.tag }} if: ${{ steps.tag.outputs.tag }}

View File

@@ -35,6 +35,10 @@ jobs:
npm run test npm run test
env: env:
DCC_NEW_TMP_EMAIL: ${{ secrets.DCC_NEW_TMP_EMAIL }} DCC_NEW_TMP_EMAIL: ${{ secrets.DCC_NEW_TMP_EMAIL }}
- name: make sure websocket server version still builds
run: |
cd deltachat-jsonrpc
cargo build --bin deltachat-jsonrpc-server --features webserver
- name: Run linter - name: Run linter
run: | run: |
cd deltachat-jsonrpc/typescript cd deltachat-jsonrpc/typescript

View File

@@ -7,26 +7,25 @@ on:
jobs: jobs:
delete: delete:
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
- name: Get Pullrequest ID - name: Get Pullrequest ID
id: getid id: getid
run: | run: |
export PULLREQUEST_ID=$(jq .number < $GITHUB_EVENT_PATH) export PULLREQUEST_ID=$(jq .number < $GITHUB_EVENT_PATH)
echo "prid=$PULLREQUEST_ID" >> $GITHUB_OUTPUT echo "prid=$PULLREQUEST_ID" >> $GITHUB_OUTPUT
- name: Renaming - name: Renaming
run: | run: |
# create empty file to copy it over the outdated deliverable on download.delta.chat # create empty file to copy it over the outdated deliverable on download.delta.chat
echo "This preview build is outdated and has been removed." > empty echo "This preview build is outdated and has been removed." > empty
cp empty deltachat-node-${{ steps.getid.outputs.prid }}.tar.gz cp empty deltachat-node-${{ steps.getid.outputs.prid }}.tar.gz
- name: Replace builds with dummy files - name: Replace builds with dummy files
uses: horochx/deploy-via-scp@v1.0.1 uses: horochx/deploy-via-scp@v1.0.1
with: with:
user: ${{ secrets.USERNAME }} user: ${{ secrets.USERNAME }}
key: ${{ secrets.SSH_KEY }} key: ${{ secrets.SSH_KEY }}
host: "download.delta.chat" host: "download.delta.chat"
port: 22 port: 22
local: "deltachat-node-${{ steps.getid.outputs.prid }}.tar.gz" local: "deltachat-node-${{ steps.getid.outputs.prid }}.tar.gz"
remote: "/var/www/html/download/node/preview/" remote: "/var/www/html/download/node/preview/"

View File

@@ -9,26 +9,26 @@ jobs:
generate: generate:
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
- uses: actions/checkout@v3 - uses: actions/checkout@v3
- name: Use Node.js 16.x - name: Use Node.js 16.x
uses: actions/setup-node@v3 uses: actions/setup-node@v3
with: with:
node-version: 16.x node-version: 16.x
- name: npm install and generate documentation - name: npm install and generate documentation
run: | run: |
cd node cd node
npm i --ignore-scripts npm i --ignore-scripts
npx typedoc npx typedoc
mv docs js mv docs js
- name: Upload - name: Upload
uses: horochx/deploy-via-scp@v1.0.1 uses: horochx/deploy-via-scp@v1.0.1
with: with:
user: ${{ secrets.USERNAME }} user: ${{ secrets.USERNAME }}
key: ${{ secrets.KEY }} key: ${{ secrets.KEY }}
host: "delta.chat" host: "delta.chat"
port: 22 port: 22
local: "node/js" local: "node/js"
remote: "/var/www/html/" remote: "/var/www/html/"

View File

@@ -1,11 +1,10 @@
name: 'node.js build' name: "node.js build"
on: on:
pull_request: pull_request:
push: push:
tags: tags:
- '*' - "*"
- '!py-*' - "!py-*"
jobs: jobs:
prebuild: prebuild:
@@ -19,7 +18,7 @@ jobs:
uses: actions/checkout@v3 uses: actions/checkout@v3
- uses: actions/setup-node@v3 - uses: actions/setup-node@v3
with: with:
node-version: '16' node-version: "16"
- name: System info - name: System info
run: | run: |
rustc -vV rustc -vV
@@ -74,7 +73,7 @@ jobs:
uses: actions/checkout@v3 uses: actions/checkout@v3
- uses: actions/setup-node@v2 - uses: actions/setup-node@v2
with: with:
node-version: '16' node-version: "16"
- name: Get tag - name: Get tag
id: tag id: tag
uses: dawidd6/action-get-tag@v1 uses: dawidd6/action-get-tag@v1
@@ -152,8 +151,8 @@ jobs:
if: steps.upload-preview.outcome == 'success' if: steps.upload-preview.outcome == 'success'
run: node ./node/scripts/postLinksToDetails.js run: node ./node/scripts/postLinksToDetails.js
env: env:
URL: preview/${{ env.DELTACHAT_NODE_TAR_GZ }} URL: preview/${{ env.DELTACHAT_NODE_TAR_GZ }}
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
# Upload to download.delta.chat/node/ # Upload to download.delta.chat/node/
- name: Upload deltachat-node build to download.delta.chat/node/ - name: Upload deltachat-node build to download.delta.chat/node/
if: ${{ steps.tag.outputs.tag }} if: ${{ steps.tag.outputs.tag }}

View File

@@ -1,4 +1,4 @@
name: 'node.js tests' name: "node.js tests"
on: on:
pull_request: pull_request:
push: push:
@@ -19,7 +19,7 @@ jobs:
uses: actions/checkout@v3 uses: actions/checkout@v3
- uses: actions/setup-node@v3 - uses: actions/setup-node@v3
with: with:
node-version: '16' node-version: "16"
- name: System info - name: System info
run: | run: |
rustc -vV rustc -vV
@@ -59,7 +59,7 @@ jobs:
npm run test npm run test
env: env:
DCC_NEW_TMP_EMAIL: ${{ secrets.DCC_NEW_TMP_EMAIL }} DCC_NEW_TMP_EMAIL: ${{ secrets.DCC_NEW_TMP_EMAIL }}
NODE_OPTIONS: '--force-node-api-uncaught-exceptions-policy=true' NODE_OPTIONS: "--force-node-api-uncaught-exceptions-policy=true"
- name: Run tests on Windows, except lint - name: Run tests on Windows, except lint
timeout-minutes: 10 timeout-minutes: 10
if: runner.os == 'Windows' if: runner.os == 'Windows'
@@ -68,4 +68,4 @@ jobs:
npm run test:mocha npm run test:mocha
env: env:
DCC_NEW_TMP_EMAIL: ${{ secrets.DCC_NEW_TMP_EMAIL }} DCC_NEW_TMP_EMAIL: ${{ secrets.DCC_NEW_TMP_EMAIL }}
NODE_OPTIONS: '--force-node-api-uncaught-exceptions-policy=true' NODE_OPTIONS: "--force-node-api-uncaught-exceptions-policy=true"

View File

@@ -11,13 +11,13 @@ jobs:
name: Build REPL example name: Build REPL example
runs-on: windows-latest runs-on: windows-latest
steps: steps:
- uses: actions/checkout@v3 - uses: actions/checkout@v3
- name: Build - name: Build
run: cargo build -p deltachat-repl --features vendored run: cargo build -p deltachat-repl --features vendored
- name: Upload binary - name: Upload binary
uses: actions/upload-artifact@v3 uses: actions/upload-artifact@v3
with: with:
name: repl.exe name: repl.exe
path: 'target/debug/deltachat-repl.exe' path: "target/debug/deltachat-repl.exe"

View File

@@ -8,20 +8,18 @@ on:
jobs: jobs:
build: build:
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
- uses: actions/checkout@v3 - uses: actions/checkout@v3
- name: Build the documentation with cargo - name: Build the documentation with cargo
run: | run: |
cargo doc --package deltachat --no-deps cargo doc --package deltachat --no-deps
- name: Upload to rs.delta.chat - name: Upload to rs.delta.chat
uses: up9cloud/action-rsync@v1.3 uses: up9cloud/action-rsync@v1.3
env: env:
USER: ${{ secrets.USERNAME }} USER: ${{ secrets.USERNAME }}
KEY: ${{ secrets.KEY }} KEY: ${{ secrets.KEY }}
HOST: "delta.chat" HOST: "delta.chat"
SOURCE: "target/doc" SOURCE: "target/doc"
TARGET: "/var/www/html/rs/" TARGET: "/var/www/html/rs/"

View File

@@ -8,20 +8,18 @@ on:
jobs: jobs:
build: build:
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
- uses: actions/checkout@v3 - uses: actions/checkout@v3
- name: Build the documentation with cargo - name: Build the documentation with cargo
run: | run: |
cargo doc --package deltachat_ffi --no-deps cargo doc --package deltachat_ffi --no-deps
- name: Upload to cffi.delta.chat - name: Upload to cffi.delta.chat
uses: up9cloud/action-rsync@v1.3 uses: up9cloud/action-rsync@v1.3
env: env:
USER: ${{ secrets.USERNAME }} USER: ${{ secrets.USERNAME }}
KEY: ${{ secrets.KEY }} KEY: ${{ secrets.KEY }}
HOST: "delta.chat" HOST: "delta.chat"
SOURCE: "target/doc" SOURCE: "target/doc"
TARGET: "/var/www/html/cffi/" TARGET: "/var/www/html/cffi/"

View File

@@ -3,20 +3,34 @@
## Unreleased ## Unreleased
### Changes ### Changes
- deltachat-rpc-client: use `dataclass` for `Account`, `Chat`, `Contact` and `Message` #4042 - use transaction in `Contact::add_or_lookup()` #4059
- python: mark bindings as supporting typing according to PEP 561 #4045
- retry filesystem operations during account migration #4043
- ability to send backup over network and QR code to setup second device #4007 - ability to send backup over network and QR code to setup second device #4007
### Fixes
- Start SQL transactions with IMMEDIATE behaviour rather than default DEFERRED one. #4063
### API-Changes
## 1.109.0
### Changes
- deltachat-rpc-client: use `dataclass` for `Account`, `Chat`, `Contact` and `Message` #4042
### Fixes ### Fixes
- deltachat-rpc-server: do not block stdin while processing the request. #4041 - deltachat-rpc-server: do not block stdin while processing the request. #4041
deltachat-rpc-server now reads the next request as soon as previous request handler is spawned. deltachat-rpc-server now reads the next request as soon as previous request handler is spawned.
- enable `auto_vacuum` on all SQL connections #2955 - Enable `auto_vacuum` on all SQL connections. #2955
- Replace `r2d2` connection pool with an own implementation. #4050 #4053 #4043 #4061
This change improves reliability
by closing all database connections immediately when the context is closed.
### API-Changes ### API-Changes
- Remove `MimeMessage::from_bytes()` public interface. #4033 - Remove `MimeMessage::from_bytes()` public interface. #4033
- BREAKING Types: jsonrpc: `get_messages` now returns a map with `MessageLoadResult` instead of failing completely if one of the requested messages could not be loaded. #4038 - BREAKING Types: jsonrpc: `get_messages` now returns a map with `MessageLoadResult` instead of failing completely if one of the requested messages could not be loaded. #4038
- Add `dc_msg_set_subject()`. C-FFI #4057
- Mark python bindings as supporting typing according to PEP 561 #4045
## 1.108.0 ## 1.108.0

188
Cargo.lock generated
View File

@@ -306,7 +306,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "acee9fd5073ab6b045a275b3e709c163dd36c90685219cb21804a147b58dba43" checksum = "acee9fd5073ab6b045a275b3e709c163dd36c90685219cb21804a147b58dba43"
dependencies = [ dependencies = [
"async-trait", "async-trait",
"axum-core 0.2.9", "axum-core",
"base64 0.13.1", "base64 0.13.1",
"bitflags", "bitflags",
"bytes", "bytes",
@@ -315,7 +315,7 @@ dependencies = [
"http-body", "http-body",
"hyper", "hyper",
"itoa", "itoa",
"matchit 0.5.0", "matchit",
"memchr", "memchr",
"mime", "mime",
"percent-encoding", "percent-encoding",
@@ -326,43 +326,7 @@ dependencies = [
"sha-1", "sha-1",
"sync_wrapper", "sync_wrapper",
"tokio", "tokio",
"tokio-tungstenite 0.17.2", "tokio-tungstenite",
"tower",
"tower-http",
"tower-layer",
"tower-service",
]
[[package]]
name = "axum"
version = "0.6.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e5694b64066a2459918d8074c2ce0d5a88f409431994c2356617c8ae0c4721fc"
dependencies = [
"async-trait",
"axum-core 0.3.2",
"base64 0.20.0",
"bitflags",
"bytes",
"futures-util",
"http",
"http-body",
"hyper",
"itoa",
"matchit 0.7.0",
"memchr",
"mime",
"percent-encoding",
"pin-project-lite",
"rustversion",
"serde",
"serde_json",
"serde_path_to_error",
"serde_urlencoded",
"sha1",
"sync_wrapper",
"tokio",
"tokio-tungstenite 0.18.0",
"tower", "tower",
"tower-http", "tower-http",
"tower-layer", "tower-layer",
@@ -385,23 +349,6 @@ dependencies = [
"tower-service", "tower-service",
] ]
[[package]]
name = "axum-core"
version = "0.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1cae3e661676ffbacb30f1a824089a8c9150e71017f7e1e38f2aa32009188d34"
dependencies = [
"async-trait",
"bytes",
"futures-util",
"http",
"http-body",
"mime",
"rustversion",
"tower-layer",
"tower-service",
]
[[package]] [[package]]
name = "backtrace" name = "backtrace"
version = "0.3.67" version = "0.3.67"
@@ -441,12 +388,6 @@ version = "0.13.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9e1b586273c5702936fe7b7d6896644d8be71e6314cfe09d3167c95f712589e8" checksum = "9e1b586273c5702936fe7b7d6896644d8be71e6314cfe09d3167c95f712589e8"
[[package]]
name = "base64"
version = "0.20.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0ea22880d78093b0cbe17c89f64a7d457941e65759157ec6cb31a31d652b05e5"
[[package]] [[package]]
name = "base64" name = "base64"
version = "0.21.0" version = "0.21.0"
@@ -1055,7 +996,7 @@ checksum = "23d8666cb01533c39dde32bcbab8e227b4ed6679b2c925eba05feabea39508fb"
[[package]] [[package]]
name = "deltachat" name = "deltachat"
version = "1.108.0" version = "1.109.0"
dependencies = [ dependencies = [
"ansi_term", "ansi_term",
"anyhow", "anyhow",
@@ -1069,6 +1010,7 @@ dependencies = [
"bitflags", "bitflags",
"chrono", "chrono",
"criterion", "criterion",
"crossbeam-queue",
"deltachat_derive", "deltachat_derive",
"email", "email",
"encoded-words", "encoded-words",
@@ -1096,8 +1038,6 @@ dependencies = [
"proptest", "proptest",
"qrcodegen", "qrcodegen",
"quick-xml", "quick-xml",
"r2d2",
"r2d2_sqlite",
"rand 0.8.5", "rand 0.8.5",
"ratelimit", "ratelimit",
"regex", "regex",
@@ -1130,11 +1070,11 @@ dependencies = [
[[package]] [[package]]
name = "deltachat-jsonrpc" name = "deltachat-jsonrpc"
version = "1.108.0" version = "1.109.0"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"async-channel", "async-channel",
"axum 0.6.4", "axum",
"deltachat", "deltachat",
"env_logger 0.10.0", "env_logger 0.10.0",
"futures", "futures",
@@ -1152,7 +1092,7 @@ dependencies = [
[[package]] [[package]]
name = "deltachat-repl" name = "deltachat-repl"
version = "1.108.0" version = "1.109.0"
dependencies = [ dependencies = [
"ansi_term", "ansi_term",
"anyhow", "anyhow",
@@ -1167,7 +1107,7 @@ dependencies = [
[[package]] [[package]]
name = "deltachat-rpc-server" name = "deltachat-rpc-server"
version = "1.108.0" version = "1.109.0"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"deltachat-jsonrpc", "deltachat-jsonrpc",
@@ -1190,7 +1130,7 @@ dependencies = [
[[package]] [[package]]
name = "deltachat_ffi" name = "deltachat_ffi"
version = "1.108.0" version = "1.109.0"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"deltachat", "deltachat",
@@ -1924,15 +1864,6 @@ version = "1.8.2"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "eabb4a44450da02c90444cf74558da904edde8fb4e9035a9a6a4e15445af0bd7" checksum = "eabb4a44450da02c90444cf74558da904edde8fb4e9035a9a6a4e15445af0bd7"
[[package]]
name = "hashbrown"
version = "0.11.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ab5ef0d4909ef3724cc8cce6ccc8572c5c817592e9285f5464f8e86f8bd3726e"
dependencies = [
"ahash",
]
[[package]] [[package]]
name = "hashbrown" name = "hashbrown"
version = "0.12.3" version = "0.12.3"
@@ -1944,11 +1875,11 @@ dependencies = [
[[package]] [[package]]
name = "hashlink" name = "hashlink"
version = "0.7.0" version = "0.8.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7249a3129cbc1ffccd74857f81464a323a152173cdb134e0fd81bc803b29facf" checksum = "69fe1fcf8b4278d860ad0548329f892a3631fb63f82574df68275f34cdbe0ffa"
dependencies = [ dependencies = [
"hashbrown 0.11.2", "hashbrown",
] ]
[[package]] [[package]]
@@ -2190,7 +2121,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "10a35a97730320ffe8e2d410b5d3b69279b98d2c14bdb8b70ea89ecf7888d41e" checksum = "10a35a97730320ffe8e2d410b5d3b69279b98d2c14bdb8b70ea89ecf7888d41e"
dependencies = [ dependencies = [
"autocfg", "autocfg",
"hashbrown 0.12.3", "hashbrown",
] ]
[[package]] [[package]]
@@ -2349,9 +2280,9 @@ checksum = "292a948cd991e376cf75541fe5b97a1081d713c618b4f1b9500f8844e49eb565"
[[package]] [[package]]
name = "libsqlite3-sys" name = "libsqlite3-sys"
version = "0.24.2" version = "0.25.2"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "898745e570c7d0453cc1fbc4a701eb6c662ed54e8fec8b7d14be137ebeeb9d14" checksum = "29f835d03d717946d28b1d1ed632eb6f0e24a299388ee623d0c23118d3e8a7fa"
dependencies = [ dependencies = [
"cc", "cc",
"openssl-sys", "openssl-sys",
@@ -2443,12 +2374,6 @@ version = "0.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "73cbba799671b762df5a175adf59ce145165747bb891505c43d09aefbbf38beb" checksum = "73cbba799671b762df5a175adf59ce145165747bb891505c43d09aefbbf38beb"
[[package]]
name = "matchit"
version = "0.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b87248edafb776e59e6ee64a79086f65890d3510f2c656c000bf2a7e8a0aea40"
[[package]] [[package]]
name = "md-5" name = "md-5"
version = "0.10.5" version = "0.10.5"
@@ -3219,27 +3144,6 @@ version = "0.4.6"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "20f14e071918cbeefc5edc986a7aa92c425dae244e003a35e1cdddb5ca39b5cb" checksum = "20f14e071918cbeefc5edc986a7aa92c425dae244e003a35e1cdddb5ca39b5cb"
[[package]]
name = "r2d2"
version = "0.8.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "51de85fb3fb6524929c8a2eb85e6b6d363de4e8c48f9e2c2eac4944abc181c93"
dependencies = [
"log",
"parking_lot",
"scheduled-thread-pool",
]
[[package]]
name = "r2d2_sqlite"
version = "0.20.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6fdc8e4da70586127893be32b7adf21326a4c6b1aba907611edf467d13ffe895"
dependencies = [
"r2d2",
"rusqlite",
]
[[package]] [[package]]
name = "radix_trie" name = "radix_trie"
version = "0.2.1" version = "0.2.1"
@@ -3530,16 +3434,15 @@ dependencies = [
[[package]] [[package]]
name = "rusqlite" name = "rusqlite"
version = "0.27.0" version = "0.28.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "85127183a999f7db96d1a976a309eebbfb6ea3b0b400ddd8340190129de6eb7a" checksum = "01e213bc3ecb39ac32e81e51ebe31fd888a940515173e3a18a35f8c6e896422a"
dependencies = [ dependencies = [
"bitflags", "bitflags",
"fallible-iterator", "fallible-iterator",
"fallible-streaming-iterator", "fallible-streaming-iterator",
"hashlink", "hashlink",
"libsqlite3-sys", "libsqlite3-sys",
"memchr",
"smallvec", "smallvec",
] ]
@@ -3700,15 +3603,6 @@ dependencies = [
"windows-sys 0.36.1", "windows-sys 0.36.1",
] ]
[[package]]
name = "scheduled-thread-pool"
version = "0.2.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "977a7519bff143a44f842fd07e80ad1329295bd71686457f18e496736f4bf9bf"
dependencies = [
"parking_lot",
]
[[package]] [[package]]
name = "scopeguard" name = "scopeguard"
version = "1.1.0" version = "1.1.0"
@@ -3845,15 +3739,6 @@ dependencies = [
"serde", "serde",
] ]
[[package]]
name = "serde_path_to_error"
version = "0.1.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "184c643044780f7ceb59104cef98a5a6f12cb2288a7bc701ab93a362b49fd47d"
dependencies = [
"serde",
]
[[package]] [[package]]
name = "serde_spanned" name = "serde_spanned"
version = "0.6.1" version = "0.6.1"
@@ -4366,19 +4251,7 @@ dependencies = [
"futures-util", "futures-util",
"log", "log",
"tokio", "tokio",
"tungstenite 0.17.3", "tungstenite",
]
[[package]]
name = "tokio-tungstenite"
version = "0.18.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "54319c93411147bced34cb5609a80e0a8e44c5999c93903a81cd866630ec0bfd"
dependencies = [
"futures-util",
"log",
"tokio",
"tungstenite 0.18.0",
] ]
[[package]] [[package]]
@@ -4628,25 +4501,6 @@ dependencies = [
"utf-8", "utf-8",
] ]
[[package]]
name = "tungstenite"
version = "0.18.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "30ee6ab729cd4cf0fd55218530c4522ed30b7b6081752839b68fcec8d0960788"
dependencies = [
"base64 0.13.1",
"byteorder",
"bytes",
"http",
"httparse",
"log",
"rand 0.8.5",
"sha1",
"thiserror",
"url",
"utf-8",
]
[[package]] [[package]]
name = "twofish" name = "twofish"
version = "0.7.1" version = "0.7.1"
@@ -4704,7 +4558,7 @@ version = "0.1.4"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c5faade31a542b8b35855fff6e8def199853b2da8da256da52f52f1316ee3137" checksum = "c5faade31a542b8b35855fff6e8def199853b2da8da256da52f52f1316ee3137"
dependencies = [ dependencies = [
"hashbrown 0.12.3", "hashbrown",
"regex", "regex",
] ]
@@ -5166,7 +5020,7 @@ dependencies = [
"async-channel", "async-channel",
"async-mutex", "async-mutex",
"async-trait", "async-trait",
"axum 0.5.17", "axum",
"futures", "futures",
"futures-util", "futures-util",
"log", "log",

View File

@@ -1,6 +1,6 @@
[package] [package]
name = "deltachat" name = "deltachat"
version = "1.108.0" version = "1.109.0"
edition = "2021" edition = "2021"
license = "MPL-2.0" license = "MPL-2.0"
rust-version = "1.63" rust-version = "1.63"
@@ -29,21 +29,24 @@ format-flowed = { path = "./format-flowed" }
ratelimit = { path = "./deltachat-ratelimit" } ratelimit = { path = "./deltachat-ratelimit" }
anyhow = "1" anyhow = "1"
async-channel = "1.8.0"
async-imap = { git = "https://github.com/async-email/async-imap", branch = "master", default-features = false, features = ["runtime-tokio"] } async-imap = { git = "https://github.com/async-email/async-imap", branch = "master", default-features = false, features = ["runtime-tokio"] }
async-native-tls = { version = "0.4", default-features = false, features = ["runtime-tokio"] } async-native-tls = { version = "0.4", default-features = false, features = ["runtime-tokio"] }
async-smtp = { version = "0.8", default-features = false, features = ["runtime-tokio"] } async-smtp = { version = "0.8", default-features = false, features = ["runtime-tokio"] }
trust-dns-resolver = "0.22" async_zip = { version = "0.0.9", default-features = false, features = ["deflate"] }
tokio = { version = "1", features = ["fs", "rt-multi-thread", "macros"] }
tokio-tar = { version = "0.3" } # TODO: integrate tokio into async-tar
backtrace = "0.3" backtrace = "0.3"
base64 = "0.21" base64 = "0.21"
bitflags = "1.3" bitflags = "1.3"
chrono = { version = "0.4", default-features=false, features = ["clock", "std"] } chrono = { version = "0.4", default-features=false, features = ["clock", "std"] }
crossbeam-queue = "0.3"
email = { git = "https://github.com/deltachat/rust-email", branch = "master" } email = { git = "https://github.com/deltachat/rust-email", branch = "master" }
encoded-words = { git = "https://github.com/async-email/encoded-words", branch = "master" } encoded-words = { git = "https://github.com/async-email/encoded-words", branch = "master" }
escaper = "0.1" escaper = "0.1"
fast-socks5 = "0.8"
futures = "0.3" futures = "0.3"
futures-lite = "1.12.0"
hex = "0.4.0" hex = "0.4.0"
humansize = "2"
image = { version = "0.24.5", default-features=false, features = ["gif", "jpeg", "ico", "png", "pnm", "webp", "bmp"] } image = { version = "0.24.5", default-features=false, features = ["gif", "jpeg", "ico", "png", "pnm", "webp", "bmp"] }
kamadak-exif = "0.5" kamadak-exif = "0.5"
lettre_email = { git = "https://github.com/deltachat/lettre", branch = "master" } lettre_email = { git = "https://github.com/deltachat/lettre", branch = "master" }
@@ -57,12 +60,12 @@ once_cell = "1.17.0"
percent-encoding = "2.2" percent-encoding = "2.2"
pgp = { version = "0.9", default-features = false } pgp = { version = "0.9", default-features = false }
pretty_env_logger = { version = "0.4", optional = true } pretty_env_logger = { version = "0.4", optional = true }
qrcodegen = "1.7.0"
quick-xml = "0.27" quick-xml = "0.27"
r2d2 = "0.8"
r2d2_sqlite = "0.20"
rand = "0.8" rand = "0.8"
regex = "1.7" regex = "1.7"
rusqlite = { version = "0.27", features = ["sqlcipher"] } reqwest = { version = "0.11.14", features = ["json"] }
rusqlite = { version = "0.28", features = ["sqlcipher"] }
rust-hsluv = "0.1" rust-hsluv = "0.1"
sanitize-filename = "0.4" sanitize-filename = "0.4"
sendme = { git = "https://github.com/n0-computer/sendme", branch = "main", default-features = false } sendme = { git = "https://github.com/n0-computer/sendme", branch = "main", default-features = false }
@@ -73,21 +76,17 @@ sha2 = "0.10"
smallvec = "1" smallvec = "1"
strum = "0.24" strum = "0.24"
strum_macros = "0.24" strum_macros = "0.24"
thiserror = "1"
toml = "0.7"
url = "2"
uuid = { version = "1", features = ["serde", "v4"] }
fast-socks5 = "0.8"
humansize = "2"
qrcodegen = "1.7.0"
tagger = "4.3.4" tagger = "4.3.4"
textwrap = "0.16.0" textwrap = "0.16.0"
async-channel = "1.8.0" thiserror = "1"
futures-lite = "1.12.0"
tokio-stream = { version = "0.1.11", features = ["fs"] }
tokio-io-timeout = "1.2.0" tokio-io-timeout = "1.2.0"
reqwest = { version = "0.11.14", features = ["json"] } tokio-stream = { version = "0.1.11", features = ["fs"] }
async_zip = { version = "0.0.9", default-features = false, features = ["deflate"] } tokio-tar = { version = "0.3" } # TODO: integrate tokio into async-tar
tokio = { version = "1", features = ["fs", "rt-multi-thread", "macros"] }
toml = "0.7"
trust-dns-resolver = "0.22"
url = "2"
uuid = { version = "1", features = ["serde", "v4"] }
[dev-dependencies] [dev-dependencies]
ansi_term = "0.12.0" ansi_term = "0.12.0"

View File

@@ -1,6 +1,6 @@
[package] [package]
name = "deltachat_ffi" name = "deltachat_ffi"
version = "1.108.0" version = "1.109.0"
description = "Deltachat FFI" description = "Deltachat FFI"
edition = "2018" edition = "2018"
readme = "README.md" readme = "README.md"

View File

@@ -4443,6 +4443,18 @@ void dc_msg_set_text (dc_msg_t* msg, const char* text);
void dc_msg_set_html (dc_msg_t* msg, const char* html); void dc_msg_set_html (dc_msg_t* msg, const char* html);
/**
* Sets the email's subject. If it's empty, a default subject
* will be used (e.g. `Message from Alice` or `Re: <last subject>`).
* This does not alter any information in the database.
*
* @memberof dc_msg_t
* @param msg The message object.
* @param subject The new subject.
*/
void dc_msg_set_subject (dc_msg_t* msg, const char* subject);
/** /**
* Set different sender name for a message. * Set different sender name for a message.
* This overrides the name set by the dc_set_config()-option `displayname`. * This overrides the name set by the dc_set_config()-option `displayname`.

View File

@@ -2004,12 +2004,10 @@ pub unsafe extern "C" fn dc_create_contact(
let ctx = &*context; let ctx = &*context;
let name = to_string_lossy(name); let name = to_string_lossy(name);
block_on(async move { block_on(Contact::create(ctx, &name, &to_string_lossy(addr)))
Contact::create(ctx, &name, &to_string_lossy(addr)) .log_err(ctx, "Cannot create contact")
.await .map(|id| id.to_u32())
.map(|id| id.to_u32()) .unwrap_or(0)
.unwrap_or(0)
})
} }
#[no_mangle] #[no_mangle]
@@ -3599,6 +3597,16 @@ pub unsafe extern "C" fn dc_msg_set_html(msg: *mut dc_msg_t, html: *const libc::
ffi_msg.message.set_html(to_opt_string_lossy(html)) ffi_msg.message.set_html(to_opt_string_lossy(html))
} }
#[no_mangle]
pub unsafe extern "C" fn dc_msg_set_subject(msg: *mut dc_msg_t, subject: *const libc::c_char) {
if msg.is_null() {
eprintln!("ignoring careless call to dc_msg_get_subject()");
return;
}
let ffi_msg = &mut *msg;
ffi_msg.message.set_subject(to_string_lossy(subject));
}
#[no_mangle] #[no_mangle]
pub unsafe extern "C" fn dc_msg_set_override_sender_name( pub unsafe extern "C" fn dc_msg_set_override_sender_name(
msg: *mut dc_msg_t, msg: *mut dc_msg_t,

View File

@@ -1,6 +1,6 @@
[package] [package]
name = "deltachat-jsonrpc" name = "deltachat-jsonrpc"
version = "1.108.0" version = "1.109.0"
description = "DeltaChat JSON-RPC API" description = "DeltaChat JSON-RPC API"
edition = "2021" edition = "2021"
default-run = "deltachat-jsonrpc-server" default-run = "deltachat-jsonrpc-server"
@@ -28,7 +28,7 @@ sanitize-filename = "0.4"
walkdir = "2.3.2" walkdir = "2.3.2"
# optional dependencies # optional dependencies
axum = { version = "0.6.4", optional = true, features = ["ws"] } axum = { version = "0.5.9", optional = true, features = ["ws"] }
env_logger = { version = "0.10.0", optional = true } env_logger = { version = "0.10.0", optional = true }
[dev-dependencies] [dev-dependencies]

View File

@@ -48,5 +48,5 @@
}, },
"type": "module", "type": "module",
"types": "dist/deltachat.d.ts", "types": "dist/deltachat.d.ts",
"version": "1.108.0" "version": "1.109.0"
} }

View File

@@ -1,6 +1,6 @@
[package] [package]
name = "deltachat-repl" name = "deltachat-repl"
version = "1.108.0" version = "1.109.0"
edition = "2021" edition = "2021"
[dependencies] [dependencies]
@@ -10,7 +10,7 @@ deltachat = { path = "..", features = ["internals"]}
dirs = "4" dirs = "4"
log = "0.4.16" log = "0.4.16"
pretty_env_logger = "0.4" pretty_env_logger = "0.4"
rusqlite = "0.27" rusqlite = "0.28"
rustyline = "10" rustyline = "10"
tokio = { version = "1", features = ["fs", "rt-multi-thread", "macros"] } tokio = { version = "1", features = ["fs", "rt-multi-thread", "macros"] }

View File

@@ -1,6 +1,6 @@
[package] [package]
name = "deltachat-rpc-server" name = "deltachat-rpc-server"
version = "1.108.0" version = "1.109.0"
description = "DeltaChat JSON-RPC server" description = "DeltaChat JSON-RPC server"
edition = "2021" edition = "2021"
readme = "README.md" readme = "README.md"

View File

@@ -60,5 +60,5 @@
"test:mocha": "mocha -r esm node/test/test.js --growl --reporter=spec --bail --exit" "test:mocha": "mocha -r esm node/test/test.js --growl --reporter=spec --bail --exit"
}, },
"types": "node/dist/index.d.ts", "types": "node/dist/index.d.ts",
"version": "1.108.0" "version": "1.109.0"
} }

View File

@@ -6,7 +6,7 @@ from array import array
from contextlib import contextmanager from contextlib import contextmanager
from email.utils import parseaddr from email.utils import parseaddr
from threading import Event from threading import Event
from typing import Any, Dict, Generator, List, Optional, Union from typing import Any, Dict, Generator, List, Optional, Union, TYPE_CHECKING
from . import const, hookspec from . import const, hookspec
from .capi import ffi, lib from .capi import ffi, lib
@@ -22,6 +22,9 @@ from .cutil import (
from .message import Message from .message import Message
from .tracker import ConfigureTracker, ImexTracker from .tracker import ConfigureTracker, ImexTracker
if TYPE_CHECKING:
from .events import FFIEventTracker
class MissingCredentials(ValueError): class MissingCredentials(ValueError):
"""Account is missing `addr` and `mail_pw` config values.""" """Account is missing `addr` and `mail_pw` config values."""
@@ -60,6 +63,9 @@ class Account:
MissingCredentials = MissingCredentials MissingCredentials = MissingCredentials
_logid: str
_evtracker: "FFIEventTracker"
def __init__(self, db_path, os_name=None, logging=True, closed=False) -> None: def __init__(self, db_path, os_name=None, logging=True, closed=False) -> None:
from .events import EventThread from .events import EventThread

View File

@@ -207,14 +207,14 @@ class IdleManager:
return res return res
def wait_for_new_message(self, timeout=None) -> bytes: def wait_for_new_message(self, timeout=None) -> bytes:
while 1: while True:
for item in self.check(timeout=timeout): for item in self.check(timeout=timeout):
if b"EXISTS" in item or b"RECENT" in item: if b"EXISTS" in item or b"RECENT" in item:
return item return item
def wait_for_seen(self, timeout=None) -> int: def wait_for_seen(self, timeout=None) -> int:
"""Return first message with SEEN flag from a running idle-stream.""" """Return first message with SEEN flag from a running idle-stream."""
while 1: while True:
for item in self.check(timeout=timeout): for item in self.check(timeout=timeout):
if FETCH in item: if FETCH in item:
self.log(str(item)) self.log(str(item))

View File

@@ -108,7 +108,7 @@ class FFIEventTracker:
return ev return ev
def iter_events(self, timeout=None, check_error=True): def iter_events(self, timeout=None, check_error=True):
while 1: while True:
yield self.get(timeout=timeout, check_error=check_error) yield self.get(timeout=timeout, check_error=check_error)
def get_matching(self, event_name_regex, check_error=True, timeout=None): def get_matching(self, event_name_regex, check_error=True, timeout=None):
@@ -119,14 +119,14 @@ class FFIEventTracker:
def get_info_contains(self, regex: str) -> FFIEvent: def get_info_contains(self, regex: str) -> FFIEvent:
rex = re.compile(regex) rex = re.compile(regex)
while 1: while True:
ev = self.get_matching("DC_EVENT_INFO") ev = self.get_matching("DC_EVENT_INFO")
if rex.search(ev.data2): if rex.search(ev.data2):
return ev return ev
def get_info_regex_groups(self, regex, check_error=True): def get_info_regex_groups(self, regex, check_error=True):
rex = re.compile(regex) rex = re.compile(regex)
while 1: while True:
ev = self.get_matching("DC_EVENT_INFO", check_error=check_error) ev = self.get_matching("DC_EVENT_INFO", check_error=check_error)
m = rex.match(ev.data2) m = rex.match(ev.data2)
if m is not None: if m is not None:
@@ -137,7 +137,7 @@ class FFIEventTracker:
This only works reliably if the connectivity doesn't change This only works reliably if the connectivity doesn't change
again too quickly, otherwise we might miss it. again too quickly, otherwise we might miss it.
""" """
while 1: while True:
if self.account.get_connectivity() == connectivity: if self.account.get_connectivity() == connectivity:
return return
self.get_matching("DC_EVENT_CONNECTIVITY_CHANGED") self.get_matching("DC_EVENT_CONNECTIVITY_CHANGED")
@@ -146,7 +146,7 @@ class FFIEventTracker:
"""Wait until the connectivity changes to `expected_next`. """Wait until the connectivity changes to `expected_next`.
Fails the test if it changes to something else. Fails the test if it changes to something else.
""" """
while 1: while True:
current = self.account.get_connectivity() current = self.account.get_connectivity()
if current == expected_next: if current == expected_next:
return return
@@ -156,7 +156,7 @@ class FFIEventTracker:
self.get_matching("DC_EVENT_CONNECTIVITY_CHANGED") self.get_matching("DC_EVENT_CONNECTIVITY_CHANGED")
def wait_for_all_work_done(self): def wait_for_all_work_done(self):
while 1: while True:
if self.account.all_work_done(): if self.account.all_work_done():
return return
self.get_matching("DC_EVENT_CONNECTIVITY_CHANGED") self.get_matching("DC_EVENT_CONNECTIVITY_CHANGED")
@@ -164,7 +164,7 @@ class FFIEventTracker:
def ensure_event_not_queued(self, event_name_regex): def ensure_event_not_queued(self, event_name_regex):
__tracebackhide__ = True __tracebackhide__ = True
rex = re.compile(f"(?:{event_name_regex}).*") rex = re.compile(f"(?:{event_name_regex}).*")
while 1: while True:
try: try:
ev = self._event_queue.get(False) ev = self._event_queue.get(False)
except Empty: except Empty:
@@ -173,7 +173,7 @@ class FFIEventTracker:
assert not rex.match(ev.name), f"event found {ev}" assert not rex.match(ev.name), f"event found {ev}"
def wait_securejoin_inviter_progress(self, target): def wait_securejoin_inviter_progress(self, target):
while 1: while True:
event = self.get_matching("DC_EVENT_SECUREJOIN_INVITER_PROGRESS") event = self.get_matching("DC_EVENT_SECUREJOIN_INVITER_PROGRESS")
if event.data2 >= target: if event.data2 >= target:
print(f"** SECUREJOINT-INVITER PROGRESS {target}", self.account) print(f"** SECUREJOINT-INVITER PROGRESS {target}", self.account)

View File

@@ -275,6 +275,8 @@ class ACSetup:
CONFIGURED = "CONFIGURED" CONFIGURED = "CONFIGURED"
IDLEREADY = "IDLEREADY" IDLEREADY = "IDLEREADY"
_configured_events: Queue
def __init__(self, testprocess, init_time): def __init__(self, testprocess, init_time):
self._configured_events = Queue() self._configured_events = Queue()
self._account2state = {} self._account2state = {}
@@ -307,7 +309,7 @@ class ACSetup:
def wait_one_configured(self, account): def wait_one_configured(self, account):
"""wait until this account has successfully configured.""" """wait until this account has successfully configured."""
if self._account2state[account] == self.CONFIGURING: if self._account2state[account] == self.CONFIGURING:
while 1: while True:
acc = self._pop_config_success() acc = self._pop_config_success()
if acc == account: if acc == account:
break break
@@ -376,8 +378,13 @@ class ACSetup:
class ACFactory: class ACFactory:
"""Account factory"""
init_time: float
_finalizers: List[Callable[[], None]] _finalizers: List[Callable[[], None]]
_accounts: List[Account] _accounts: List[Account]
_acsetup: ACSetup
_preconfigured_keys: List[str]
def __init__(self, request, testprocess, tmpdir, data) -> None: def __init__(self, request, testprocess, tmpdir, data) -> None:
self.init_time = time.time() self.init_time = time.time()
@@ -429,14 +436,15 @@ class ACFactory:
assert "addr" in configdict and "mail_pw" in configdict assert "addr" in configdict and "mail_pw" in configdict
return configdict return configdict
def _get_cached_account(self, addr): def _get_cached_account(self, addr) -> Optional[Account]:
if addr in self.testprocess._addr2files: if addr in self.testprocess._addr2files:
return self._getaccount(addr) return self._getaccount(addr)
return None
def get_unconfigured_account(self, closed=False): def get_unconfigured_account(self, closed=False) -> Account:
return self._getaccount(closed=closed) return self._getaccount(closed=closed)
def _getaccount(self, try_cache_addr=None, closed=False): def _getaccount(self, try_cache_addr=None, closed=False) -> Account:
logid = f"ac{len(self._accounts) + 1}" logid = f"ac{len(self._accounts) + 1}"
# we need to use fixed database basename for maybe_cache_* functions to work # we need to use fixed database basename for maybe_cache_* functions to work
path = self.tmpdir.mkdir(logid).join("dc.db") path = self.tmpdir.mkdir(logid).join("dc.db")
@@ -450,10 +458,10 @@ class ACFactory:
self._accounts.append(ac) self._accounts.append(ac)
return ac return ac
def set_logging_default(self, logging): def set_logging_default(self, logging) -> None:
self._logging = bool(logging) self._logging = bool(logging)
def remove_preconfigured_keys(self): def remove_preconfigured_keys(self) -> None:
self._preconfigured_keys = [] self._preconfigured_keys = []
def _preconfigure_key(self, account, addr): def _preconfigure_key(self, account, addr):
@@ -491,7 +499,7 @@ class ACFactory:
self._acsetup.init_logging(ac) self._acsetup.init_logging(ac)
return ac return ac
def new_online_configuring_account(self, cloned_from=None, cache=False, **kwargs): def new_online_configuring_account(self, cloned_from=None, cache=False, **kwargs) -> Account:
if cloned_from is None: if cloned_from is None:
configdict = self.get_next_liveconfig() configdict = self.get_next_liveconfig()
else: else:
@@ -513,7 +521,7 @@ class ACFactory:
self._acsetup.start_configure(ac) self._acsetup.start_configure(ac)
return ac return ac
def prepare_account_from_liveconfig(self, configdict): def prepare_account_from_liveconfig(self, configdict) -> Account:
ac = self.get_unconfigured_account() ac = self.get_unconfigured_account()
assert "addr" in configdict and "mail_pw" in configdict, configdict assert "addr" in configdict and "mail_pw" in configdict, configdict
configdict.setdefault("bcc_self", False) configdict.setdefault("bcc_self", False)
@@ -523,11 +531,11 @@ class ACFactory:
self._preconfigure_key(ac, configdict["addr"]) self._preconfigure_key(ac, configdict["addr"])
return ac return ac
def wait_configured(self, account): def wait_configured(self, account) -> None:
"""Wait until the specified account has successfully completed configure.""" """Wait until the specified account has successfully completed configure."""
self._acsetup.wait_one_configured(account) self._acsetup.wait_one_configured(account)
def bring_accounts_online(self): def bring_accounts_online(self) -> None:
print("bringing accounts online") print("bringing accounts online")
self._acsetup.bring_online() self._acsetup.bring_online()
print("all accounts online") print("all accounts online")
@@ -630,7 +638,7 @@ class BotProcess:
def _run_stdout_thread(self) -> None: def _run_stdout_thread(self) -> None:
try: try:
while 1: while True:
line = self.popen.stdout.readline() line = self.popen.stdout.readline()
if not line: if not line:
break break
@@ -651,7 +659,7 @@ class BotProcess:
for next_pattern in patterns: for next_pattern in patterns:
print("+++FNMATCH:", next_pattern) print("+++FNMATCH:", next_pattern)
ignored = [] ignored = []
while 1: while True:
line = self.stdout_queue.get() line = self.stdout_queue.get()
if line is None: if line is None:
if ignored: if ignored:

View File

@@ -85,7 +85,7 @@ class ConfigureTracker:
self._imap_finished.wait() self._imap_finished.wait()
def wait_progress(self, data1=None): def wait_progress(self, data1=None):
while 1: while True:
evdata = self._progress.get() evdata = self._progress.get()
if data1 is None or evdata == data1: if data1 is None or evdata == data1:
break break

View File

@@ -8,7 +8,7 @@ envlist =
[testenv] [testenv]
commands = commands =
pytest -n6 --extra-info --reruns 2 --reruns-delay 5 -v -rsXx --ignored --strict-tls {posargs: tests examples} pytest -n6 --extra-info -v -rsXx --ignored --strict-tls {posargs: tests examples}
pip wheel . -w {toxworkdir}/wheelhouse --no-deps pip wheel . -w {toxworkdir}/wheelhouse --no-deps
setenv = setenv =
# Avoid stack overflow when Rust core is built without optimizations. # Avoid stack overflow when Rust core is built without optimizations.
@@ -21,7 +21,6 @@ passenv =
RUSTC_WRAPPER RUSTC_WRAPPER
deps = deps =
pytest pytest
pytest-rerunfailures
pytest-timeout pytest-timeout
pytest-xdist pytest-xdist
pdbpp pdbpp

View File

@@ -1,7 +1,6 @@
//! # Account manager module. //! # Account manager module.
use std::collections::BTreeMap; use std::collections::BTreeMap;
use std::future::Future;
use std::path::{Path, PathBuf}; use std::path::{Path, PathBuf};
use anyhow::{ensure, Context as _, Result}; use anyhow::{ensure, Context as _, Result};
@@ -151,7 +150,7 @@ impl Accounts {
if let Some(cfg) = self.config.get_account(id) { if let Some(cfg) = self.config.get_account(id) {
let account_path = self.dir.join(cfg.dir); let account_path = self.dir.join(cfg.dir);
try_many_times(|| fs::remove_dir_all(&account_path)) fs::remove_dir_all(&account_path)
.await .await
.context("failed to remove account data")?; .context("failed to remove account data")?;
} }
@@ -187,10 +186,10 @@ impl Accounts {
fs::create_dir_all(self.dir.join(&account_config.dir)) fs::create_dir_all(self.dir.join(&account_config.dir))
.await .await
.context("failed to create dir")?; .context("failed to create dir")?;
try_many_times(|| fs::rename(&dbfile, &new_dbfile)) fs::rename(&dbfile, &new_dbfile)
.await .await
.context("failed to rename dbfile")?; .context("failed to rename dbfile")?;
try_many_times(|| fs::rename(&blobdir, &new_blobdir)) fs::rename(&blobdir, &new_blobdir)
.await .await
.context("failed to rename blobdir")?; .context("failed to rename blobdir")?;
if walfile.exists() { if walfile.exists() {
@@ -215,7 +214,7 @@ impl Accounts {
} }
Err(err) => { Err(err) => {
let account_path = std::path::PathBuf::from(&account_config.dir); let account_path = std::path::PathBuf::from(&account_config.dir);
try_many_times(|| fs::remove_dir_all(&account_path)) fs::remove_dir_all(&account_path)
.await .await
.context("failed to remove account data")?; .context("failed to remove account data")?;
self.config.remove_account(account_config.id).await?; self.config.remove_account(account_config.id).await?;
@@ -472,33 +471,6 @@ impl Config {
} }
} }
/// Spend up to 1 minute trying to do the operation.
///
/// Files may remain locked up to 30 seconds due to r2d2 bug:
/// <https://github.com/sfackler/r2d2/issues/99>
async fn try_many_times<F, Fut, T>(f: F) -> std::result::Result<(), T>
where
F: Fn() -> Fut,
Fut: Future<Output = std::result::Result<(), T>>,
{
let mut counter = 0;
loop {
counter += 1;
if let Err(err) = f().await {
if counter > 60 {
return Err(err);
}
// Wait 1 second and try again.
tokio::time::sleep(std::time::Duration::from_millis(1000)).await;
} else {
break;
}
}
Ok(())
}
/// Configuration of a single account. /// Configuration of a single account.
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq)] #[derive(Serialize, Deserialize, Debug, Clone, PartialEq)]
struct AccountConfig { struct AccountConfig {

View File

@@ -3397,7 +3397,7 @@ pub async fn forward_msgs(context: &Context, msg_ids: &[MsgId], chat_id: ChatId)
msg.param.remove(Param::WebxdcSummaryTimestamp); msg.param.remove(Param::WebxdcSummaryTimestamp);
msg.in_reply_to = None; msg.in_reply_to = None;
// do not leak data as group names; a default subject is generated by mimfactory // do not leak data as group names; a default subject is generated by mimefactory
msg.subject = "".to_string(); msg.subject = "".to_string();
let new_msg_id: MsgId; let new_msg_id: MsgId;

View File

@@ -13,6 +13,7 @@ use async_channel::{self as channel, Receiver, Sender};
use deltachat_derive::{FromSql, ToSql}; use deltachat_derive::{FromSql, ToSql};
use once_cell::sync::Lazy; use once_cell::sync::Lazy;
use regex::Regex; use regex::Regex;
use rusqlite::OptionalExtension;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use tokio::task; use tokio::task;
use tokio::time::{timeout, Duration}; use tokio::time::{timeout, Duration};
@@ -520,8 +521,6 @@ impl Contact {
/// Depending on the origin, both, "row_name" and "row_authname" are updated from "name". /// Depending on the origin, both, "row_name" and "row_authname" are updated from "name".
/// ///
/// Returns the contact_id and a `Modifier` value indicating if a modification occurred. /// Returns the contact_id and a `Modifier` value indicating if a modification occurred.
///
/// Returns None if the contact with such address cannot exist.
pub(crate) async fn add_or_lookup( pub(crate) async fn add_or_lookup(
context: &Context, context: &Context,
name: &str, name: &str,
@@ -566,14 +565,12 @@ impl Contact {
); );
let mut update_addr = false; let mut update_addr = false;
let mut row_id = 0;
if let Some((id, row_name, row_addr, row_origin, row_authname)) = context let row_id = context.sql.transaction(|transaction| {
.sql let row = transaction.query_row(
.query_row_optional( "SELECT id, name, addr, origin, authname
"SELECT id, name, addr, origin, authname \ FROM contacts WHERE addr=? COLLATE NOCASE",
FROM contacts WHERE addr=? COLLATE NOCASE;", [addr.to_string()],
paramsv![addr.to_string()],
|row| { |row| {
let row_id: isize = row.get(0)?; let row_id: isize = row.get(0)?;
let row_name: String = row.get(1)?; let row_name: String = row.get(1)?;
@@ -582,120 +579,130 @@ impl Contact {
let row_authname: String = row.get(4)?; let row_authname: String = row.get(4)?;
Ok((row_id, row_name, row_addr, row_origin, row_authname)) Ok((row_id, row_name, row_addr, row_origin, row_authname))
}, }).optional()?;
)
.await?
{
let update_name = manual && name != row_name;
let update_authname = !manual
&& name != row_authname
&& !name.is_empty()
&& (origin >= row_origin
|| origin == Origin::IncomingUnknownFrom
|| row_authname.is_empty());
row_id = u32::try_from(id)?; let row_id;
if origin >= row_origin && addr.as_ref() != row_addr { if let Some((id, row_name, row_addr, row_origin, row_authname)) = row {
update_addr = true; let update_name = manual && name != row_name;
} let update_authname = !manual
if update_name || update_authname || update_addr || origin > row_origin { && name != row_authname
let new_name = if update_name { && !name.is_empty()
name.to_string() && (origin >= row_origin
} else { || origin == Origin::IncomingUnknownFrom
row_name || row_authname.is_empty());
};
context row_id = u32::try_from(id)?;
.sql if origin >= row_origin && addr.as_ref() != row_addr {
.execute( update_addr = true;
"UPDATE contacts SET name=?, addr=?, origin=?, authname=? WHERE id=?;", }
paramsv![ if update_name || update_authname || update_addr || origin > row_origin {
new_name, let new_name = if update_name {
if update_addr { name.to_string()
addr.to_string() } else {
} else { row_name
row_addr };
},
if origin > row_origin {
origin
} else {
row_origin
},
if update_authname {
name.to_string()
} else {
row_authname
},
row_id
],
)
.await
.ok();
if update_name || update_authname { transaction
// Update the contact name also if it is used as a group name. .execute(
// This is one of the few duplicated data, however, getting the chat list is easier this way. "UPDATE contacts SET name=?, addr=?, origin=?, authname=? WHERE id=?;",
let chat_id: Option<i32> = context.sql.query_get_value( paramsv![
"SELECT id FROM chats WHERE type=? AND id IN(SELECT chat_id FROM chats_contacts WHERE contact_id=?)", new_name,
paramsv![Chattype::Single, isize::try_from(row_id)?] if update_addr {
).await?; addr.to_string()
if let Some(chat_id) = chat_id { } else {
let contact = Contact::get_by_id(context, ContactId::new(row_id)).await?; row_addr
let chat_name = contact.get_display_name(); },
match context if origin > row_origin {
.sql origin
.execute( } else {
"UPDATE chats SET name=?1 WHERE id=?2 AND name!=?3", row_origin
paramsv![chat_name, chat_id, chat_name], },
) if update_authname {
.await name.to_string()
{ } else {
Err(err) => warn!(context, "Can't update chat name: {}", err), row_authname
Ok(count) => { },
if count > 0 { row_id
// Chat name updated ],
context.emit_event(EventType::ChatModified(ChatId::new( )?;
chat_id.try_into()?,
))); if update_name || update_authname {
} // Update the contact name also if it is used as a group name.
// This is one of the few duplicated data, however, getting the chat list is easier this way.
let chat_id: Option<ChatId> = transaction.query_row(
"SELECT id FROM chats WHERE type=? AND id IN(SELECT chat_id FROM chats_contacts WHERE contact_id=?)",
params![Chattype::Single, isize::try_from(row_id)?],
|row| {
let chat_id: ChatId = row.get(0)?;
Ok(chat_id)
}
).optional()?;
if let Some(chat_id) = chat_id {
let contact_id = ContactId::new(row_id);
let (addr, name, authname) =
transaction.query_row(
"SELECT addr, name, authname
FROM contacts
WHERE id=?",
params![contact_id],
|row| {
let addr: String = row.get(0)?;
let name: String = row.get(1)?;
let authname: String = row.get(2)?;
Ok((addr, name, authname))
})?;
let chat_name = if !name.is_empty() {
name
} else if !authname.is_empty() {
authname
} else {
addr
};
let count = transaction.execute(
"UPDATE chats SET name=?1 WHERE id=?2 AND name!=?1",
params![chat_name, chat_id])?;
if count > 0 {
// Chat name updated
context.emit_event(EventType::ChatModified(chat_id));
} }
} }
} }
sth_modified = Modifier::Modified;
} }
sth_modified = Modifier::Modified;
}
} else {
let update_name = manual;
let update_authname = !manual;
if let Ok(new_row_id) = context
.sql
.insert(
"INSERT INTO contacts (name, addr, origin, authname) VALUES(?, ?, ?, ?);",
paramsv![
if update_name {
name.to_string()
} else {
"".to_string()
},
addr,
origin,
if update_authname {
name.to_string()
} else {
"".to_string()
}
],
)
.await
{
row_id = u32::try_from(new_row_id)?;
sth_modified = Modifier::Created;
info!(context, "added contact id={} addr={}", row_id, &addr);
} else { } else {
error!(context, "Cannot add contact."); let update_name = manual;
let update_authname = !manual;
transaction
.execute(
"INSERT INTO contacts (name, addr, origin, authname)
VALUES (?, ?, ?, ?);",
params![
if update_name {
name.to_string()
} else {
"".to_string()
},
addr,
origin,
if update_authname {
name.to_string()
} else {
"".to_string()
}
],
)?;
sth_modified = Modifier::Created;
row_id = u32::try_from(transaction.last_insert_rowid())?;
info!(context, "added contact id={} addr={}", row_id, &addr);
} }
} Ok(row_id)
}).await?;
Ok((ContactId::new(row_id), sth_modified)) Ok((ContactId::new(row_id), sth_modified))
} }

View File

@@ -512,6 +512,9 @@ fn get_next_backup_path(folder: &Path, backup_time: i64) -> Result<(PathBuf, Pat
bail!("could not create backup file, disk full?"); bail!("could not create backup file, disk full?");
} }
/// Exports the database to a separate file with the given passphrase.
///
/// Set passphrase to empty string to export the database unencrypted.
async fn export_backup(context: &Context, dir: &Path, passphrase: String) -> Result<()> { async fn export_backup(context: &Context, dir: &Path, passphrase: String) -> Result<()> {
// get a fine backup file name (the name includes the date so that multiple backup instances are possible) // get a fine backup file name (the name includes the date so that multiple backup instances are possible)
let now = time(); let now = time();
@@ -737,9 +740,9 @@ where
Ok(()) Ok(())
} }
/// Exports the database to *file*, encrypted using *passphrase*. /// Exports the database to *dest*, encrypted using *passphrase*.
/// ///
/// The directory of *file* must already exist, if *file* itself exists it will be /// The directory of *dest* must already exist, if *dest* itself exists it will be
/// overwritten. /// overwritten.
/// ///
/// This also verifies that IO is not running during the export. /// This also verifies that IO is not running during the export.
@@ -750,20 +753,31 @@ async fn export_database(context: &Context, dest: &Path, passphrase: String) ->
); );
let now = time().try_into().context("32-bit UNIX time overflow")?; let now = time().try_into().context("32-bit UNIX time overflow")?;
// TODO: Maybe introduce camino crate for UTF-8 paths where we need them.
let dest = dest
.to_str()
.with_context(|| format!("path {} is not valid unicode", dest.display()))?;
context.sql.set_raw_config_int("backup_time", now).await?; context.sql.set_raw_config_int("backup_time", now).await?;
sql::housekeeping(context).await.ok_or_log(context); sql::housekeeping(context).await.ok_or_log(context);
context let conn = context.sql.get_conn().await?;
.sql tokio::task::block_in_place(move || {
.execute("VACUUM;", paramsv![]) conn.execute("VACUUM;", params![])
.await .map_err(|err| warn!(context, "Vacuum failed, exporting anyway {err}"))
.map_err(|e| warn!(context, "Vacuum failed, exporting anyway {}", e)) .ok();
.ok(); conn.execute(
context "ATTACH DATABASE ? AS backup KEY ?",
.sql paramsv![dest, passphrase],
.export(dest, passphrase) )
.await .context("failed to attach backup database")?;
.with_context(|| format!("failed to backup database to {}", dest.display()))?; let res = conn
Ok(()) .query_row("SELECT sqlcipher_export('backup')", [], |_row| Ok(()))
.context("failed to export to attached backup database");
conn.execute("DETACH DATABASE backup", [])
.context("failed to detach backup database")?;
res?;
Ok(())
})
} }
#[cfg(test)] #[cfg(test)]

View File

@@ -265,6 +265,8 @@ pub struct Message {
pub(crate) text: Option<String>, pub(crate) text: Option<String>,
/// Message subject. /// Message subject.
///
/// If empty, a default subject will be generated when sending.
pub(crate) subject: String, pub(crate) subject: String,
/// `Message-ID` header value. /// `Message-ID` header value.
@@ -795,6 +797,12 @@ impl Message {
self.text = text; self.text = text;
} }
/// Sets the email's subject. If it's empty, a default subject
/// will be used (e.g. `Message from Alice` or `Re: <last subject>`).
pub fn set_subject(&mut self, subject: String) {
self.subject = subject;
}
/// Sets the file associated with a message. /// Sets the file associated with a message.
/// ///
/// This function does not use the file or check if it exists, /// This function does not use the file or check if it exists,

View File

@@ -1612,6 +1612,22 @@ mod tests {
assert_eq!(maybe_encode_words("äöü"), "=?utf-8?b?w6TDtsO8?="); assert_eq!(maybe_encode_words("äöü"), "=?utf-8?b?w6TDtsO8?=");
} }
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn test_manually_set_subject() -> Result<()> {
let t = TestContext::new_alice().await;
let chat = t.create_chat_with_contact("bob", "bob@example.org").await;
let mut msg = Message::new(Viewtype::Text);
msg.set_subject("Subjeeeeect".to_string());
let sent_msg = t.send_msg(chat.id, &mut msg).await;
let payload = sent_msg.payload();
assert_eq!(payload.match_indices("Subject: Subjeeeeect").count(), 1);
Ok(())
}
#[tokio::test(flavor = "multi_thread", worker_threads = 2)] #[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn test_subject_from_mua() { async fn test_subject_from_mua() {
// 1.: Receive a mail from an MUA // 1.: Receive a mail from an MUA

View File

@@ -4,10 +4,9 @@ use std::collections::{HashMap, HashSet};
use std::convert::TryFrom; use std::convert::TryFrom;
use std::path::Path; use std::path::Path;
use std::path::PathBuf; use std::path::PathBuf;
use std::time::Duration;
use anyhow::{bail, Context as _, Result}; use anyhow::{bail, Context as _, Result};
use rusqlite::{config::DbConfig, Connection, OpenFlags}; use rusqlite::{self, config::DbConfig, Connection, OpenFlags, TransactionBehavior};
use tokio::sync::RwLock; use tokio::sync::RwLock;
use crate::blob::BlobObject; use crate::blob::BlobObject;
@@ -48,6 +47,9 @@ pub(crate) fn params_iter(iter: &[impl crate::ToSql]) -> impl Iterator<Item = &d
} }
mod migrations; mod migrations;
mod pool;
use pool::{Pool, PooledConnection};
/// A wrapper around the underlying Sqlite3 object. /// A wrapper around the underlying Sqlite3 object.
#[derive(Debug)] #[derive(Debug)]
@@ -56,7 +58,7 @@ pub struct Sql {
pub(crate) dbfile: PathBuf, pub(crate) dbfile: PathBuf,
/// SQL connection pool. /// SQL connection pool.
pool: RwLock<Option<r2d2::Pool<r2d2_sqlite::SqliteConnectionManager>>>, pool: RwLock<Option<Pool>>,
/// None if the database is not open, true if it is open with passphrase and false if it is /// None if the database is not open, true if it is open with passphrase and false if it is
/// open without a passphrase. /// open without a passphrase.
@@ -122,31 +124,6 @@ impl Sql {
// drop closes the connection // drop closes the connection
} }
/// Exports the database to a separate file with the given passphrase.
///
/// Set passphrase to empty string to export the database unencrypted.
pub(crate) async fn export(&self, path: &Path, passphrase: String) -> Result<()> {
// The path is embedded into an SQL statement, so it must be valid UTF-8.
let path_str = path
.to_str()
.with_context(|| format!("path {path:?} is not valid unicode"))?;
let conn = self.get_conn().await?;
// sqlcipher_export() copies the whole database synchronously;
// block_in_place keeps the blocking work off the async executor threads.
tokio::task::block_in_place(move || {
conn.execute(
"ATTACH DATABASE ? AS backup KEY ?",
paramsv![path_str, passphrase],
)
.context("failed to attach backup database")?;
// Keep the export result but do not `?` yet: the backup database
// must be detached even if the export itself failed.
let res = conn
.query_row("SELECT sqlcipher_export('backup')", [], |_row| Ok(()))
.context("failed to export to attached backup database");
conn.execute("DETACH DATABASE backup", [])
.context("failed to detach backup database")?;
// Only after a successful detach do we propagate any export error.
res?;
Ok(())
})
}
/// Imports the database from a separate file with the given passphrase. /// Imports the database from a separate file with the given passphrase.
pub(crate) async fn import(&self, path: &Path, passphrase: String) -> Result<()> { pub(crate) async fn import(&self, path: &Path, passphrase: String) -> Result<()> {
let path_str = path let path_str = path
@@ -192,50 +169,15 @@ impl Sql {
}) })
} }
fn new_pool( /// Creates a new connection pool.
dbfile: &Path, fn new_pool(dbfile: &Path, passphrase: String) -> Result<Pool> {
passphrase: String, let mut connections = Vec::new();
) -> Result<r2d2::Pool<r2d2_sqlite::SqliteConnectionManager>> { for _ in 0..3 {
let mut open_flags = OpenFlags::SQLITE_OPEN_NO_MUTEX; let connection = new_connection(dbfile, &passphrase)?;
open_flags.insert(OpenFlags::SQLITE_OPEN_READ_WRITE); connections.push(connection);
open_flags.insert(OpenFlags::SQLITE_OPEN_CREATE); }
// this actually creates min_idle database handles just now. let pool = Pool::new(connections);
// therefore, with_init() must not try to modify the database as otherwise
// we easily get busy-errors (eg. table-creation, journal_mode etc. should be done on only one handle)
let mgr = r2d2_sqlite::SqliteConnectionManager::file(dbfile)
.with_flags(open_flags)
.with_init(move |c| {
c.execute_batch(&format!(
"PRAGMA cipher_memory_security = OFF; -- Too slow on Android
PRAGMA secure_delete=on;
PRAGMA busy_timeout = {};
PRAGMA temp_store=memory; -- Avoid SQLITE_IOERR_GETTEMPPATH errors on Android
PRAGMA foreign_keys=on;
",
Duration::from_secs(10).as_millis()
))?;
c.pragma_update(None, "key", passphrase.clone())?;
// Try to enable auto_vacuum. This will only be
// applied if the database is new or after successful
// VACUUM, which usually happens before backup export.
// When auto_vacuum is INCREMENTAL, it is possible to
// use PRAGMA incremental_vacuum to return unused
// database pages to the filesystem.
c.pragma_update(None, "auto_vacuum", "INCREMENTAL".to_string())?;
c.pragma_update(None, "journal_mode", "WAL".to_string())?;
// Default synchronous=FULL is much slower. NORMAL is sufficient for WAL mode.
c.pragma_update(None, "synchronous", "NORMAL".to_string())?;
Ok(())
});
let pool = r2d2::Pool::builder()
.min_idle(Some(2))
.max_size(10)
.connection_timeout(Duration::from_secs(60))
.build(mgr)
.context("Can't build SQL connection pool")?;
Ok(pool) Ok(pool)
} }
@@ -393,12 +335,10 @@ impl Sql {
} }
/// Allocates a connection from the connection pool and returns it. /// Allocates a connection from the connection pool and returns it.
pub async fn get_conn( pub(crate) async fn get_conn(&self) -> Result<PooledConnection> {
&self,
) -> Result<r2d2::PooledConnection<r2d2_sqlite::SqliteConnectionManager>> {
let lock = self.pool.read().await; let lock = self.pool.read().await;
let pool = lock.as_ref().context("no SQL connection")?; let pool = lock.as_ref().context("no SQL connection")?;
let conn = pool.get()?; let conn = pool.get().await?;
Ok(conn) Ok(conn)
} }
@@ -437,6 +377,12 @@ impl Sql {
/// ///
/// If the function returns an error, the transaction will be rolled back. If it does not return an /// If the function returns an error, the transaction will be rolled back. If it does not return an
/// error, the transaction will be committed. /// error, the transaction will be committed.
///
/// Transactions started use IMMEDIATE behavior
/// rather than default DEFERRED behavior
/// to avoid "database is busy" errors
/// which may happen when DEFERRED transaction
/// is attempted to be promoted to a write transaction.
pub async fn transaction<G, H>(&self, callback: G) -> Result<H> pub async fn transaction<G, H>(&self, callback: G) -> Result<H>
where where
H: Send + 'static, H: Send + 'static,
@@ -444,7 +390,7 @@ impl Sql {
{ {
let mut conn = self.get_conn().await?; let mut conn = self.get_conn().await?;
tokio::task::block_in_place(move || { tokio::task::block_in_place(move || {
let mut transaction = conn.transaction()?; let mut transaction = conn.transaction_with_behavior(TransactionBehavior::Immediate)?;
let ret = callback(&mut transaction); let ret = callback(&mut transaction);
match ret { match ret {
@@ -642,6 +588,42 @@ impl Sql {
} }
} }
/// Creates a new SQLite connection.
///
/// `path` is the database path.
///
/// `passphrase` is the SQLCipher database passphrase.
/// Empty string if database is not encrypted.
fn new_connection(path: &Path, passphrase: &str) -> Result<Connection> {
let mut flags = OpenFlags::SQLITE_OPEN_NO_MUTEX;
flags.insert(OpenFlags::SQLITE_OPEN_READ_WRITE);
flags.insert(OpenFlags::SQLITE_OPEN_CREATE);
let conn = Connection::open_with_flags(path, flags)?;
// NOTE(review): these pragmas run before `PRAGMA key` is set below;
// presumably SQLCipher accepts them without touching encrypted pages —
// confirm against SQLCipher documentation before reordering.
conn.execute_batch(
"PRAGMA cipher_memory_security = OFF; -- Too slow on Android
PRAGMA secure_delete=on;
PRAGMA busy_timeout = 60000; -- 60 seconds
PRAGMA temp_store=memory; -- Avoid SQLITE_IOERR_GETTEMPPATH errors on Android
PRAGMA foreign_keys=on;
",
)?;
conn.pragma_update(None, "key", passphrase)?;
// Try to enable auto_vacuum. This will only be
// applied if the database is new or after successful
// VACUUM, which usually happens before backup export.
// When auto_vacuum is INCREMENTAL, it is possible to
// use PRAGMA incremental_vacuum to return unused
// database pages to the filesystem.
conn.pragma_update(None, "auto_vacuum", "INCREMENTAL".to_string())?;
conn.pragma_update(None, "journal_mode", "WAL".to_string())?;
// Default synchronous=FULL is much slower. NORMAL is sufficient for WAL mode.
conn.pragma_update(None, "synchronous", "NORMAL".to_string())?;
Ok(conn)
}
}
/// Cleanup the account to restore some storage and optimize the database. /// Cleanup the account to restore some storage and optimize the database.
pub async fn housekeeping(context: &Context) -> Result<()> { pub async fn housekeeping(context: &Context) -> Result<()> {
if let Err(err) = remove_unused_files(context).await { if let Err(err) = remove_unused_files(context).await {

102
src/sql/pool.rs Normal file
View File

@@ -0,0 +1,102 @@
//! Connection pool.
use std::ops::{Deref, DerefMut};
use std::sync::{Arc, Weak};
use anyhow::{Context, Result};
use crossbeam_queue::ArrayQueue;
use rusqlite::Connection;
use tokio::sync::{OwnedSemaphorePermit, Semaphore};
/// Inner connection pool.
///
/// Shared between [`Pool`] handles via `Arc` and referenced weakly
/// from each [`PooledConnection`], so connections can be returned
/// without keeping the pool alive forever.
#[derive(Debug)]
struct InnerPool {
/// Available connections.
connections: ArrayQueue<Connection>,
/// Counts the number of available connections.
///
/// Holding a permit guarantees that `connections` is non-empty.
semaphore: Arc<Semaphore>,
}
impl InnerPool {
/// Puts a connection into the pool.
///
/// The connection could be new or returned back.
///
/// `force_push` would displace the oldest entry if the queue were full,
/// but the queue capacity equals the number of semaphore permits
/// (see `Pool::new`), so no connection is ever displaced in practice.
fn put(&self, connection: Connection) {
self.connections.force_push(connection);
}
}
/// Pooled connection.
///
/// Dereferences to [`Connection`] and automatically returns the
/// connection to the pool when dropped.
pub struct PooledConnection {
/// Weak reference to the pool used to return the connection back.
pool: Weak<InnerPool>,
/// Only `None` right after moving the connection back to the pool.
conn: Option<Connection>,
/// Semaphore permit, dropped after returning the connection to the pool.
_permit: OwnedSemaphorePermit,
}
impl Drop for PooledConnection {
    /// Returns the connection to the pool unless the pool is gone.
    fn drop(&mut self) {
        // If the pool has already been dropped, the connection is
        // simply closed here instead of being returned.
        let Some(pool) = self.pool.upgrade() else {
            return;
        };
        if let Some(connection) = self.conn.take() {
            pool.put(connection);
        }
    }
}
impl Deref for PooledConnection {
    type Target = Connection;

    fn deref(&self) -> &Connection {
        // Invariant: `conn` is only `None` transiently inside `drop`,
        // so a live `PooledConnection` always holds a connection.
        self.conn
            .as_ref()
            .expect("PooledConnection must hold a connection while alive")
    }
}
impl DerefMut for PooledConnection {
    fn deref_mut(&mut self) -> &mut Connection {
        // Invariant: `conn` is only `None` transiently inside `drop`,
        // so a live `PooledConnection` always holds a connection.
        self.conn
            .as_mut()
            .expect("PooledConnection must hold a connection while alive")
    }
}
/// Connection pool.
///
/// Cheaply cloneable handle: all clones share the same set of
/// connections through the inner `Arc`.
#[derive(Clone, Debug)]
pub struct Pool {
/// Reference to the actual connection pool.
inner: Arc<InnerPool>,
}
impl Pool {
    /// Creates a new connection pool from the given connections.
    ///
    /// One semaphore permit is created per connection, so `get` hands
    /// out at most `connections.len()` connections at a time.
    pub fn new(connections: Vec<Connection>) -> Self {
        let capacity = connections.len();
        let inner = Arc::new(InnerPool {
            connections: ArrayQueue::new(capacity),
            semaphore: Arc::new(Semaphore::new(capacity)),
        });
        for conn in connections {
            inner.connections.force_push(conn);
        }
        Self { inner }
    }

    /// Retrieves a connection from the pool.
    ///
    /// Waits asynchronously until a connection becomes available
    /// if all of them are currently checked out.
    pub async fn get(&self) -> Result<PooledConnection> {
        // Acquiring a permit guarantees a connection is in the queue.
        let permit = self.inner.semaphore.clone().acquire_owned().await?;
        let connection = self
            .inner
            .connections
            .pop()
            .context("got a permit when there are no connections in the pool")?;
        Ok(PooledConnection {
            pool: Arc::downgrade(&self.inner),
            conn: Some(connection),
            _permit: permit,
        })
    }
}