diff --git a/.github/renovate.json b/.github/renovate.json index c4fa9ad..022de81 100644 --- a/.github/renovate.json +++ b/.github/renovate.json @@ -1,10 +1,10 @@ { - "labels": [ - "dependencies" - ], - "extends": [ - "config:base", - ":gitSignOff" - ], - "groupName": "all" - } + "labels": [ + "dependencies" + ], + "extends": [ + "config:recommended", + ":gitSignOff" + ], + "groupName": "all" +} diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml index 423d9c9..29e7cf1 100644 --- a/.github/workflows/test.yaml +++ b/.github/workflows/test.yaml @@ -9,7 +9,7 @@ jobs: runs-on: ubuntu-latest if: ${{ github.ref == 'refs/heads/master' }} steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v6 - name: Set up Rust uses: dtolnay/rust-toolchain@stable with: @@ -19,14 +19,22 @@ jobs: RUSTFLAGS="-A unused" cargo clippy --all-targets --all-features --message-format=json -- -Dwarnings | clippy-sarif | tee results.sarif | sarif-fmt - name: Upload SARIF file - uses: github/codeql-action/upload-sarif@v3 + uses: github/codeql-action/upload-sarif@v4 with: sarif_file: results.sarif static-code-check: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v6 + - name: Install build dependencies + run: | + need_install=() + cmake --version &>/dev/null || need_install+=(cmake) + clang --version &>/dev/null || need_install+=(clang libclang-dev) + if [ ${#need_install[@]} -gt 0 ]; then + sudo apt-get update && sudo apt-get install -y "${need_install[@]}" + fi - name: Set up Rust uses: dtolnay/rust-toolchain@stable with: @@ -40,7 +48,16 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout repository - uses: actions/checkout@v4 + uses: actions/checkout@v6 + + - name: Install build dependencies + run: | + need_install=() + cmake --version &>/dev/null || need_install+=(cmake) + clang --version &>/dev/null || need_install+=(clang libclang-dev) + if [ ${#need_install[@]} -gt 0 ]; then + sudo apt-get update && sudo apt-get install -y 
"${need_install[@]}" + fi - name: Set up Rust uses: dtolnay/rust-toolchain@stable diff --git a/Cargo.lock b/Cargo.lock index 131cf1d..0b56aaf 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -3,28 +3,19 @@ version = 4 [[package]] -name = "addr2line" -version = "0.24.2" +name = "aho-corasick" +version = "1.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dfbe277e56a376000877090da837660b4427aad530e3028d44e0bffe4f89a1c1" +checksum = "ddd31a130427c27518df266943a5308ed92d4b226cc639f5a8f1002816174301" dependencies = [ - "gimli", + "memchr", ] [[package]] -name = "adler2" -version = "2.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "512761e0bb2578dd7380c6baaa0f4ce03e84f95e960231d1dec8bf4d7d6e2627" - -[[package]] -name = "aho-corasick" -version = "1.1.3" +name = "anyhow" +version = "1.0.101" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8e60d3430d3a69478ad0993f19238d2df97c507009a52b3c10addcd7f6bcb916" -dependencies = [ - "memchr", -] +checksum = "5f0e0fee31ef5ed1ba1316088939cea399010ed7731dba877ed44aeb407a75ea" [[package]] name = "assert-json-diff" @@ -38,9 +29,9 @@ dependencies = [ [[package]] name = "async-trait" -version = "0.1.83" +version = "0.1.89" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "721cae7de5c34fbb2acd27e21e6d2cf7b886dce0c27388d46c4e6c47ea4318dd" +checksum = "9035ad2d096bed7955a320ee7e2230574d28fd3c3a0f186cbea1ff3c7eed5dbb" dependencies = [ "proc-macro2", "quote", @@ -63,24 +54,25 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1505bd5d3d116872e7271a6d4e16d81d0c8570876c8de68093a09ac269d8aac0" [[package]] -name = "autocfg" -version = "1.4.0" +name = "aws-lc-rs" +version = "1.15.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ace50bade8e6234aa140d9a2f552bbee1db4d353f69b8217bc503490fc1a9f26" +checksum = 
"6a88aab2464f1f25453baa7a07c84c5b7684e274054ba06817f382357f77a288" +dependencies = [ + "aws-lc-sys", + "zeroize", +] [[package]] -name = "backtrace" -version = "0.3.74" +name = "aws-lc-sys" +version = "0.35.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8d82cb332cdfaed17ae235a638438ac4d4839913cc2af585c3c6746e8f8bee1a" +checksum = "b45afffdee1e7c9126814751f88dddc747f41d91da16c9551a0f1e8a11e788a1" dependencies = [ - "addr2line", - "cfg-if", - "libc", - "miniz_oxide", - "object", - "rustc-demangle", - "windows-targets 0.52.6", + "cc", + "cmake", + "dunce", + "fs_extra", ] [[package]] @@ -89,11 +81,31 @@ version = "0.22.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6" +[[package]] +name = "bindgen" +version = "0.72.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "993776b509cfb49c750f11b8f07a46fa23e0a1386ffc01fb1e7d343efc387895" +dependencies = [ + "bitflags", + "cexpr", + "clang-sys", + "itertools", + "log", + "prettyplease", + "proc-macro2", + "quote", + "regex", + "rustc-hash", + "shlex", + "syn", +] + [[package]] name = "bitflags" -version = "2.6.0" +version = "2.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b048fb63fd8b5923fc5aa7b340d8e156aec7ec02f0c78fa8a6ddc2613f6f71de" +checksum = "812e12b5285cc515a9c72a5c1d3b6d46a19dac5acfef5265968c166106e31dd3" [[package]] name = "block-buffer" @@ -106,28 +118,25 @@ dependencies = [ [[package]] name = "bumpalo" -version = "3.16.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "79296716171880943b8470b5f8d03aa55eb2e645a4874bdbb28adb49162e012c" - -[[package]] -name = "byteorder" -version = "1.5.0" +version = "3.19.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" +checksum = 
"5dd9dc738b7a8311c7ade152424974d8115f2cdad61e8dab8dac9f2362298510" [[package]] name = "bytes" -version = "1.9.0" +version = "1.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "325918d6fe32f23b19878fe4b34794ae41fc19ddbe53b10571a4874d44ffd39b" +checksum = "1e748733b7cbc798e1434b6ac524f0c1ff2ab456fe201501e6497c8417a4fc33" [[package]] name = "cc" -version = "1.2.1" +version = "1.2.52" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fd9de9f2205d5ef3fd67e685b0df337994ddd4495e2a28d185500d0e1edfea47" +checksum = "cd4932aefd12402b36c60956a4fe0035421f544799057659ff86f923657aada3" dependencies = [ + "find-msvc-tools", + "jobserver", + "libc", "shlex", ] @@ -137,20 +146,65 @@ version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6d43a04d8753f35258c91f8ec639f792891f748a1edbd759cf1dcea3382ad83c" +[[package]] +name = "cexpr" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6fac387a98bb7c37292057cffc56d62ecb629900026402633ae9160df93a8766" +dependencies = [ + "nom", +] + [[package]] name = "cfg-if" -version = "1.0.0" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9330f8b2ff13f34540b44e946ef35111825727b38d33286ef986142615121801" + +[[package]] +name = "cfg_aliases" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "613afe47fcd5fac7ccf1db93babcb082c5994d996f20b8b159f2ad1658eb5724" + +[[package]] +name = "chacha20" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6f8d983286843e49675a4b7a2d174efe136dc93a18d69130dd18198a6c167601" +dependencies = [ + "cfg-if", + "cpufeatures 0.3.0", + "rand_core 0.10.0", +] + +[[package]] +name = "clang-sys" +version = "1.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"0b023947811758c97c59bf9d1c188fd619ad4718dcaa767947df1cadb14f39f4" +dependencies = [ + "glob", + "libc", + "libloading", +] + +[[package]] +name = "cmake" +version = "0.1.57" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" +checksum = "75443c44cd6b379beb8c5b45d85d0773baf31cce901fe7bb252f4eff3008ef7d" +dependencies = [ + "cc", +] [[package]] name = "colored" -version = "2.1.0" +version = "3.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cbf2150cce219b664a8a70df7a1f933836724b503f8a413af9365b4dcc4d90b8" +checksum = "fde0e0ec90c9dfb3b4b1a0891a7dcd0e2bffde2f7efed5fe7c9bb00e5bfb915e" dependencies = [ - "lazy_static", - "windows-sys 0.48.0", + "windows-sys 0.52.0", ] [[package]] @@ -165,19 +219,9 @@ dependencies = [ [[package]] name = "core-foundation" -version = "0.9.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "91e195e091a93c46f7102ec7818a2aa394e1e1771c3ab4825963fa03e45afb8f" -dependencies = [ - "core-foundation-sys", - "libc", -] - -[[package]] -name = "core-foundation" -version = "0.10.0" +version = "0.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b55271e5c8c478ad3f38ad24ef34923091e0548492a266d19b3c0b4d82574c63" +checksum = "b2a6cd9ae233e7f62ba4e9353e81a88df7fc8a5987b8d445b4d90c879bd156f6" dependencies = [ "core-foundation-sys", "libc", @@ -191,18 +235,27 @@ checksum = "773648b94d0e5d620f64f280777445740e61fe701025087ec8b57f45c791888b" [[package]] name = "cpufeatures" -version = "0.2.16" +version = "0.2.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "59ed5838eebb26a2bb2e58f6d5b5316989ae9d08bab10e0e6d103e656d1b0280" +dependencies = [ + "libc", +] + +[[package]] +name = "cpufeatures" +version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"16b80225097f2e5ae4e7179dd2266824648f3e2f49d9134d584b76389d31c4c3" +checksum = "8b2a41393f66f16b0823bb79094d54ac5fbd34ab292ddafb9a0456ac9f87d201" dependencies = [ "libc", ] [[package]] name = "crypto-common" -version = "0.1.6" +version = "0.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3" +checksum = "78c8292055d1c1df0cce5d180393dc8cce0abec0a7102adb6c7b1eef6016d60a" dependencies = [ "generic-array", "typenum", @@ -229,6 +282,18 @@ dependencies = [ "syn", ] +[[package]] +name = "dunce" +version = "1.0.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "92773504d58c093f6de2459af4af33faa518c13451eb8f2b5698ed3d36e7c813" + +[[package]] +name = "either" +version = "1.15.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "48c757948c5ede0e46177b7add2e67155f70e33c07fea8284df6576da70b3719" + [[package]] name = "encoding_rs" version = "0.8.35" @@ -240,25 +305,40 @@ dependencies = [ [[package]] name = "equivalent" -version = "1.0.1" +version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5" +checksum = "877a4ace8713b0bcf2a4e7eec82529c029f1d0619886d18145fea96c3ffe5c0f" [[package]] name = "errno" -version = "0.3.9" +version = "0.3.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "534c5cf6194dfab3db3242765c03bbe257cf92f22b38f6bc0c58d59108a820ba" +checksum = "39cab71617ae0d63f51a36d69f866391735b51691dbda63cf6f96d042b63efeb" dependencies = [ "libc", - "windows-sys 0.52.0", + "windows-sys 0.60.2", ] [[package]] name = "fastrand" -version = "2.2.0" +version = "2.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be" + +[[package]] +name = "file-locker" +version = "1.1.4" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "75ae8b5984a4863d8a32109a848d038bd6d914f20f010cc141375f7a183c41cf" +dependencies = [ + "nix", +] + +[[package]] +name = "find-msvc-tools" +version = "0.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "486f806e73c5707928240ddc295403b1b93c96a02038563881c4a2fd84b81ac4" +checksum = "f449e6c6c08c865631d4890cfacf252b3d396c9bcc83adb6623cdb02a8336c41" [[package]] name = "fnv" @@ -266,23 +346,36 @@ version = "1.0.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" +[[package]] +name = "foldhash" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d9c4f5dac5e15c24eb999c26181a6ca40b39fe946cbe4c263c7209467bc83af2" + [[package]] name = "form_urlencoded" -version = "1.2.1" +version = "1.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e13624c2627564efccf4934284bdd98cbaa14e79b0b5a141218e507b3a823456" +checksum = "cb4cb245038516f5f85277875cdaa4f7d2c9a0fa0468de06ed190163b1581fcf" dependencies = [ "percent-encoding", ] +[[package]] +name = "fs_extra" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "42703706b716c37f96a77aea830392ad231f44c9e9a67872fa5548707e11b11c" + [[package]] name = "futures" -version = "0.3.31" +version = "0.3.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "65bc07b1a8bc7c85c5f2e110c476c7389b4554ba72af57d8445ea63a576b0876" +checksum = "8b147ee9d1f6d097cef9ce628cd2ee62288d963e16fb287bd9286455b241382d" dependencies = [ "futures-channel", "futures-core", + "futures-executor", "futures-io", "futures-sink", "futures-task", @@ -291,9 +384,9 @@ dependencies = [ [[package]] name = "futures-channel" -version = "0.3.31" +version = "0.3.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"2dff15bf788c671c1934e366d07e30c1814a8ef514e1af724a602e8a2fbe1b10" +checksum = "07bbe89c50d7a535e539b8c17bc0b49bdb77747034daa8087407d655f3f7cc1d" dependencies = [ "futures-core", "futures-sink", @@ -301,21 +394,32 @@ dependencies = [ [[package]] name = "futures-core" -version = "0.3.31" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7e3450815272ef58cec6d564423f6e755e25379b217b0bc688e295ba24df6b1d" + +[[package]] +name = "futures-executor" +version = "0.3.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "05f29059c0c2090612e8d742178b0580d2dc940c837851ad723096f87af6663e" +checksum = "baf29c38818342a3b26b5b923639e7b1f4a61fc5e76102d4b1981c6dc7a7579d" +dependencies = [ + "futures-core", + "futures-task", + "futures-util", +] [[package]] name = "futures-io" -version = "0.3.31" +version = "0.3.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9e5c1b78ca4aae1ac06c48a526a655760685149f0d465d21f37abfe57ce075c6" +checksum = "cecba35d7ad927e23624b22ad55235f2239cfa44fd10428eecbeba6d6a717718" [[package]] name = "futures-macro" -version = "0.3.31" +version = "0.3.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "162ee34ebcb7c64a8abebc059ce0fee27c2262618d7b60ed8faf72fef13c3650" +checksum = "e835b70203e41293343137df5c0664546da5745f82ec9b84d40be8336958447b" dependencies = [ "proc-macro2", "quote", @@ -324,15 +428,15 @@ dependencies = [ [[package]] name = "futures-sink" -version = "0.3.31" +version = "0.3.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e575fab7d1e0dcb8d0c7bcf9a63ee213816ab51902e6d244a95819acacf1d4f7" +checksum = "c39754e157331b013978ec91992bde1ac089843443c49cbc7f46150b0fad0893" [[package]] name = "futures-task" -version = "0.3.31" +version = "0.3.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"f90f7dce0722e95104fcb095585910c0977252f286e354b5e3bd38902cd99988" +checksum = "037711b3d59c33004d3856fbdc83b99d4ff37a24768fa1be9ce3538a1cde4393" [[package]] name = "futures-timer" @@ -346,9 +450,9 @@ dependencies = [ [[package]] name = "futures-util" -version = "0.3.31" +version = "0.3.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9fa08315bb612088cc391249efdc3bc77536f16c91f6cf495e6fbe85b20a4a81" +checksum = "389ca41296e6190b48053de0321d02a77f32f8a5d2461dd38762c0593805c6d6" dependencies = [ "futures-channel", "futures-core", @@ -358,7 +462,6 @@ dependencies = [ "futures-task", "memchr", "pin-project-lite", - "pin-utils", "slab", ] @@ -374,13 +477,43 @@ dependencies = [ [[package]] name = "getrandom" -version = "0.2.15" +version = "0.2.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c4567c8db10ae91089c99af84c68c38da3ec2f087c3f82960bcdbf3656b6f4d7" +checksum = "335ff9f135e4384c8150d6f27c6daed433577f86b4750418338c01a1a2528592" dependencies = [ "cfg-if", + "js-sys", "libc", "wasi", + "wasm-bindgen", +] + +[[package]] +name = "getrandom" +version = "0.3.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "899def5c37c4fd7b2664648c28120ecec138e4d395b459e5ca34f9cce2dd77fd" +dependencies = [ + "cfg-if", + "js-sys", + "libc", + "r-efi", + "wasip2", + "wasm-bindgen", +] + +[[package]] +name = "getrandom" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "139ef39800118c7683f2fd3c98c1b23c09ae076556b435f8e9064ae108aaeeec" +dependencies = [ + "cfg-if", + "libc", + "r-efi", + "rand_core 0.10.0", + "wasip2", + "wasip3", ] [[package]] @@ -390,30 +523,40 @@ dependencies = [ "async-trait", "async_fn_traits", "bytes", + "file-locker", + "futures", "http-body-util", "hyper", "hyper-rustls", "hyper-util", + "jsonl", "jsonrpsee", + "libversion-sys", "markdown", "mockito", "once_cell", + "parking_lot", "quick-xml", + "rand 0.10.0", "regex", + 
"reqwest", "rustls", - "rustls-platform-verifier 0.5.0", + "rustls-platform-verifier 0.6.2", "serde", "serde_json", + "serial_test", "tempfile", "tokio", - "version-compare", + "tokio-util", + "url", + "uuid", ] [[package]] -name = "gimli" -version = "0.31.1" +name = "glob" +version = "0.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "07e28edb80900c19c28f1072f2e8aeca7fa06b23cd4169cefe1af5aa3260783f" +checksum = "0cc23270f6e1808e30a928bdc84dea0b9b4136a8bc82338574f23baf47bbd280" [[package]] name = "gloo-net" @@ -430,7 +573,7 @@ dependencies = [ "pin-project", "serde", "serde_json", - "thiserror", + "thiserror 1.0.69", "wasm-bindgen", "wasm-bindgen-futures", "web-sys", @@ -463,9 +606,9 @@ dependencies = [ [[package]] name = "h2" -version = "0.4.7" +version = "0.4.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ccae279728d634d083c00f6099cb58f01cc99c145b84b8be2f6c74618d79922e" +checksum = "2f44da3a8150a6703ed5d34e164b875fd14c2cdab9af1252a9a1020bde2bdc54" dependencies = [ "atomic-waker", "bytes", @@ -482,24 +625,32 @@ dependencies = [ [[package]] name = "hashbrown" -version = "0.15.1" +version = "0.15.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3a9bfc1af68b1726ea47d3d5109de126281def866b33970e10fbab11b5dafab3" +checksum = "9229cfe53dfd69f0609a49f65461bd93001ea1ef889cd5529dd176593f5338a1" +dependencies = [ + "foldhash", +] + +[[package]] +name = "hashbrown" +version = "0.16.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "841d1cc9bed7f9236f321df977030373f4a4163ae1a7dbfe1a51a2c1a51d9100" [[package]] -name = "hermit-abi" -version = "0.3.9" +name = "heck" +version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d231dfb89cfffdbc30e7fc41579ed6066ad03abda9e567ccafae602b97ec5024" +checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea" [[package]] name = "http" -version = "1.1.0" 
+version = "1.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "21b9ddb458710bc376481b842f5da65cdf31522de232c1ca8146abce2a358258" +checksum = "e3ba2a386d7f85a81f119ad7498ebe444d2e22c2af0b86b069416ace48b3311a" dependencies = [ "bytes", - "fnv", "itoa", ] @@ -515,12 +666,12 @@ dependencies = [ [[package]] name = "http-body-util" -version = "0.1.2" +version = "0.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "793429d76616a256bcb62c2a2ec2bed781c8307e797e2598c50010f2bee2544f" +checksum = "b021d93e26becf5dc7e1b75b1bed1fd93124b374ceb73f43d4d4eafec896a64a" dependencies = [ "bytes", - "futures-util", + "futures-core", "http", "http-body", "pin-project-lite", @@ -528,9 +679,9 @@ dependencies = [ [[package]] name = "httparse" -version = "1.9.5" +version = "1.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7d71d3574edd2771538b901e6549113b4006ece66150fb69c0fb6d9a2adae946" +checksum = "6dbf3de79e51f3d586ab4cb9d5c3e2c14aa28ed23d180cf89b4df0454a69cc87" [[package]] name = "httpdate" @@ -540,13 +691,14 @@ checksum = "df3b46402a9d5adb4c86a0cf463f42e19994e3ee891101b1841f30a545cb49a9" [[package]] name = "hyper" -version = "1.5.2" +version = "1.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "256fb8d4bd6413123cc9d91832d78325c48ff41677595be797d90f42969beae0" +checksum = "2ab2d4f250c3d7b1c9fcdff1cece94ea4e2dfbec68614f7b87cb205f24ca9d11" dependencies = [ + "atomic-waker", "bytes", "futures-channel", - "futures-util", + "futures-core", "h2", "http", "http-body", @@ -554,6 +706,7 @@ dependencies = [ "httpdate", "itoa", "pin-project-lite", + "pin-utils", "smallvec", "tokio", "want", @@ -561,19 +714,18 @@ dependencies = [ [[package]] name = "hyper-rustls" -version = "0.27.5" +version = "0.27.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2d191583f3da1305256f22463b9bb0471acad48a4e534a5218b9963e9c1f59b2" +checksum = 
"e3c93eb611681b207e1fe55d5a71ecf91572ec8a6705cdb6857f7d8d5242cf58" dependencies = [ - "futures-util", "http", "hyper", "hyper-util", "log", "rustls", - "rustls-native-certs 0.8.1", + "rustls-native-certs", "rustls-pki-types", - "rustls-platform-verifier 0.5.0", + "rustls-platform-verifier 0.6.2", "tokio", "tokio-rustls", "tower-service", @@ -582,16 +734,20 @@ dependencies = [ [[package]] name = "hyper-util" -version = "0.1.10" +version = "0.1.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "df2dcfbe0677734ab2f3ffa7fa7bfd4706bfdc1ef393f2ee30184aed67e631b4" +checksum = "96547c2556ec9d12fb1578c4eaf448b04993e7fb79cbaad930a656880a6bdfa0" dependencies = [ + "base64", "bytes", "futures-channel", "futures-util", "http", "http-body", "hyper", + "ipnet", + "libc", + "percent-encoding", "pin-project-lite", "socket2", "tokio", @@ -601,21 +757,22 @@ dependencies = [ [[package]] name = "icu_collections" -version = "1.5.0" +version = "2.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "db2fa452206ebee18c4b5c2274dbf1de17008e874b4dc4f0aea9d01ca79e4526" +checksum = "4c6b649701667bbe825c3b7e6388cb521c23d88644678e83c0c4d0a621a34b43" dependencies = [ "displaydoc", + "potential_utf", "yoke", "zerofrom", "zerovec", ] [[package]] -name = "icu_locid" -version = "1.5.0" +name = "icu_locale_core" +version = "2.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "13acbb8371917fc971be86fc8057c41a64b521c184808a698c02acc242dbf637" +checksum = "edba7861004dd3714265b4db54a3c390e880ab658fec5f7db895fae2046b5bb6" dependencies = [ "displaydoc", "litemap", @@ -624,104 +781,72 @@ dependencies = [ "zerovec", ] -[[package]] -name = "icu_locid_transform" -version = "1.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "01d11ac35de8e40fdeda00d9e1e9d92525f3f9d887cdd7aa81d727596788b54e" -dependencies = [ - "displaydoc", - "icu_locid", - "icu_locid_transform_data", - "icu_provider", - 
"tinystr", - "zerovec", -] - -[[package]] -name = "icu_locid_transform_data" -version = "1.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fdc8ff3388f852bede6b579ad4e978ab004f139284d7b28715f773507b946f6e" - [[package]] name = "icu_normalizer" -version = "1.5.0" +version = "2.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "19ce3e0da2ec68599d193c93d088142efd7f9c5d6fc9b803774855747dc6a84f" +checksum = "5f6c8828b67bf8908d82127b2054ea1b4427ff0230ee9141c54251934ab1b599" dependencies = [ - "displaydoc", "icu_collections", "icu_normalizer_data", "icu_properties", "icu_provider", "smallvec", - "utf16_iter", - "utf8_iter", - "write16", "zerovec", ] [[package]] name = "icu_normalizer_data" -version = "1.5.0" +version = "2.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f8cafbf7aa791e9b22bec55a167906f9e1215fd475cd22adfcf660e03e989516" +checksum = "7aedcccd01fc5fe81e6b489c15b247b8b0690feb23304303a9e560f37efc560a" [[package]] name = "icu_properties" -version = "1.5.1" +version = "2.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "93d6020766cfc6302c15dbbc9c8778c37e62c14427cb7f6e601d849e092aeef5" +checksum = "020bfc02fe870ec3a66d93e677ccca0562506e5872c650f893269e08615d74ec" dependencies = [ - "displaydoc", "icu_collections", - "icu_locid_transform", + "icu_locale_core", "icu_properties_data", "icu_provider", - "tinystr", + "zerotrie", "zerovec", ] [[package]] name = "icu_properties_data" -version = "1.5.0" +version = "2.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "67a8effbc3dd3e4ba1afa8ad918d5684b8868b3b26500753effea8d2eed19569" +checksum = "616c294cf8d725c6afcd8f55abc17c56464ef6211f9ed59cccffe534129c77af" [[package]] name = "icu_provider" -version = "1.5.0" +version = "2.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"6ed421c8a8ef78d3e2dbc98a973be2f3770cb42b606e3ab18d6237c4dfde68d9" +checksum = "85962cf0ce02e1e0a629cc34e7ca3e373ce20dda4c4d7294bbd0bf1fdb59e614" dependencies = [ "displaydoc", - "icu_locid", - "icu_provider_macros", - "stable_deref_trait", - "tinystr", + "icu_locale_core", "writeable", "yoke", "zerofrom", + "zerotrie", "zerovec", ] [[package]] -name = "icu_provider_macros" -version = "1.5.0" +name = "id-arena" +version = "2.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1ec89e9337638ecdc08744df490b221a7399bf8d164eb52a665454e60e075ad6" -dependencies = [ - "proc-macro2", - "quote", - "syn", -] +checksum = "3d3067d79b975e8844ca9eb072e16b31c3c1c36928edf9c6789548c524d0d954" [[package]] name = "idna" -version = "1.0.3" +version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "686f825264d630750a544639377bae737628043f20d38bbc029e8f29ea968a7e" +checksum = "3b0875f23caa03898994f6ddc501886a45c7d3d62d04d2d90788d47be1b1e4de" dependencies = [ "idna_adapter", "smallvec", @@ -730,9 +855,9 @@ dependencies = [ [[package]] name = "idna_adapter" -version = "1.2.0" +version = "1.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "daca1df1c957320b2cf139ac61e7bd64fed304c5040df000a745aa1de3b4ef71" +checksum = "3acae9609540aa318d1bc588455225fb2085b9ed0c4f6bd0d9d5bcd86f1a0344" dependencies = [ "icu_normalizer", "icu_properties", @@ -740,34 +865,47 @@ dependencies = [ [[package]] name = "indexmap" -version = "2.6.0" +version = "2.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "707907fe3c25f5424cce2cb7e1cbcafee6bdbe735ca90ef77c29e84591e5b9da" +checksum = "7714e70437a7dc3ac8eb7e6f8df75fd8eb422675fc7678aff7364301092b1017" dependencies = [ "equivalent", - "hashbrown", + "hashbrown 0.16.1", + "serde", + "serde_core", ] [[package]] -name = "itoa" -version = "1.0.13" +name = "ipnet" +version = "2.11.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "540654e97a3f4470a492cd30ff187bc95d89557a903a2bbf112e2fae98104ef2" +checksum = "469fb0b9cefa57e3ef31275ee7cacb78f2fdca44e4765491884a2b119d4eb130" [[package]] -name = "jni" -version = "0.19.0" +name = "iri-string" +version = "0.7.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c6df18c2e3db7e453d3c6ac5b3e9d5182664d28788126d39b91f2d1e22b017ec" +checksum = "c91338f0783edbd6195decb37bae672fd3b165faffb89bf7b9e6942f8b1a731a" dependencies = [ - "cesu8", - "combine", - "jni-sys", - "log", - "thiserror", - "walkdir", + "memchr", + "serde", +] + +[[package]] +name = "itertools" +version = "0.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "413ee7dfc52ee1a4949ceeb7dbc8a33f2d6c088194d9f922fb8318faf1f01186" +dependencies = [ + "either", ] +[[package]] +name = "itoa" +version = "1.0.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "92ecc6618181def0457392ccd0ee51198e065e016d1d527a7ac1b6dc7c1f09d2" + [[package]] name = "jni" version = "0.21.1" @@ -779,7 +917,7 @@ dependencies = [ "combine", "jni-sys", "log", - "thiserror", + "thiserror 1.0.69", "walkdir", "windows-sys 0.45.0", ] @@ -790,20 +928,42 @@ version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8eaf4bc02d17cbdd7ff4c7438cafcdf7fb9a4613313ad11b4f8fefe7d3fa0130" +[[package]] +name = "jobserver" +version = "0.1.34" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9afb3de4395d6b3e67a780b6de64b51c978ecf11cb9a462c66be7d4ca9039d33" +dependencies = [ + "getrandom 0.3.4", + "libc", +] + [[package]] name = "js-sys" -version = "0.3.72" +version = "0.3.85" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6a88f1bda2bd75b0452a14784937d796722fdebfe50df998aeb3f0b7603019a9" +checksum = "8c942ebf8e95485ca0d52d97da7c5a2c387d0e7f0ba4c35e93bfcaee045955b3" dependencies = [ + 
"once_cell", "wasm-bindgen", ] +[[package]] +name = "jsonl" +version = "4.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1abae98f45234fc1980c198798166a80ad6b35eb5b7db4caa7bc72ff919e6b80" +dependencies = [ + "serde", + "serde_json", + "thiserror 1.0.69", +] + [[package]] name = "jsonrpsee" -version = "0.24.7" +version = "0.26.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c5c71d8c1a731cc4227c2f698d377e7848ca12c8a48866fc5e6951c43a4db843" +checksum = "3f3f48dc3e6b8bd21e15436c1ddd0bc22a6a54e8ec46fedd6adf3425f396ec6a" dependencies = [ "jsonrpsee-client-transport", "jsonrpsee-core", @@ -817,9 +977,9 @@ dependencies = [ [[package]] name = "jsonrpsee-client-transport" -version = "0.24.7" +version = "0.26.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "548125b159ba1314104f5bb5f38519e03a41862786aa3925cf349aae9cdd546e" +checksum = "cf36eb27f8e13fa93dcb50ccb44c417e25b818cfa1a481b5470cd07b19c60b98" dependencies = [ "base64", "futures-channel", @@ -830,9 +990,9 @@ dependencies = [ "pin-project", "rustls", "rustls-pki-types", - "rustls-platform-verifier 0.3.4", + "rustls-platform-verifier 0.5.3", "soketto", - "thiserror", + "thiserror 2.0.17", "tokio", "tokio-rustls", "tokio-util", @@ -842,9 +1002,9 @@ dependencies = [ [[package]] name = "jsonrpsee-core" -version = "0.24.7" +version = "0.26.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f2882f6f8acb9fdaec7cefc4fd607119a9bd709831df7d7672a1d3b644628280" +checksum = "316c96719901f05d1137f19ba598b5fe9c9bc39f4335f67f6be8613921946480" dependencies = [ "async-trait", "bytes", @@ -856,24 +1016,24 @@ dependencies = [ "jsonrpsee-types", "parking_lot", "pin-project", - "rand", + "rand 0.9.2", "rustc-hash", "serde", "serde_json", - "thiserror", + "thiserror 2.0.17", "tokio", "tokio-stream", + "tower", "tracing", "wasm-bindgen-futures", ] [[package]] name = "jsonrpsee-http-client" -version = "0.24.7" 
+version = "0.26.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b3638bc4617f96675973253b3a45006933bde93c2fd8a6170b33c777cc389e5b" +checksum = "790bedefcec85321e007ff3af84b4e417540d5c87b3c9779b9e247d1bcc3dab8" dependencies = [ - "async-trait", "base64", "http-body", "hyper", @@ -882,21 +1042,20 @@ dependencies = [ "jsonrpsee-core", "jsonrpsee-types", "rustls", - "rustls-platform-verifier 0.3.4", + "rustls-platform-verifier 0.5.3", "serde", "serde_json", - "thiserror", + "thiserror 2.0.17", "tokio", "tower", - "tracing", "url", ] [[package]] name = "jsonrpsee-server" -version = "0.24.7" +version = "0.26.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "82ad8ddc14be1d4290cd68046e7d1d37acd408efed6d3ca08aefcc3ad6da069c" +checksum = "4c51b7c290bb68ce3af2d029648148403863b982f138484a73f02a9dd52dbd7f" dependencies = [ "futures-util", "http", @@ -911,7 +1070,7 @@ dependencies = [ "serde", "serde_json", "soketto", - "thiserror", + "thiserror 2.0.17", "tokio", "tokio-stream", "tokio-util", @@ -921,133 +1080,157 @@ dependencies = [ [[package]] name = "jsonrpsee-types" -version = "0.24.7" +version = "0.26.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a178c60086f24cc35bb82f57c651d0d25d99c4742b4d335de04e97fa1f08a8a1" +checksum = "bc88ff4688e43cc3fa9883a8a95c6fa27aa2e76c96e610b737b6554d650d7fd5" dependencies = [ "http", "serde", "serde_json", - "thiserror", + "thiserror 2.0.17", ] [[package]] name = "jsonrpsee-wasm-client" -version = "0.24.7" +version = "0.26.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1a01cd500915d24ab28ca17527e23901ef1be6d659a2322451e1045532516c25" +checksum = "7902885de4779f711a95d82c8da2d7e5f9f3a7c7cfa44d51c067fd1c29d72a3c" dependencies = [ "jsonrpsee-client-transport", "jsonrpsee-core", "jsonrpsee-types", + "tower", ] [[package]] name = "jsonrpsee-ws-client" -version = "0.24.7" +version = "0.26.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "0fe322e0896d0955a3ebdd5bf813571c53fea29edd713bc315b76620b327e86d" +checksum = "9b6fceceeb05301cc4c065ab3bd2fa990d41ff4eb44e4ca1b30fa99c057c3e79" dependencies = [ "http", "jsonrpsee-client-transport", "jsonrpsee-core", "jsonrpsee-types", + "tower", "url", ] [[package]] -name = "lazy_static" -version = "1.5.0" +name = "leb128fmt" +version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe" +checksum = "09edd9e8b54e49e587e4f6295a7d29c3ea94d469cb40ab8ca70b288248a81db2" [[package]] name = "libc" -version = "0.2.164" +version = "0.2.182" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "433bfe06b8c75da9b2e3fbea6e5329ff87748f0b144ef75306e674c3f6f7c13f" +checksum = "6800badb6cb2082ffd7b6a67e6125bb39f18782f793520caee8cb8846be06112" [[package]] -name = "linux-raw-sys" -version = "0.4.14" +name = "libloading" +version = "0.8.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "78b3ae25bc7c8c38cec158d1f2757ee79e9b3740fbc7ccf0e59e4b08d793fa89" +checksum = "d7c4b02199fee7c5d21a5ae7d8cfa79a6ef5bb2fc834d6e9058e89c825efdc55" +dependencies = [ + "cfg-if", + "windows-link", +] [[package]] -name = "litemap" -version = "0.7.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4ee93343901ab17bd981295f2cf0026d4ad018c7c31ba84549a4ddbb47a45104" +name = "libversion-sys" +version = "0.1.0" +source = "git+https://github.com/DUpdateSystem/libversion-sys?rev=68391515ac8f555ca86bc5dfcd980f66352e414d#68391515ac8f555ca86bc5dfcd980f66352e414d" +dependencies = [ + "bindgen", + "cc", + "cmake", +] + +[[package]] +name = "linux-raw-sys" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "32a66949e030da00e8c7d4434b251670a91556f4144941d37452769c25d58a53" + +[[package]] +name = "litemap" +version = "0.8.1" 
+source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6373607a59f0be73a39b6fe456b8192fcc3585f602af20751600e974dd455e77" [[package]] name = "lock_api" -version = "0.4.12" +version = "0.4.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "07af8b9cdd281b7915f413fa73f29ebd5d55d0d3f0155584dade1ff18cea1b17" +checksum = "224399e74b87b5f3557511d98dff8b14089b3dadafcab6bb93eab67d3aace965" dependencies = [ - "autocfg", "scopeguard", ] [[package]] name = "log" -version = "0.4.22" +version = "0.4.29" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5e5032e24019045c762d3c0f28f5b6b8bbf38563a65908389bf7978758920897" + +[[package]] +name = "lru-slab" +version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a7a70ba024b9dc04c27ea2f0c0548feb474ec5c54bba33a7f72f873a39d07b24" +checksum = "112b39cec0b298b6c1999fee3e31427f74f676e4cb9879ed1a121b43661a4154" [[package]] name = "markdown" -version = "1.0.0-alpha.21" +version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a6491e6c702bf7e3b24e769d800746d5f2c06a6c6a2db7992612e0f429029e81" +checksum = "a5cab8f2cadc416a82d2e783a1946388b31654d391d1c7d92cc1f03e295b1deb" dependencies = [ "unicode-id", ] [[package]] name = "memchr" -version = "2.7.4" +version = "2.7.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "78ca9ab1a0babb1e7d5695e3530886289c18cf2f87ec19a575a0abdce112e3a3" +checksum = "f52b00d39961fc5b2736ea853c9cc86238e165017a493d1d5c8eac6bdc4cc273" [[package]] -name = "miniz_oxide" -version = "0.8.0" +name = "minimal-lexical" +version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e2d80299ef12ff69b16a84bb182e3b9df68b5a91574d3d4fa6e41b65deec4df1" -dependencies = [ - "adler2", -] +checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a" [[package]] name = "mio" -version = "1.0.2" +version 
= "1.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "80e04d1dcff3aae0704555fe5fee3bcfaf3d1fdf8a7e521d5b9d2b42acb52cec" +checksum = "a69bcab0ad47271a0234d9422b131806bf3968021e5dc9328caf2d4cd58557fc" dependencies = [ - "hermit-abi", "libc", "wasi", - "windows-sys 0.52.0", + "windows-sys 0.61.2", ] [[package]] name = "mockito" -version = "1.6.1" +version = "1.7.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "652cd6d169a36eaf9d1e6bce1a221130439a966d7f27858af66a33a66e9c4ee2" +checksum = "90820618712cab19cfc46b274c6c22546a82affcb3c3bdf0f29e3db8e1bb92c0" dependencies = [ "assert-json-diff", "bytes", "colored", - "futures-util", + "futures-core", "http", "http-body", "http-body-util", "hyper", "hyper-util", "log", - "rand", + "pin-project-lite", + "rand 0.9.2", "regex", "serde_json", "serde_urlencoded", @@ -1056,59 +1239,44 @@ dependencies = [ ] [[package]] -name = "num-bigint" -version = "0.4.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a5e44f723f1133c9deac646763579fdb3ac745e418f2a7af9cd0c431da1f20b9" -dependencies = [ - "num-integer", - "num-traits", -] - -[[package]] -name = "num-integer" -version = "0.1.46" +name = "nix" +version = "0.29.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7969661fd2958a5cb096e56c8e1ad0444ac2bbcd0061bd28660485a44879858f" +checksum = "71e2746dc3a24dd78b3cfcb7be93368c6de9963d30f43a6a73998a9cf4b17b46" dependencies = [ - "num-traits", -] - -[[package]] -name = "num-traits" -version = "0.2.19" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "071dfc062690e90b734c0b2273ce72ad0ffa95f0c74596bc250dcfd960262841" -dependencies = [ - "autocfg", + "bitflags", + "cfg-if", + "cfg_aliases", + "libc", ] [[package]] -name = "object" -version = "0.36.5" +name = "nom" +version = "7.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"aedf0a2d09c573ed1d8d85b30c119153926a2b36dce0ab28322c09a117a4683e" +checksum = "d273983c5a657a70a3e8f2a01329822f3b8c8172b73826411a55751e404a0a4a" dependencies = [ "memchr", + "minimal-lexical", ] [[package]] name = "once_cell" -version = "1.20.2" +version = "1.21.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1261fe7e33c73b354eab43b1273a57c8f967d0391e80353e51f764ac02cf6775" +checksum = "42f5e15c9953c5e4ccceeb2e7382a716482c34515315f7b03532b8b4e8393d2d" [[package]] name = "openssl-probe" -version = "0.1.5" +version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ff011a302c396a5197692431fc1948019154afc178baf7d8e37367442a4601cf" +checksum = "9f50d9b3dabb09ecd771ad0aa242ca6894994c130308ca3d7684634df8037391" [[package]] name = "parking_lot" -version = "0.12.3" +version = "0.12.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f1bf18183cf54e8d6059647fc3063646a1801cf30896933ec2311622cc4b9a27" +checksum = "93857453250e3077bd71ff98b6a65ea6621a19bb0f559a85248955ac12c45a1a" dependencies = [ "lock_api", "parking_lot_core", @@ -1116,15 +1284,15 @@ dependencies = [ [[package]] name = "parking_lot_core" -version = "0.9.10" +version = "0.9.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e401f977ab385c9e4e3ab30627d6f26d00e2c73eef317493c4ec6d468726cf8" +checksum = "2621685985a2ebf1c516881c026032ac7deafcda1a2c9b7850dc81e3dfcb64c1" dependencies = [ "cfg-if", "libc", "redox_syscall", "smallvec", - "windows-targets 0.52.6", + "windows-link", ] [[package]] @@ -1135,24 +1303,24 @@ checksum = "57c0d7b74b563b49d38dae00a0c37d4d6de9b432382b2892f0574ddcae73fd0a" [[package]] name = "percent-encoding" -version = "2.3.1" +version = "2.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e" +checksum = "9b4f627cb1b25917193a259e49bdad08f671f8d9708acfd5fe0a8c1455d87220" 
[[package]] name = "pin-project" -version = "1.1.7" +version = "1.1.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "be57f64e946e500c8ee36ef6331845d40a93055567ec57e8fae13efd33759b95" +checksum = "677f1add503faace112b9f1373e43e9e054bfdd22ff1a63c1bc485eaec6a6a8a" dependencies = [ "pin-project-internal", ] [[package]] name = "pin-project-internal" -version = "1.1.7" +version = "1.1.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3c0f5fad0874fc7abcd4d750e76917eaebbecaa2c20bde22e1dbeeba8beb758c" +checksum = "6e918e4ff8c4549eb882f14b3a4bc8c8bc93de829416eacf579f1207a8fbf861" dependencies = [ "proc-macro2", "quote", @@ -1161,9 +1329,9 @@ dependencies = [ [[package]] name = "pin-project-lite" -version = "0.2.15" +version = "0.2.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "915a1e146535de9163f3987b8944ed8cf49a18bb0056bcebcdcece385cece4ff" +checksum = "3b3cff922bd51709b605d9ead9aa71031d81447142d828eb4a6eba76fe619f9b" [[package]] name = "pin-utils" @@ -1171,44 +1339,125 @@ version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" +[[package]] +name = "potential_utf" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b73949432f5e2a09657003c25bca5e19a0e9c84f8058ca374f49e0ebe605af77" +dependencies = [ + "zerovec", +] + [[package]] name = "ppv-lite86" -version = "0.2.20" +version = "0.2.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "77957b295656769bb8ad2b6a6b09d897d94f05c41b069aede1fcdaa675eaea04" +checksum = "85eae3c4ed2f50dcfe72643da4befc30deadb458a9b590d720cde2f2b1e97da9" dependencies = [ "zerocopy", ] +[[package]] +name = "prettyplease" +version = "0.2.37" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"479ca8adacdd7ce8f1fb39ce9ecccbfe93a3f1344b3d0d97f20bc0196208f62b" +dependencies = [ + "proc-macro2", + "syn", +] + [[package]] name = "proc-macro2" -version = "1.0.92" +version = "1.0.105" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "37d3544b3f2748c54e147655edb5025752e2303145b5aefb3c3ea2c78b973bb0" +checksum = "535d180e0ecab6268a3e718bb9fd44db66bbbc256257165fc699dadf70d16fe7" dependencies = [ "unicode-ident", ] [[package]] name = "quick-xml" -version = "0.37.1" +version = "0.39.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f22f29bdff3987b4d8632ef95fd6424ec7e4e0a57e2f4fc63e489e75357f6a03" +checksum = "958f21e8e7ceb5a1aa7fa87fab28e7c75976e0bfe7e23ff069e0a260f894067d" dependencies = [ "encoding_rs", "memchr", "tokio", ] +[[package]] +name = "quinn" +version = "0.11.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b9e20a958963c291dc322d98411f541009df2ced7b5a4f2bd52337638cfccf20" +dependencies = [ + "bytes", + "cfg_aliases", + "pin-project-lite", + "quinn-proto", + "quinn-udp", + "rustc-hash", + "rustls", + "socket2", + "thiserror 2.0.17", + "tokio", + "tracing", + "web-time", +] + +[[package]] +name = "quinn-proto" +version = "0.11.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f1906b49b0c3bc04b5fe5d86a77925ae6524a19b816ae38ce1e426255f1d8a31" +dependencies = [ + "aws-lc-rs", + "bytes", + "getrandom 0.3.4", + "lru-slab", + "rand 0.9.2", + "ring", + "rustc-hash", + "rustls", + "rustls-pki-types", + "slab", + "thiserror 2.0.17", + "tinyvec", + "tracing", + "web-time", +] + +[[package]] +name = "quinn-udp" +version = "0.5.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "addec6a0dcad8a8d96a771f815f0eaf55f9d1805756410b39f5fa81332574cbd" +dependencies = [ + "cfg_aliases", + "libc", + "once_cell", + "socket2", + "tracing", + "windows-sys 0.60.2", +] + [[package]] name = "quote" -version = "1.0.37" +version = 
"1.0.43" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b5b9d34b8991d19d98081b46eacdd8eb58c6f2b201139f7c5f643cc155a633af" +checksum = "dc74d9a594b72ae6656596548f56f667211f8a97b3d4c3d467150794690dc40a" dependencies = [ "proc-macro2", ] +[[package]] +name = "r-efi" +version = "5.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "69cdb34c158ceb288df11e18b4bd39de994f6657d83847bdffdbd7f346754b0f" + [[package]] name = "rand" version = "0.8.5" @@ -1216,8 +1465,29 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" dependencies = [ "libc", - "rand_chacha", - "rand_core", + "rand_chacha 0.3.1", + "rand_core 0.6.4", +] + +[[package]] +name = "rand" +version = "0.9.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6db2770f06117d490610c7488547d543617b21bfa07796d7a12f6f1bd53850d1" +dependencies = [ + "rand_chacha 0.9.0", + "rand_core 0.9.3", +] + +[[package]] +name = "rand" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bc266eb313df6c5c09c1c7b1fbe2510961e5bcd3add930c1e31f7ed9da0feff8" +dependencies = [ + "chacha20", + "getrandom 0.4.1", + "rand_core 0.10.0", ] [[package]] @@ -1227,7 +1497,17 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88" dependencies = [ "ppv-lite86", - "rand_core", + "rand_core 0.6.4", +] + +[[package]] +name = "rand_chacha" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d3022b5f1df60f26e1ffddd6c66e8aa15de382ae63b3a0c1bfc0e4d3e3f325cb" +dependencies = [ + "ppv-lite86", + "rand_core 0.9.3", ] [[package]] @@ -1236,23 +1516,38 @@ version = "0.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" dependencies = [ - "getrandom", + "getrandom 0.2.16", +] + +[[package]] +name = "rand_core" +version = "0.9.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "99d9a13982dcf210057a8a78572b2217b667c3beacbf3a0d8b454f6f82837d38" +dependencies = [ + "getrandom 0.3.4", ] +[[package]] +name = "rand_core" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0c8d0fd677905edcbeedbf2edb6494d676f0e98d54d5cf9bda0b061cb8fb8aba" + [[package]] name = "redox_syscall" -version = "0.5.7" +version = "0.5.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9b6dfecf2c74bce2466cabf93f6664d6998a69eb21e39f4207930065b27b771f" +checksum = "ed2bf2547551a7053d6fdfafda3f938979645c44812fbfcda098faae3f1a362d" dependencies = [ "bitflags", ] [[package]] name = "regex" -version = "1.11.1" +version = "1.12.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b544ef1b4eac5dc2db33ea63606ae9ffcfac26c1416a2806ae0bf5f56b201191" +checksum = "e10754a14b9137dd7b1e3e5b0493cc9171fdd105e0ab477f51b72e7f3ac0e276" dependencies = [ "aho-corasick", "memchr", @@ -1262,9 +1557,9 @@ dependencies = [ [[package]] name = "regex-automata" -version = "0.4.9" +version = "0.4.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "809e8dc61f6de73b46c85f4c96486310fe304c434cfa43669d7b40f711150908" +checksum = "5276caf25ac86c8d810222b3dbb938e512c55c6831a10f3e6ed1c93b84041f1c" dependencies = [ "aho-corasick", "memchr", @@ -1273,21 +1568,60 @@ dependencies = [ [[package]] name = "regex-syntax" -version = "0.8.5" +version = "0.8.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2b15c43186be67a4fd63bee50d0303afffcef381492ebe2c5d87f324e1b8815c" +checksum = "7a2d987857b319362043e95f5353c0535c1f58eec5336fdfcf626430af7def58" + +[[package]] +name = "reqwest" +version = "0.13.2" 
+source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ab3f43e3283ab1488b624b44b0e988d0acea0b3214e694730a055cb6b2efa801" +dependencies = [ + "base64", + "bytes", + "futures-core", + "futures-util", + "http", + "http-body", + "http-body-util", + "hyper", + "hyper-rustls", + "hyper-util", + "js-sys", + "log", + "percent-encoding", + "pin-project-lite", + "quinn", + "rustls", + "rustls-pki-types", + "rustls-platform-verifier 0.6.2", + "serde", + "serde_json", + "sync_wrapper", + "tokio", + "tokio-rustls", + "tokio-util", + "tower", + "tower-http", + "tower-service", + "url", + "wasm-bindgen", + "wasm-bindgen-futures", + "wasm-streams", + "web-sys", +] [[package]] name = "ring" -version = "0.17.8" +version = "0.17.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c17fa4cb658e3583423e915b9f3acc01cceaee1860e33d59ebae66adc3a2dc0d" +checksum = "a4689e6c2294d81e88dc6261c768b63bc4fcdb852be6d1352498b114f61383b7" dependencies = [ "cc", "cfg-if", - "getrandom", + "getrandom 0.2.16", "libc", - "spin", "untrusted", "windows-sys 0.52.0", ] @@ -1298,37 +1632,32 @@ version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "afab94fb28594581f62d981211a9a4d53cc8130bbcbbb89a0440d9b8e81a7746" -[[package]] -name = "rustc-demangle" -version = "0.1.24" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "719b953e2095829ee67db738b3bfa9fa368c94900df327b3f07fe6e794d2fe1f" - [[package]] name = "rustc-hash" -version = "2.0.0" +version = "2.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "583034fd73374156e66797ed8e5b0d5690409c9226b22d87cb7f19821c05d152" +checksum = "357703d41365b4b27c590e3ed91eabb1b663f07c4c084095e60cbed4362dff0d" [[package]] name = "rustix" -version = "0.38.41" +version = "1.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d7f649912bc1495e167a6edee79151c84b1bad49748cb4f1f1167f459f6224f6" 
+checksum = "b6fe4565b9518b83ef4f91bb47ce29620ca828bd32cb7e408f0062e9930ba190" dependencies = [ "bitflags", "errno", "libc", "linux-raw-sys", - "windows-sys 0.52.0", + "windows-sys 0.60.2", ] [[package]] name = "rustls" -version = "0.23.20" +version = "0.23.36" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5065c3f250cbd332cd894be57c40fa52387247659b14a2d6041d121547903b1b" +checksum = "c665f33d38cea657d9614f766881e4d510e0eda4239891eea56b4cadcf01801b" dependencies = [ + "aws-lc-rs", "log", "once_cell", "ring", @@ -1340,84 +1669,66 @@ dependencies = [ [[package]] name = "rustls-native-certs" -version = "0.7.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e5bfb394eeed242e909609f56089eecfe5fda225042e8b171791b9c95f5931e5" -dependencies = [ - "openssl-probe", - "rustls-pemfile", - "rustls-pki-types", - "schannel", - "security-framework 2.11.1", -] - -[[package]] -name = "rustls-native-certs" -version = "0.8.1" +version = "0.8.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7fcff2dd52b58a8d98a70243663a0d234c4e2b79235637849d15913394a247d3" +checksum = "612460d5f7bea540c490b2b6395d8e34a953e52b491accd6c86c8164c5932a63" dependencies = [ "openssl-probe", "rustls-pki-types", "schannel", - "security-framework 3.0.1", + "security-framework", ] [[package]] -name = "rustls-pemfile" -version = "2.2.0" +name = "rustls-pki-types" +version = "1.13.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dce314e5fee3f39953d46bb63bb8a46d40c2f8fb7cc5a3b6cab2bde9721d6e50" +checksum = "21e6f2ab2928ca4291b86736a8bd920a277a399bba1589409d72154ff87c1282" dependencies = [ - "rustls-pki-types", + "web-time", + "zeroize", ] -[[package]] -name = "rustls-pki-types" -version = "1.10.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "16f1201b3c9a7ee8039bcadc17b7e605e2945b27eee7631788c1bd2b0643674b" - [[package]] name = "rustls-platform-verifier" 
-version = "0.3.4" +version = "0.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "afbb878bdfdf63a336a5e63561b1835e7a8c91524f51621db870169eac84b490" +checksum = "19787cda76408ec5404443dc8b31795c87cd8fec49762dc75fa727740d34acc1" dependencies = [ - "core-foundation 0.9.4", + "core-foundation", "core-foundation-sys", - "jni 0.19.0", + "jni", "log", "once_cell", "rustls", - "rustls-native-certs 0.7.3", + "rustls-native-certs", "rustls-platform-verifier-android", "rustls-webpki", - "security-framework 2.11.1", + "security-framework", "security-framework-sys", - "webpki-roots", - "winapi", + "webpki-root-certs 0.26.11", + "windows-sys 0.52.0", ] [[package]] name = "rustls-platform-verifier" -version = "0.5.0" +version = "0.6.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e012c45844a1790332c9386ed4ca3a06def221092eda277e6f079728f8ea99da" +checksum = "1d99feebc72bae7ab76ba994bb5e121b8d83d910ca40b36e0921f53becc41784" dependencies = [ - "core-foundation 0.10.0", + "core-foundation", "core-foundation-sys", - "jni 0.21.1", + "jni", "log", "once_cell", "rustls", - "rustls-native-certs 0.8.1", + "rustls-native-certs", "rustls-platform-verifier-android", "rustls-webpki", - "security-framework 3.0.1", + "security-framework", "security-framework-sys", - "webpki-root-certs", - "windows-sys 0.52.0", + "webpki-root-certs 1.0.5", + "windows-sys 0.60.2", ] [[package]] @@ -1428,20 +1739,27 @@ checksum = "f87165f0995f63a9fbeea62b64d10b4d9d8e78ec6d7d51fb2125fda7bb36788f" [[package]] name = "rustls-webpki" -version = "0.102.8" +version = "0.103.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "64ca1bc8749bd4cf37b5ce386cc146580777b4e8572c7b97baf22c83f444bee9" +checksum = "2ffdfa2f5286e2247234e03f680868ac2815974dc39e00ea15adc445d0aafe52" dependencies = [ + "aws-lc-rs", "ring", "rustls-pki-types", "untrusted", ] +[[package]] +name = "rustversion" +version = "1.0.22" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "b39cdef0fa800fc44525c84ccb54a029961a8215f9619753635a9c0d2538d46d" + [[package]] name = "ryu" -version = "1.0.18" +version = "1.0.22" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f3cb5ba0dc43242ce17de99c180e96db90b235b8a9fdc9543c96d2209116bd9f" +checksum = "a50f4cf475b65d88e057964e0e9bb1f0aa9bbb2036dc65c64596b42932536984" [[package]] name = "same-file" @@ -1452,13 +1770,22 @@ dependencies = [ "winapi-util", ] +[[package]] +name = "scc" +version = "2.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "46e6f046b7fef48e2660c57ed794263155d713de679057f2d0c169bfc6e756cc" +dependencies = [ + "sdd", +] + [[package]] name = "schannel" -version = "0.1.27" +version = "0.1.28" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1f29ebaa345f945cec9fbbc532eb307f0fdad8161f281b6369539c8d84876b3d" +checksum = "891d81b926048e76efe18581bf793546b4c0eaf8448d72be8de2bbee5fd166e1" dependencies = [ - "windows-sys 0.59.0", + "windows-sys 0.61.2", ] [[package]] @@ -1468,27 +1795,19 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" [[package]] -name = "security-framework" -version = "2.11.1" +name = "sdd" +version = "3.0.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "897b2245f0b511c87893af39b033e5ca9cce68824c4d7e7630b5a1d339658d02" -dependencies = [ - "bitflags", - "core-foundation 0.9.4", - "core-foundation-sys", - "libc", - "num-bigint", - "security-framework-sys", -] +checksum = "490dcfcbfef26be6800d11870ff2df8774fa6e86d047e3e8c8a76b25655e41ca" [[package]] name = "security-framework" -version = "3.0.1" +version = "3.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e1415a607e92bec364ea2cf9264646dcce0f91e6d65281bd6f2819cca3bf39c8" +checksum = 
"b3297343eaf830f66ede390ea39da1d462b6b0c1b000f420d0a83f898bbbe6ef" dependencies = [ "bitflags", - "core-foundation 0.10.0", + "core-foundation", "core-foundation-sys", "libc", "security-framework-sys", @@ -1496,14 +1815,20 @@ dependencies = [ [[package]] name = "security-framework-sys" -version = "2.12.1" +version = "2.15.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fa39c7303dc58b5543c94d22c1766b0d31f2ee58306363ea622b10bbc075eaa2" +checksum = "cc1f0cbffaac4852523ce30d8bd3c5cdc873501d96ff467ca09b6767bb8cd5c0" dependencies = [ "core-foundation-sys", "libc", ] +[[package]] +name = "semver" +version = "1.0.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d767eb0aabc880b29956c35734170f26ed551a859dbd361d140cdbeca61ab1e2" + [[package]] name = "send_wrapper" version = "0.4.0" @@ -1512,18 +1837,28 @@ checksum = "f638d531eccd6e23b980caf34876660d38e265409d8e99b397ab71eb3612fad0" [[package]] name = "serde" -version = "1.0.216" +version = "1.0.228" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9a8e94ea7f378bd32cbbd37198a4a91436180c5bb472411e48b5ec2e2124ae9e" +dependencies = [ + "serde_core", + "serde_derive", +] + +[[package]] +name = "serde_core" +version = "1.0.228" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0b9781016e935a97e8beecf0c933758c97a5520d32930e460142b4cd80c6338e" +checksum = "41d385c7d4ca58e59fc732af25c3983b67ac852c1a25000afe1175de458b67ad" dependencies = [ "serde_derive", ] [[package]] name = "serde_derive" -version = "1.0.216" +version = "1.0.228" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "46f859dbbf73865c6627ed570e78961cd3ac92407a2d117204c49232485da55e" +checksum = "d540f220d3187173da220f885ab66608367b6574e925011a9353e4badda91d79" dependencies = [ "proc-macro2", "quote", @@ -1532,14 +1867,15 @@ dependencies = [ [[package]] name = "serde_json" -version = "1.0.134" +version = "1.0.149" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "d00f4175c42ee48b15416f6193a959ba3a0d67fc699a0db9ad12df9f83991c7d" +checksum = "83fc039473c5595ace860d8c4fafa220ff474b3fc6bfdb4293327f1a37e94d86" dependencies = [ "itoa", "memchr", - "ryu", "serde", + "serde_core", + "zmij", ] [[package]] @@ -1554,6 +1890,32 @@ dependencies = [ "serde", ] +[[package]] +name = "serial_test" +version = "3.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "911bd979bf1070a3f3aa7b691a3b3e9968f339ceeec89e08c280a8a22207a32f" +dependencies = [ + "futures-executor", + "futures-util", + "log", + "once_cell", + "parking_lot", + "scc", + "serial_test_derive", +] + +[[package]] +name = "serial_test_derive" +version = "3.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0a7d91949b85b0d2fb687445e448b40d322b6b3e4af6b44a29b21d9a5f33e6d9" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + [[package]] name = "sha1" version = "0.10.6" @@ -1561,7 +1923,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e3bf829a2d51ab4a5ddf1352d8470c140cadc8301b2ae1789db023f01cedd6ba" dependencies = [ "cfg-if", - "cpufeatures", + "cpufeatures 0.2.17", "digest", ] @@ -1573,49 +1935,47 @@ checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64" [[package]] name = "signal-hook-registry" -version = "1.4.2" +version = "1.4.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a9e9e0b4211b72e7b8b6e85c807d36c212bdb33ea8587f7569562a84df5465b1" +checksum = "c4db69cba1110affc0e9f7bcd48bbf87b3f4fc7c61fc9155afd4c469eb3d6c1b" dependencies = [ + "errno", "libc", ] [[package]] name = "similar" -version = "2.6.0" +version = "2.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1de1d4f81173b03af4c0cbed3c898f6bff5b870e4a7f5d6f4057d62a7a4b686e" +checksum = "bbbb5d9659141646ae647b42fe094daf6c6192d1620870b449d9557f748b2daa" [[package]] name 
= "slab" -version = "0.4.9" +version = "0.4.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8f92a496fb766b417c996b9c5e57daf2f7ad3b0bebe1ccfca4856390e3d3bb67" -dependencies = [ - "autocfg", -] +checksum = "7a2ae44ef20feb57a68b23d846850f861394c2e02dc425a50098ae8c90267589" [[package]] name = "smallvec" -version = "1.13.2" +version = "1.15.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3c5e1a9a646d36c3599cd173a41282daf47c44583ad367b8e6837255952e5c67" +checksum = "67b1b7a3b5fe4f1376887184045fcf45c69e92af734b7aaddc05fb777b6fbd03" [[package]] name = "socket2" -version = "0.5.7" +version = "0.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ce305eb0b4296696835b71df73eb912e0f1ffd2556a501fcede6e0c50349191c" +checksum = "17129e116933cf371d018bb80ae557e889637989d8638274fb25622827b03881" dependencies = [ "libc", - "windows-sys 0.52.0", + "windows-sys 0.60.2", ] [[package]] name = "soketto" -version = "0.8.0" +version = "0.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "37468c595637c10857701c990f93a40ce0e357cedb0953d1c26c8d8027f9bb53" +checksum = "2e859df029d160cb88608f5d7df7fb4753fd20fdfb4de5644f3d8b8440841721" dependencies = [ "base64", "bytes", @@ -1623,21 +1983,15 @@ dependencies = [ "http", "httparse", "log", - "rand", + "rand 0.8.5", "sha1", ] -[[package]] -name = "spin" -version = "0.9.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6980e8d7511241f8acf4aebddbb1ff938df5eebe98691418c4468d0b72a96a67" - [[package]] name = "stable_deref_trait" -version = "1.2.0" +version = "1.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3" +checksum = "6ce2be8dc25455e1f91df71bfa12ad37d7af1092ae736f3a6cd0e37bc7810596" [[package]] name = "subtle" @@ -1647,20 +2001,29 @@ checksum = 
"13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292" [[package]] name = "syn" -version = "2.0.89" +version = "2.0.114" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "44d46482f1c1c87acd84dea20c1bf5ebff4c757009ed6bf19cfd36fb10e92c4e" +checksum = "d4d107df263a3013ef9b1879b0df87d706ff80f65a86ea879bd9c31f9b307c2a" dependencies = [ "proc-macro2", "quote", "unicode-ident", ] +[[package]] +name = "sync_wrapper" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0bf256ce5efdfa370213c1dabab5935a12e49f2c58d15e9eac2870d3b4f27263" +dependencies = [ + "futures-core", +] + [[package]] name = "synstructure" -version = "0.13.1" +version = "0.13.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c8af7666ab7b6390ab78131fb5b0fce11d6b7a6951602017c35fa82800708971" +checksum = "728a70f3dbaf5bab7f0c4b1ac8d7ae5ea60a4b5549c8a5914361c99147a709d2" dependencies = [ "proc-macro2", "quote", @@ -1669,15 +2032,15 @@ dependencies = [ [[package]] name = "tempfile" -version = "3.14.0" +version = "3.26.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "28cce251fcbc87fac86a866eeb0d6c2d536fc16d06f184bb61aeae11aa4cee0c" +checksum = "82a72c767771b47409d2345987fda8628641887d5466101319899796367354a0" dependencies = [ - "cfg-if", "fastrand", + "getrandom 0.3.4", "once_cell", "rustix", - "windows-sys 0.59.0", + "windows-sys 0.60.2", ] [[package]] @@ -1686,7 +2049,16 @@ version = "1.0.69" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b6aaf5339b578ea85b50e080feb250a3e8ae8cfcdff9a461c9ec2904bc923f52" dependencies = [ - "thiserror-impl", + "thiserror-impl 1.0.69", +] + +[[package]] +name = "thiserror" +version = "2.0.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f63587ca0f12b72a0600bcba1d40081f830876000bb46dd2337a3051618f4fc8" +dependencies = [ + "thiserror-impl 2.0.17", ] [[package]] @@ -1700,23 
+2072,48 @@ dependencies = [ "syn", ] +[[package]] +name = "thiserror-impl" +version = "2.0.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3ff15c8ecd7de3849db632e14d18d2571fa09dfc5ed93479bc4485c7a517c913" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + [[package]] name = "tinystr" -version = "0.7.6" +version = "0.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9117f5d4db391c1cf6927e7bea3db74b9a1c1add8f7eda9ffd5364f40f57b82f" +checksum = "42d3e9c45c09de15d06dd8acf5f4e0e399e85927b7f00711024eb7ae10fa4869" dependencies = [ "displaydoc", "zerovec", ] +[[package]] +name = "tinyvec" +version = "1.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bfa5fdc3bce6191a1dbc8c02d5c8bffcf557bafa17c124c5264a458f1b0613fa" +dependencies = [ + "tinyvec_macros", +] + +[[package]] +name = "tinyvec_macros" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" + [[package]] name = "tokio" -version = "1.42.0" +version = "1.49.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5cec9b21b0450273377fc97bd4c33a8acffc8c996c987a7c5b319a0083707551" +checksum = "72a2903cd7736441aac9df9d7688bd0ce48edccaadf181c3b90be801e81d3d86" dependencies = [ - "backtrace", "bytes", "libc", "mio", @@ -1725,14 +2122,14 @@ dependencies = [ "signal-hook-registry", "socket2", "tokio-macros", - "windows-sys 0.52.0", + "windows-sys 0.61.2", ] [[package]] name = "tokio-macros" -version = "2.4.0" +version = "2.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "693d596312e88961bc67d7f1f97af8a70227d9f90c31bba5806eec004978d752" +checksum = "af407857209536a95c8e56f8231ef2c2e2aff839b22e07a1ffcbc617e9db9fa5" dependencies = [ "proc-macro2", "quote", @@ -1741,20 +2138,19 @@ dependencies = [ [[package]] name = "tokio-rustls" -version = "0.26.0" 
+version = "0.26.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0c7bc40d0e5a97695bb96e27995cd3a08538541b0a846f65bba7a359f36700d4" +checksum = "1729aa945f29d91ba541258c8df89027d5792d85a8841fb65e8bf0f4ede4ef61" dependencies = [ "rustls", - "rustls-pki-types", "tokio", ] [[package]] name = "tokio-stream" -version = "0.1.16" +version = "0.1.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4f4e6ce100d0eb49a2734f8c0812bcd324cf357d21810932c5df6b96ef2b86f1" +checksum = "32da49809aab5c3bc678af03902d4ccddea2a87d028d86392a4b1560c6906c70" dependencies = [ "futures-core", "pin-project-lite", @@ -1764,9 +2160,9 @@ dependencies = [ [[package]] name = "tokio-util" -version = "0.7.12" +version = "0.7.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "61e7c3654c13bcd040d4a03abee2c75b1d14a37b423cf5a813ceae1cc903ec6a" +checksum = "9ae9cec805b01e8fc3fd2fe289f89149a9b66dd16786abd8b19cfa7b48cb0098" dependencies = [ "bytes", "futures-core", @@ -1778,17 +2174,35 @@ dependencies = [ [[package]] name = "tower" -version = "0.4.13" +version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b8fa9be0de6cf49e536ce1851f987bd21a43b771b09473c3549a6c853db37c1c" +checksum = "d039ad9159c98b70ecfd540b2573b97f7f52c3e8d9f8ad57a24b916a536975f9" dependencies = [ "futures-core", "futures-util", - "pin-project", "pin-project-lite", + "sync_wrapper", + "tokio", + "tower-layer", + "tower-service", +] + +[[package]] +name = "tower-http" +version = "0.6.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d4e6559d53cc268e5031cd8429d05415bc4cb4aefc4aa5d6cc35fbf5b924a1f8" +dependencies = [ + "bitflags", + "bytes", + "futures-util", + "http", + "http-body", + "iri-string", + "pin-project-lite", + "tower", "tower-layer", "tower-service", - "tracing", ] [[package]] @@ -1805,11 +2219,10 @@ checksum = 
"8df9b6e13f2d32c91b9bd719c00d1958837bc7dec474d94952798cc8e69eeec3" [[package]] name = "tracing" -version = "0.1.40" +version = "0.1.44" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c3523ab5a71916ccf420eebdf5521fcef02141234bbc0b8a49f2fdc4544364ef" +checksum = "63e71662fa4b2a2c3a26f570f037eb95bb1f85397f3cd8076caed2f026a6d100" dependencies = [ - "log", "pin-project-lite", "tracing-attributes", "tracing-core", @@ -1817,9 +2230,9 @@ dependencies = [ [[package]] name = "tracing-attributes" -version = "0.1.27" +version = "0.1.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7" +checksum = "7490cfa5ec963746568740651ac6781f701c9c5ea257c58e057f3ba8cf69e8da" dependencies = [ "proc-macro2", "quote", @@ -1828,9 +2241,9 @@ dependencies = [ [[package]] name = "tracing-core" -version = "0.1.32" +version = "0.1.36" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c06d3da6113f116aaee68e4d601191614c9053067f9ab7f6edbcb161237daa54" +checksum = "db97caf9d906fbde555dd62fa95ddba9eecfd14cb388e4f491a66d74cd5fb79a" dependencies = [ "once_cell", ] @@ -1843,21 +2256,27 @@ checksum = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b" [[package]] name = "typenum" -version = "1.17.0" +version = "1.19.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "42ff0bf0c66b8238c6f3b578df37d0b7848e55df8577b3f74f92a69acceeb825" +checksum = "562d481066bde0658276a35467c4af00bdc6ee726305698a55b86e61d7ad82bb" [[package]] name = "unicode-id" -version = "0.3.5" +version = "0.3.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "10103c57044730945224467c09f71a4db0071c123a0648cc3e818913bde6b561" +checksum = "70ba288e709927c043cbe476718d37be306be53fb1fafecd0dbe36d072be2580" [[package]] name = "unicode-ident" -version = "1.0.14" +version = "1.0.22" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "adb9e6ca4f869e1180728b7950e35922a7fc6397f7b641499e8f3ef06e50dc83" +checksum = "9312f7c4f6ff9069b165498234ce8be658059c6728633667c526e27dc2cf1df5" + +[[package]] +name = "unicode-xid" +version = "0.2.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ebc1c04c71510c7f702b52b7c350734c9ff1295c464a03335b00bb84fc54f853" [[package]] name = "untrusted" @@ -1867,21 +2286,16 @@ checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1" [[package]] name = "url" -version = "2.5.4" +version = "2.5.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "32f8b686cadd1473f4bd0117a5d28d36b1ade384ea9b5069a1c40aefed7fda60" +checksum = "ff67a8a4397373c3ef660812acab3268222035010ab8680ec4215f38ba3d0eed" dependencies = [ "form_urlencoded", "idna", "percent-encoding", + "serde", ] -[[package]] -name = "utf16_iter" -version = "1.0.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c8232dd3cdaed5356e0f716d285e4b40b932ac434100fe9b7e0e8e935b9e6246" - [[package]] name = "utf8_iter" version = "1.0.4" @@ -1889,10 +2303,16 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b6c140620e7ffbb22c2dee59cafe6084a59b5ffc27a8859a5f0d494b5d52b6be" [[package]] -name = "version-compare" -version = "0.2.0" +name = "uuid" +version = "1.21.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "852e951cb7832cb45cb1169900d19760cfa39b82bc0ea9c0e5a14ae88411c98b" +checksum = "b672338555252d43fd2240c714dc444b8c6fb0a5c5335e65a07bba7742735ddb" +dependencies = [ + "getrandom 0.4.1", + "js-sys", + "serde_core", + "wasm-bindgen", +] [[package]] name = "version_check" @@ -1921,53 +2341,60 @@ dependencies = [ [[package]] name = "wasi" -version = "0.11.0+wasi-snapshot-preview1" +version = "0.11.1+wasi-snapshot-preview1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" +checksum = "ccf3ec651a847eb01de73ccad15eb7d99f80485de043efb2f370cd654f4ea44b" [[package]] -name = "wasm-bindgen" -version = "0.2.95" +name = "wasip2" +version = "1.0.1+wasi-0.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "128d1e363af62632b8eb57219c8fd7877144af57558fb2ef0368d0087bddeb2e" +checksum = "0562428422c63773dad2c345a1882263bbf4d65cf3f42e90921f787ef5ad58e7" dependencies = [ - "cfg-if", - "once_cell", - "wasm-bindgen-macro", + "wit-bindgen 0.46.0", ] [[package]] -name = "wasm-bindgen-backend" -version = "0.2.95" +name = "wasip3" +version = "0.4.0+wasi-0.3.0-rc-2026-01-06" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cb6dd4d3ca0ddffd1dd1c9c04f94b868c37ff5fac97c30b97cff2d74fce3a358" +checksum = "5428f8bf88ea5ddc08faddef2ac4a67e390b88186c703ce6dbd955e1c145aca5" dependencies = [ - "bumpalo", - "log", + "wit-bindgen 0.51.0", +] + +[[package]] +name = "wasm-bindgen" +version = "0.2.108" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "64024a30ec1e37399cf85a7ffefebdb72205ca1c972291c51512360d90bd8566" +dependencies = [ + "cfg-if", "once_cell", - "proc-macro2", - "quote", - "syn", + "rustversion", + "wasm-bindgen-macro", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-futures" -version = "0.4.45" +version = "0.4.58" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cc7ec4f8827a71586374db3e87abdb5a2bb3a15afed140221307c3ec06b1f63b" +checksum = "70a6e77fd0ae8029c9ea0063f87c46fde723e7d887703d74ad2616d792e51e6f" dependencies = [ "cfg-if", + "futures-util", "js-sys", + "once_cell", "wasm-bindgen", "web-sys", ] [[package]] name = "wasm-bindgen-macro" -version = "0.2.95" +version = "0.2.108" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e79384be7f8f5a9dd5d7167216f022090cf1f9ec128e6e6a482a2cb5c5422c56" +checksum = 
"008b239d9c740232e71bd39e8ef6429d27097518b6b30bdf9086833bd5b6d608" dependencies = [ "quote", "wasm-bindgen-macro-support", @@ -1975,81 +2402,134 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro-support" -version = "0.2.95" +version = "0.2.108" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "26c6ab57572f7a24a4985830b120de1594465e5d500f24afe89e16b4e833ef68" +checksum = "5256bae2d58f54820e6490f9839c49780dff84c65aeab9e772f15d5f0e913a55" dependencies = [ + "bumpalo", "proc-macro2", "quote", "syn", - "wasm-bindgen-backend", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-shared" -version = "0.2.95" +version = "0.2.108" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1f01b580c9ac74c8d8f0c0e4afb04eeef2acf145458e52c03845ee9cd23e3d12" +dependencies = [ + "unicode-ident", +] + +[[package]] +name = "wasm-encoder" +version = "0.244.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "990065f2fe63003fe337b932cfb5e3b80e0b4d0f5ff650e6985b1048f62c8319" +dependencies = [ + "leb128fmt", + "wasmparser", +] + +[[package]] +name = "wasm-metadata" +version = "0.244.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bb0e353e6a2fbdc176932bbaab493762eb1255a7900fe0fea1a2f96c296cc909" +dependencies = [ + "anyhow", + "indexmap", + "wasm-encoder", + "wasmparser", +] + +[[package]] +name = "wasm-streams" +version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "65fc09f10666a9f147042251e0dda9c18f166ff7de300607007e96bdebc1068d" +checksum = "9d1ec4f6517c9e11ae630e200b2b65d193279042e28edd4a2cda233e46670bbb" +dependencies = [ + "futures-util", + "js-sys", + "wasm-bindgen", + "wasm-bindgen-futures", + "web-sys", +] + +[[package]] +name = "wasmparser" +version = "0.244.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "47b807c72e1bac69382b3a6fb3dbe8ea4c0ed87ff5629b8685ae6b9a611028fe" 
+dependencies = [ + "bitflags", + "hashbrown 0.15.5", + "indexmap", + "semver", +] [[package]] name = "web-sys" -version = "0.3.72" +version = "0.3.85" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f6488b90108c040df0fe62fa815cbdee25124641df01814dd7282749234c6112" +checksum = "312e32e551d92129218ea9a2452120f4aabc03529ef03e4d0d82fb2780608598" dependencies = [ "js-sys", "wasm-bindgen", ] [[package]] -name = "webpki-root-certs" -version = "0.26.7" +name = "web-time" +version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9cd5da49bdf1f30054cfe0b8ce2958b8fbeb67c4d82c8967a598af481bef255c" +checksum = "5a6580f308b1fad9207618087a65c04e7a10bc77e02c8e84e9b00dd4b12fa0bb" dependencies = [ - "rustls-pki-types", + "js-sys", + "wasm-bindgen", ] [[package]] -name = "webpki-roots" -version = "0.26.7" +name = "webpki-root-certs" +version = "0.26.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5d642ff16b7e79272ae451b7322067cdc17cadf68c23264be9d94a32319efe7e" +checksum = "75c7f0ef91146ebfb530314f5f1d24528d7f0767efbfd31dce919275413e393e" dependencies = [ - "rustls-pki-types", + "webpki-root-certs 1.0.5", ] [[package]] -name = "winapi" -version = "0.3.9" +name = "webpki-root-certs" +version = "1.0.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419" +checksum = "36a29fc0408b113f68cf32637857ab740edfafdf460c326cd2afaa2d84cc05dc" dependencies = [ - "winapi-i686-pc-windows-gnu", - "winapi-x86_64-pc-windows-gnu", + "rustls-pki-types", ] [[package]] -name = "winapi-i686-pc-windows-gnu" -version = "0.4.0" +name = "webpki-roots" +version = "1.0.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" +checksum = "12bed680863276c63889429bfd6cab3b99943659923822de1c8a39c49e4d722c" +dependencies = [ + 
"rustls-pki-types", +] [[package]] name = "winapi-util" -version = "0.1.9" +version = "0.1.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cf221c93e13a30d793f7645a0e7762c55d169dbb0a49671918a2319d289b10bb" +checksum = "c2a7b1c03c876122aa43f3020e6c3c3ee5c05081c9a00739faf7503aeba10d22" dependencies = [ - "windows-sys 0.48.0", + "windows-sys 0.60.2", ] [[package]] -name = "winapi-x86_64-pc-windows-gnu" -version = "0.4.0" +name = "windows-link" +version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" +checksum = "f0805222e57f7521d6a62e36fa9163bc891acd422f971defe97d64e70d0a4fe5" [[package]] name = "windows-sys" @@ -2062,29 +2542,29 @@ dependencies = [ [[package]] name = "windows-sys" -version = "0.48.0" +version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9" +checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" dependencies = [ - "windows-targets 0.48.5", + "windows-targets 0.52.6", ] [[package]] name = "windows-sys" -version = "0.52.0" +version = "0.60.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" +checksum = "f2f500e4d28234f72040990ec9d39e3a6b950f9f22d3dba18416c35882612bcb" dependencies = [ - "windows-targets 0.52.6", + "windows-targets 0.53.5", ] [[package]] name = "windows-sys" -version = "0.59.0" +version = "0.61.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b" +checksum = "ae137229bcbd6cdf0f7b80a31df61766145077ddf49416a728b02cb3921ff3fc" dependencies = [ - "windows-targets 0.52.6", + "windows-link", ] [[package]] @@ -2102,21 +2582,6 @@ dependencies = [ "windows_x86_64_msvc 0.42.2", ] -[[package]] -name 
= "windows-targets" -version = "0.48.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9a2fa6e2155d7247be68c096456083145c183cbbbc2764150dda45a87197940c" -dependencies = [ - "windows_aarch64_gnullvm 0.48.5", - "windows_aarch64_msvc 0.48.5", - "windows_i686_gnu 0.48.5", - "windows_i686_msvc 0.48.5", - "windows_x86_64_gnu 0.48.5", - "windows_x86_64_gnullvm 0.48.5", - "windows_x86_64_msvc 0.48.5", -] - [[package]] name = "windows-targets" version = "0.52.6" @@ -2126,13 +2591,30 @@ dependencies = [ "windows_aarch64_gnullvm 0.52.6", "windows_aarch64_msvc 0.52.6", "windows_i686_gnu 0.52.6", - "windows_i686_gnullvm", + "windows_i686_gnullvm 0.52.6", "windows_i686_msvc 0.52.6", "windows_x86_64_gnu 0.52.6", "windows_x86_64_gnullvm 0.52.6", "windows_x86_64_msvc 0.52.6", ] +[[package]] +name = "windows-targets" +version = "0.53.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4945f9f551b88e0d65f3db0bc25c33b8acea4d9e41163edf90dcd0b19f9069f3" +dependencies = [ + "windows-link", + "windows_aarch64_gnullvm 0.53.1", + "windows_aarch64_msvc 0.53.1", + "windows_i686_gnu 0.53.1", + "windows_i686_gnullvm 0.53.1", + "windows_i686_msvc 0.53.1", + "windows_x86_64_gnu 0.53.1", + "windows_x86_64_gnullvm 0.53.1", + "windows_x86_64_msvc 0.53.1", +] + [[package]] name = "windows_aarch64_gnullvm" version = "0.42.2" @@ -2141,15 +2623,15 @@ checksum = "597a5118570b68bc08d8d59125332c54f1ba9d9adeedeef5b99b02ba2b0698f8" [[package]] name = "windows_aarch64_gnullvm" -version = "0.48.5" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8" +checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3" [[package]] name = "windows_aarch64_gnullvm" -version = "0.52.6" +version = "0.53.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3" +checksum = "a9d8416fa8b42f5c947f8482c43e7d89e73a173cead56d044f6a56104a6d1b53" [[package]] name = "windows_aarch64_msvc" @@ -2159,15 +2641,15 @@ checksum = "e08e8864a60f06ef0d0ff4ba04124db8b0fb3be5776a5cd47641e942e58c4d43" [[package]] name = "windows_aarch64_msvc" -version = "0.48.5" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc" +checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469" [[package]] name = "windows_aarch64_msvc" -version = "0.52.6" +version = "0.53.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469" +checksum = "b9d782e804c2f632e395708e99a94275910eb9100b2114651e04744e9b125006" [[package]] name = "windows_i686_gnu" @@ -2177,15 +2659,15 @@ checksum = "c61d927d8da41da96a81f029489353e68739737d3beca43145c8afec9a31a84f" [[package]] name = "windows_i686_gnu" -version = "0.48.5" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e" +checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b" [[package]] name = "windows_i686_gnu" -version = "0.52.6" +version = "0.53.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b" +checksum = "960e6da069d81e09becb0ca57a65220ddff016ff2d6af6a223cf372a506593a3" [[package]] name = "windows_i686_gnullvm" @@ -2193,6 +2675,12 @@ version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66" +[[package]] +name = "windows_i686_gnullvm" +version = "0.53.1" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "fa7359d10048f68ab8b09fa71c3daccfb0e9b559aed648a8f95469c27057180c" + [[package]] name = "windows_i686_msvc" version = "0.42.2" @@ -2201,15 +2689,15 @@ checksum = "44d840b6ec649f480a41c8d80f9c65108b92d89345dd94027bfe06ac444d1060" [[package]] name = "windows_i686_msvc" -version = "0.48.5" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406" +checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66" [[package]] name = "windows_i686_msvc" -version = "0.52.6" +version = "0.53.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66" +checksum = "1e7ac75179f18232fe9c285163565a57ef8d3c89254a30685b57d83a38d326c2" [[package]] name = "windows_x86_64_gnu" @@ -2219,15 +2707,15 @@ checksum = "8de912b8b8feb55c064867cf047dda097f92d51efad5b491dfb98f6bbb70cb36" [[package]] name = "windows_x86_64_gnu" -version = "0.48.5" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e" +checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78" [[package]] name = "windows_x86_64_gnu" -version = "0.52.6" +version = "0.53.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78" +checksum = "9c3842cdd74a865a8066ab39c8a7a473c0778a3f29370b5fd6b4b9aa7df4a499" [[package]] name = "windows_x86_64_gnullvm" @@ -2237,15 +2725,15 @@ checksum = "26d41b46a36d453748aedef1486d5c7a85db22e56aff34643984ea85514e94a3" [[package]] name = "windows_x86_64_gnullvm" -version = "0.48.5" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc" +checksum = 
"24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d" [[package]] name = "windows_x86_64_gnullvm" -version = "0.52.6" +version = "0.53.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d" +checksum = "0ffa179e2d07eee8ad8f57493436566c7cc30ac536a3379fdf008f47f6bb7ae1" [[package]] name = "windows_x86_64_msvc" @@ -2255,35 +2743,122 @@ checksum = "9aec5da331524158c6d1a4ac0ab1541149c0b9505fde06423b02f5ef0106b9f0" [[package]] name = "windows_x86_64_msvc" -version = "0.48.5" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538" +checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" [[package]] name = "windows_x86_64_msvc" -version = "0.52.6" +version = "0.53.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" +checksum = "d6bbff5f0aada427a1e5a6da5f1f98158182f26556f345ac9e04d36d0ebed650" [[package]] -name = "write16" -version = "1.0.0" +name = "wit-bindgen" +version = "0.46.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d1890f4022759daae28ed4fe62859b1236caebfc61ede2f63ed4e695f3f6d936" +checksum = "f17a85883d4e6d00e8a97c586de764dabcc06133f7f1d55dce5cdc070ad7fe59" + +[[package]] +name = "wit-bindgen" +version = "0.51.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d7249219f66ced02969388cf2bb044a09756a083d0fab1e566056b04d9fbcaa5" +dependencies = [ + "wit-bindgen-rust-macro", +] + +[[package]] +name = "wit-bindgen-core" +version = "0.51.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ea61de684c3ea68cb082b7a88508a8b27fcc8b797d738bfc99a82facf1d752dc" +dependencies = [ + "anyhow", + "heck", + "wit-parser", +] + +[[package]] +name = "wit-bindgen-rust" 
+version = "0.51.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b7c566e0f4b284dd6561c786d9cb0142da491f46a9fbed79ea69cdad5db17f21" +dependencies = [ + "anyhow", + "heck", + "indexmap", + "prettyplease", + "syn", + "wasm-metadata", + "wit-bindgen-core", + "wit-component", +] + +[[package]] +name = "wit-bindgen-rust-macro" +version = "0.51.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0c0f9bfd77e6a48eccf51359e3ae77140a7f50b1e2ebfe62422d8afdaffab17a" +dependencies = [ + "anyhow", + "prettyplease", + "proc-macro2", + "quote", + "syn", + "wit-bindgen-core", + "wit-bindgen-rust", +] + +[[package]] +name = "wit-component" +version = "0.244.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9d66ea20e9553b30172b5e831994e35fbde2d165325bec84fc43dbf6f4eb9cb2" +dependencies = [ + "anyhow", + "bitflags", + "indexmap", + "log", + "serde", + "serde_derive", + "serde_json", + "wasm-encoder", + "wasm-metadata", + "wasmparser", + "wit-parser", +] + +[[package]] +name = "wit-parser" +version = "0.244.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ecc8ac4bc1dc3381b7f59c34f00b67e18f910c2c0f50015669dde7def656a736" +dependencies = [ + "anyhow", + "id-arena", + "indexmap", + "log", + "semver", + "serde", + "serde_derive", + "serde_json", + "unicode-xid", + "wasmparser", +] [[package]] name = "writeable" -version = "0.5.5" +version = "0.6.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e9df38ee2d2c3c5948ea468a8406ff0db0b29ae1ffde1bcf20ef305bcc95c51" +checksum = "9edde0db4769d2dc68579893f2306b26c6ecfbe0ef499b013d731b7b9247e0b9" [[package]] name = "yoke" -version = "0.7.5" +version = "0.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "120e6aef9aa629e3d4f52dc8cc43a015c7724194c97dfaf45180d2daf2b77f40" +checksum = "72d6e5c6afb84d73944e5cedb052c4680d5657337201555f9f2a16b7406d4954" dependencies = 
[ - "serde", "stable_deref_trait", "yoke-derive", "zerofrom", @@ -2291,9 +2866,9 @@ dependencies = [ [[package]] name = "yoke-derive" -version = "0.7.5" +version = "0.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2380878cad4ac9aac1e2435f3eb4020e8374b5f13c296cb75b4620ff8e229154" +checksum = "b659052874eb698efe5b9e8cf382204678a0086ebf46982b79d6ca3182927e5d" dependencies = [ "proc-macro2", "quote", @@ -2303,19 +2878,18 @@ dependencies = [ [[package]] name = "zerocopy" -version = "0.7.35" +version = "0.8.33" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1b9b4fd18abc82b8136838da5d50bae7bdea537c574d8dc1a34ed098d6c166f0" +checksum = "668f5168d10b9ee831de31933dc111a459c97ec93225beb307aed970d1372dfd" dependencies = [ - "byteorder", "zerocopy-derive", ] [[package]] name = "zerocopy-derive" -version = "0.7.35" +version = "0.8.33" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fa4f8080344d4671fb4e831a13ad1e68092748387dfc4f55e356242fae12ce3e" +checksum = "2c7962b26b0a8685668b671ee4b54d007a67d4eaf05fda79ac0ecf41e32270f1" dependencies = [ "proc-macro2", "quote", @@ -2324,18 +2898,18 @@ dependencies = [ [[package]] name = "zerofrom" -version = "0.1.5" +version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cff3ee08c995dee1859d998dea82f7374f2826091dd9cd47def953cae446cd2e" +checksum = "50cc42e0333e05660c3587f3bf9d0478688e15d870fab3346451ce7f8c9fbea5" dependencies = [ "zerofrom-derive", ] [[package]] name = "zerofrom-derive" -version = "0.1.5" +version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "595eed982f7d355beb85837f651fa22e90b3c044842dc7f2c2842c086f295808" +checksum = "d71e5d6e06ab090c67b5e44993ec16b72dcbaabc526db883a360057678b48502" dependencies = [ "proc-macro2", "quote", @@ -2345,15 +2919,26 @@ dependencies = [ [[package]] name = "zeroize" -version = "1.8.1" +version = "1.8.2" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "ced3678a2879b30306d323f4542626697a464a97c0a07c9aebf7ebca65cd4dde" +checksum = "b97154e67e32c85465826e8bcc1c59429aaaf107c1e4a9e53c8d8ccd5eff88d0" + +[[package]] +name = "zerotrie" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2a59c17a5562d507e4b54960e8569ebee33bee890c70aa3fe7b97e85a9fd7851" +dependencies = [ + "displaydoc", + "yoke", + "zerofrom", +] [[package]] name = "zerovec" -version = "0.10.4" +version = "0.11.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aa2b893d79df23bfb12d5461018d408ea19dfafe76c2c7ef6d4eba614f8ff079" +checksum = "6c28719294829477f525be0186d13efa9a3c602f7ec202ca9e353d310fb9a002" dependencies = [ "yoke", "zerofrom", @@ -2362,11 +2947,17 @@ dependencies = [ [[package]] name = "zerovec-derive" -version = "0.10.3" +version = "0.11.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6eafa6dfb17584ea3e2bd6e76e0cc15ad7af12b09abdd1ca55961bed9b1063c6" +checksum = "eadce39539ca5cb3985590102671f2567e659fca9666581ad3411d59207951f3" dependencies = [ "proc-macro2", "quote", "syn", ] + +[[package]] +name = "zmij" +version = "1.0.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2fc5a66a20078bf1251bde995aa2fdcc4b800c70b5d92dd2c62abc5c60f679f8" diff --git a/Cargo.toml b/Cargo.toml index 487eec5..c7a8f49 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -11,25 +11,36 @@ webpki-roots = ["hyper-rustls/webpki-roots"] native-tokio = ["hyper-rustls/native-tokio"] [dependencies] -once_cell = "1.19.0" -async-trait = "0.1.79" -hyper = { version = "1.2", features = ["full"] } +once_cell = "1.21.3" +async-trait = "0.1.89" +hyper = { version = "1.8", features = ["full"] } tokio = { version = "1", features = ["full", "macros"] } -bytes = "1.6.0" +bytes = "1.11.0" serde = { version = "1.0", features = ["derive"] } -serde_json = "1.0.114" -quick-xml = { version = 
"0.37.0", features = ["encoding", "async-tokio"] } +serde_json = "1.0.149" +quick-xml = { version = "0.39.0", features = ["encoding", "async-tokio"] } async_fn_traits = "0.1.1" -version-compare = "0.2.0" -regex = "1.10.4" -hyper-util = { version = "0.1.6", features = ["client", "tokio", "http1"] } -http-body-util = "0.1.1" -jsonrpsee = { version = "0.24.0", features = ["server", "client"] } -hyper-rustls = { version = "0.27.2", features = ["http1", "http2", "native-tokio", "ring", "tls12"], default-features = false } -rustls-platform-verifier = { version = "0.5.0", optional = true } -rustls = { version = "0.23.12", default-features = false } -markdown = "1.0.0-alpha.21" +libversion-sys = { git = "https://github.com/DUpdateSystem/libversion-sys", rev = "68391515ac8f555ca86bc5dfcd980f66352e414d" } +regex = "1.12.2" +hyper-util = { version = "0.1.19", features = ["client", "tokio", "http1"] } +http-body-util = "0.1.3" +jsonrpsee = { version = "0.26.0", features = ["server", "client"] } +hyper-rustls = { version = "0.27.7", features = ["http1", "http2", "native-tokio", "ring", "tls12"], default-features = false } +rustls-platform-verifier = { version = "0.6.2", optional = true } +rustls = { version = "0.23.36", default-features = false } +markdown = "1.0.0" +# Downloader dependencies +reqwest = { version = "0.13", default-features = false, features = ["rustls", "stream", "json"] } +uuid = { version = "1.19", features = ["v4", "serde"] } +parking_lot = "0.12" +futures = "0.3" +tokio-util = { version = "0.7", features = ["io"] } +jsonl = "4.0" +file-locker = "1.1" +url = "2" [dev-dependencies] -mockito = "1.4.0" -tempfile = "3.10.1" +mockito = "1.7.1" +rand = "0.10.0" +serial_test = "3.3.1" +tempfile = "3.24.0" diff --git a/src/api.rs b/src/api.rs deleted file mode 100644 index 8a16436..0000000 --- a/src/api.rs +++ /dev/null @@ -1,51 +0,0 @@ -use std::collections::BTreeMap; -use std::path::Path; - -use crate::cache::init_cache_manager_with_expire; -use 
crate::core::config::world::{init_world_list, world_list}; -use crate::error::Result; -use crate::websdk::repo::api; - -use crate::utils::json::json_to_string; - -#[allow(dead_code)] -pub async fn init(data_dir: &Path, cache_dir: &Path, global_expire_time: u64) -> Result<()> { - // world list - let world_list_path = data_dir.join(world_list::WORLD_CONFIG_LIST_NAME); - init_world_list(&world_list_path).await?; - // cache - let local_cache_path = cache_dir.join("local_cache"); - init_cache_manager_with_expire(local_cache_path.as_path(), global_expire_time).await; - Ok(()) -} - -#[allow(dead_code)] -pub async fn check_app_available<'a>( - uuid: &str, - app_data: &BTreeMap<&'a str, &'a str>, - hub_data: &BTreeMap<&'a str, &'a str>, -) -> Option { - api::check_app_available(uuid, app_data, hub_data).await -} - -#[allow(dead_code)] -pub async fn get_latest_release<'a>( - uuid: &str, - app_data: &BTreeMap<&'a str, &'a str>, - hub_data: &BTreeMap<&'a str, &'a str>, -) -> Option { - api::get_latest_release(uuid, app_data, hub_data) - .await - .map(|data| json_to_string(&data).unwrap()) -} - -#[allow(dead_code)] -pub async fn get_releases<'a>( - uuid: &str, - app_data: &BTreeMap<&'a str, &'a str>, - hub_data: &BTreeMap<&'a str, &'a str>, -) -> Option { - api::get_releases(uuid, app_data, hub_data) - .await - .map(|data| json_to_string(&data).unwrap()) -} diff --git a/src/cache.rs b/src/cache.rs index 771ea96..e96302b 100644 --- a/src/cache.rs +++ b/src/cache.rs @@ -29,6 +29,6 @@ pub async fn init_cache_manager_with_expire(local_cache_path: &Path, expire_time .set_global_expire_time(expire_time); } -pub async fn get_cache_manager<'a>() -> Arc> { +pub async fn get_cache_manager() -> Arc> { INSTANCE_CONTAINER.get().await.clone() } diff --git a/src/core/config/world.rs b/src/core/config/world.rs index bf9037d..9ab73ac 100644 --- a/src/core/config/world.rs +++ b/src/core/config/world.rs @@ -18,7 +18,7 @@ pub async fn init_world_list(world_list_path: &Path) -> Result<()> { Ok(()) 
} -pub async fn get_world_list<'a>() -> Arc> { +pub async fn get_world_list() -> Arc> { INSTANCE_CONTAINER.get().await.clone() } diff --git a/src/database/mod.rs b/src/database/mod.rs new file mode 100644 index 0000000..dcb6598 --- /dev/null +++ b/src/database/mod.rs @@ -0,0 +1,229 @@ +pub mod models; +pub mod store; + +use models::{ + app::AppRecord, extra_app::ExtraAppRecord, extra_hub::ExtraHubRecord, hub::HubRecord, +}; +use once_cell::sync::OnceCell; +use std::path::Path; + +use crate::error::Result; +use store::{HasId, JsonlStore}; + +impl HasId for AppRecord { + fn id(&self) -> &str { + &self.id + } +} + +impl HasId for HubRecord { + fn id(&self) -> &str { + &self.uuid + } +} + +impl HasId for ExtraAppRecord { + fn id(&self) -> &str { + &self.id + } +} + +impl HasId for ExtraHubRecord { + fn id(&self) -> &str { + &self.id + } +} + +pub struct Database { + pub apps: JsonlStore, + pub hubs: JsonlStore, + pub extra_apps: JsonlStore, + pub extra_hubs: JsonlStore, +} + +impl Database { + pub fn open(data_dir: &Path) -> Result { + let db = Self { + apps: JsonlStore::new(data_dir.join("apps.jsonl")), + hubs: JsonlStore::new(data_dir.join("hubs.jsonl")), + extra_apps: JsonlStore::new(data_dir.join("extra_apps.jsonl")), + extra_hubs: JsonlStore::new(data_dir.join("extra_hubs.jsonl")), + }; + db.apps.ensure_file()?; + db.hubs.ensure_file()?; + db.extra_apps.ensure_file()?; + db.extra_hubs.ensure_file()?; + Ok(db) + } + + // --- App CRUD --- + + pub fn load_apps(&self) -> Result> { + self.apps.load_all() + } + + pub fn upsert_app(&self, record: &AppRecord) -> Result<()> { + self.apps.upsert(record) + } + + pub fn delete_app(&self, id: &str) -> Result { + self.apps.delete::(id) + } + + pub fn find_app(&self, id: &str) -> Result> { + self.apps.find_by_id(id) + } + + // --- Hub CRUD --- + + pub fn load_hubs(&self) -> Result> { + self.hubs.load_all() + } + + pub fn upsert_hub(&self, record: &HubRecord) -> Result<()> { + self.hubs.upsert(record) + } + + pub fn 
delete_hub(&self, uuid: &str) -> Result { + self.hubs.delete::(uuid) + } + + pub fn find_hub(&self, uuid: &str) -> Result> { + self.hubs.find_by_id(uuid) + } + + // --- ExtraApp CRUD --- + + pub fn load_extra_apps(&self) -> Result> { + self.extra_apps.load_all() + } + + pub fn upsert_extra_app(&self, record: &ExtraAppRecord) -> Result<()> { + self.extra_apps.upsert(record) + } + + pub fn delete_extra_app(&self, id: &str) -> Result { + self.extra_apps.delete::(id) + } + + /// Find an ExtraApp record by matching its `app_id` map. + pub fn get_extra_app_by_app_id( + &self, + app_id: &std::collections::HashMap>, + ) -> Result> { + let all = self.extra_apps.load_all::()?; + Ok(all.into_iter().find(|r| &r.app_id == app_id)) + } + + // --- ExtraHub CRUD --- + + pub fn load_extra_hubs(&self) -> Result> { + self.extra_hubs.load_all() + } + + pub fn upsert_extra_hub(&self, record: &ExtraHubRecord) -> Result<()> { + self.extra_hubs.upsert(record) + } + + pub fn delete_extra_hub(&self, id: &str) -> Result { + self.extra_hubs.delete::(id) + } + + pub fn find_extra_hub(&self, id: &str) -> Result> { + self.extra_hubs.find_by_id(id) + } +} + +static DB: OnceCell = OnceCell::new(); + +/// Initialize the global database. Must be called once before `get_db()`. +/// Idempotent: if already initialized, returns `Ok(())`. +pub fn init_db(data_dir: &Path) -> Result<()> { + if DB.get().is_some() { + return Ok(()); + } + let db = Database::open(data_dir)?; + let _ = DB.set(db); // Ignore error if another thread beat us to it + Ok(()) +} + +/// Get the global database instance. Panics if `init_db` was not called. +pub fn get_db() -> &'static Database { + DB.get() + .expect("Database not initialized. 
Call init_db() first.") +} + +#[cfg(test)] +mod tests { + use super::*; + use tempfile::TempDir; + + fn open_test_db() -> (Database, TempDir) { + let dir = tempfile::tempdir().unwrap(); + let db = Database::open(dir.path()).unwrap(); + (db, dir) + } + + #[test] + fn test_open_creates_files() { + let dir = tempfile::tempdir().unwrap(); + Database::open(dir.path()).unwrap(); + assert!(dir.path().join("apps.jsonl").exists()); + assert!(dir.path().join("hubs.jsonl").exists()); + assert!(dir.path().join("extra_apps.jsonl").exists()); + assert!(dir.path().join("extra_hubs.jsonl").exists()); + } + + #[test] + fn test_app_crud() { + let (db, _dir) = open_test_db(); + let app = AppRecord::new( + "TestApp".to_string(), + std::collections::HashMap::from([("owner".to_string(), Some("alice".to_string()))]), + ); + db.upsert_app(&app).unwrap(); + + let apps = db.load_apps().unwrap(); + assert_eq!(apps.len(), 1); + assert_eq!(apps[0].name, "TestApp"); + + let found = db.find_app(&app.id).unwrap(); + assert!(found.is_some()); + + let deleted = db.delete_app(&app.id).unwrap(); + assert!(deleted); + assert!(db.load_apps().unwrap().is_empty()); + } + + #[test] + fn test_hub_crud() { + use crate::websdk::cloud_rules::data::hub_item::{HubItem, Info}; + let (db, _dir) = open_test_db(); + let hub = HubRecord::new( + "fd9b2602-62c5-4d55-bd1e-0d6537714ca0".to_string(), + HubItem { + base_version: 6, + config_version: 1, + uuid: "fd9b2602-62c5-4d55-bd1e-0d6537714ca0".to_string(), + info: Info { + hub_name: "GitHub".to_string(), + hub_icon_url: None, + }, + api_keywords: vec!["owner".to_string(), "repo".to_string()], + auth_keywords: vec![], + app_url_templates: vec![], + target_check_api: None, + }, + ); + db.upsert_hub(&hub).unwrap(); + let hubs = db.load_hubs().unwrap(); + assert_eq!(hubs.len(), 1); + assert_eq!(hubs[0].uuid, "fd9b2602-62c5-4d55-bd1e-0d6537714ca0"); + + let deleted = db + .delete_hub("fd9b2602-62c5-4d55-bd1e-0d6537714ca0") + .unwrap(); + assert!(deleted); + 
assert!(db.load_hubs().unwrap().is_empty()); + } +} diff --git a/src/database/models/app.rs b/src/database/models/app.rs new file mode 100644 index 0000000..104e0ce --- /dev/null +++ b/src/database/models/app.rs @@ -0,0 +1,101 @@ +use crate::websdk::cloud_rules::data::app_item::AppItem; +use serde::{Deserialize, Serialize}; +use std::collections::HashMap; + +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] +pub struct AppRecord { + /// UUID v4 identifier (replaces Room's auto-increment Long id) + pub id: String, + pub name: String, + pub app_id: HashMap>, + #[serde(skip_serializing_if = "Option::is_none")] + pub invalid_version_number_field_regex: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub include_version_number_field_regex: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub ignore_version_number: Option, + /// Cloud config (AppItem), optional + #[serde(skip_serializing_if = "Option::is_none")] + pub cloud_config: Option, + /// Space-separated hub UUIDs in priority order + #[serde(skip_serializing_if = "Option::is_none")] + pub enable_hub_list: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub star: Option, +} + +impl AppRecord { + pub fn new(name: String, app_id: HashMap>) -> Self { + Self { + id: uuid::Uuid::new_v4().to_string(), + name, + app_id, + invalid_version_number_field_regex: None, + include_version_number_field_regex: None, + ignore_version_number: None, + cloud_config: None, + enable_hub_list: None, + star: None, + } + } + + pub fn get_sorted_hub_uuids(&self) -> Vec { + match &self.enable_hub_list { + Some(s) if !s.is_empty() => s.split(' ').map(String::from).collect(), + _ => vec![], + } + } + + pub fn set_sorted_hub_uuids(&mut self, uuids: &[String]) { + let s = uuids.join(" "); + self.enable_hub_list = if s.is_empty() { None } else { Some(s) }; + } +} + +#[cfg(test)] +mod tests { + use super::*; + + fn sample_app() -> AppRecord { + AppRecord { + id: "test-uuid".to_string(), + name: 
"TestApp".to_string(), + app_id: HashMap::from([("owner".to_string(), Some("alice".to_string()))]), + invalid_version_number_field_regex: None, + include_version_number_field_regex: None, + ignore_version_number: None, + cloud_config: None, + enable_hub_list: Some("hub1 hub2".to_string()), + star: Some(true), + } + } + + #[test] + fn test_serialization_roundtrip() { + let app = sample_app(); + let json = serde_json::to_string(&app).unwrap(); + let decoded: AppRecord = serde_json::from_str(&json).unwrap(); + assert_eq!(app, decoded); + } + + #[test] + fn test_get_sorted_hub_uuids() { + let app = sample_app(); + let uuids = app.get_sorted_hub_uuids(); + assert_eq!(uuids, vec!["hub1", "hub2"]); + } + + #[test] + fn test_set_sorted_hub_uuids() { + let mut app = sample_app(); + app.set_sorted_hub_uuids(&["a".to_string(), "b".to_string(), "c".to_string()]); + assert_eq!(app.enable_hub_list, Some("a b c".to_string())); + } + + #[test] + fn test_empty_hub_list_is_none() { + let mut app = sample_app(); + app.set_sorted_hub_uuids(&[]); + assert_eq!(app.enable_hub_list, None); + } +} diff --git a/src/database/models/extra_app.rs b/src/database/models/extra_app.rs new file mode 100644 index 0000000..0123376 --- /dev/null +++ b/src/database/models/extra_app.rs @@ -0,0 +1,48 @@ +use serde::{Deserialize, Serialize}; +use std::collections::HashMap; + +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] +pub struct ExtraAppRecord { + /// UUID v4 identifier + pub id: String, + pub app_id: HashMap>, + #[serde(skip_serializing_if = "Option::is_none")] + pub mark_version_number: Option, +} + +impl ExtraAppRecord { + pub fn new(app_id: HashMap>) -> Self { + Self { + id: uuid::Uuid::new_v4().to_string(), + app_id, + mark_version_number: None, + } + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_serialization_roundtrip() { + let record = ExtraAppRecord { + id: "test-uuid".to_string(), + app_id: HashMap::from([( + "android_app_package".to_string(), + 
Some("com.foo".to_string()), + )]), + mark_version_number: Some("1.2.3".to_string()), + }; + let json = serde_json::to_string(&record).unwrap(); + let decoded: ExtraAppRecord = serde_json::from_str(&json).unwrap(); + assert_eq!(record, decoded); + } + + #[test] + fn test_none_fields_skipped() { + let record = ExtraAppRecord::new(HashMap::new()); + let json = serde_json::to_string(&record).unwrap(); + assert!(!json.contains("mark_version_number")); + } +} diff --git a/src/database/models/extra_hub.rs b/src/database/models/extra_hub.rs new file mode 100644 index 0000000..4366d62 --- /dev/null +++ b/src/database/models/extra_hub.rs @@ -0,0 +1,56 @@ +use serde::{Deserialize, Serialize}; + +pub const GLOBAL_HUB_ID: &str = "GLOBAL"; + +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] +pub struct ExtraHubRecord { + /// Hub UUID or GLOBAL_HUB_ID + pub id: String, + #[serde(default)] + pub enable_global: bool, + #[serde(skip_serializing_if = "Option::is_none")] + pub url_replace_search: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub url_replace_string: Option, +} + +impl ExtraHubRecord { + pub fn new(id: String) -> Self { + Self { + id, + enable_global: false, + url_replace_search: None, + url_replace_string: None, + } + } + + pub fn global() -> Self { + Self::new(GLOBAL_HUB_ID.to_string()) + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_serialization_roundtrip() { + let record = ExtraHubRecord { + id: "some-hub-uuid".to_string(), + enable_global: true, + url_replace_search: Some("github.com".to_string()), + url_replace_string: Some("mirror.example.com".to_string()), + }; + let json = serde_json::to_string(&record).unwrap(); + let decoded: ExtraHubRecord = serde_json::from_str(&json).unwrap(); + assert_eq!(record, decoded); + } + + #[test] + fn test_none_fields_skipped() { + let record = ExtraHubRecord::global(); + let json = serde_json::to_string(&record).unwrap(); + assert!(!json.contains("url_replace_search")); + 
assert!(!json.contains("url_replace_string")); + } +} diff --git a/src/database/models/hub.rs b/src/database/models/hub.rs new file mode 100644 index 0000000..84e94ce --- /dev/null +++ b/src/database/models/hub.rs @@ -0,0 +1,80 @@ +use crate::websdk::cloud_rules::data::hub_item::HubItem; +use serde::{Deserialize, Serialize}; +use std::collections::HashMap; + +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] +pub struct HubRecord { + /// Hub UUID — primary key + pub uuid: String, + pub hub_config: HubItem, + pub auth: HashMap, + #[serde(default)] + pub ignore_app_id_list: Vec>>, + /// 0 = disabled, 1 = enabled + #[serde(default)] + pub applications_mode: i32, + #[serde(default)] + pub user_ignore_app_id_list: Vec>>, + /// Lower = higher priority. Default is -(hub list size). + #[serde(default)] + pub sort_point: i32, +} + +impl HubRecord { + pub fn new(uuid: String, hub_config: HubItem) -> Self { + Self { + uuid, + hub_config, + auth: HashMap::new(), + ignore_app_id_list: vec![], + applications_mode: 0, + user_ignore_app_id_list: vec![], + sort_point: 0, + } + } + + pub fn applications_mode_enabled(&self) -> bool { + self.applications_mode == 1 + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::websdk::cloud_rules::data::hub_item::Info; + + fn sample_hub() -> HubRecord { + HubRecord::new( + "fd9b2602-62c5-4d55-bd1e-0d6537714ca0".to_string(), + HubItem { + base_version: 6, + config_version: 3, + uuid: "fd9b2602-62c5-4d55-bd1e-0d6537714ca0".to_string(), + info: Info { + hub_name: "GitHub".to_string(), + hub_icon_url: None, + }, + api_keywords: vec!["owner".to_string(), "repo".to_string()], + auth_keywords: vec![], + app_url_templates: vec!["https://github.com/%owner/%repo/".to_string()], + target_check_api: None, + }, + ) + } + + #[test] + fn test_serialization_roundtrip() { + let hub = sample_hub(); + let json = serde_json::to_string(&hub).unwrap(); + let decoded: HubRecord = serde_json::from_str(&json).unwrap(); + assert_eq!(hub, decoded); + 
} + + #[test] + fn test_applications_mode() { + let mut hub = sample_hub(); + assert!(!hub.applications_mode_enabled()); + hub.applications_mode = 1; + assert!(hub.applications_mode_enabled()); + } +} diff --git a/src/database/models/mod.rs b/src/database/models/mod.rs new file mode 100644 index 0000000..c4e40bc --- /dev/null +++ b/src/database/models/mod.rs @@ -0,0 +1,4 @@ +pub mod app; +pub mod extra_app; +pub mod extra_hub; +pub mod hub; diff --git a/src/database/store.rs b/src/database/store.rs new file mode 100644 index 0000000..32d374f --- /dev/null +++ b/src/database/store.rs @@ -0,0 +1,266 @@ +use file_locker::FileLock; +use jsonl::ReadError; +use serde::{de::DeserializeOwned, Serialize}; +use std::io::{BufReader, Seek, SeekFrom, Write}; +use std::path::{Path, PathBuf}; + +use crate::error::{Error, Result}; + +pub trait HasId { + fn id(&self) -> &str; +} + +/// A simple JSONL-backed persistent store for a single record type. +/// +/// Each line in the file is a JSON-serialized record. All mutating operations +/// use `file-locker` advisory locking to ensure safe concurrent access. +pub struct JsonlStore { + path: PathBuf, +} + +impl JsonlStore { + pub fn new(path: impl Into) -> Self { + Self { path: path.into() } + } + + pub fn path(&self) -> &Path { + &self.path + } + + /// Ensure the backing file exists (creates parent dirs as needed). + pub fn ensure_file(&self) -> Result<()> { + if !self.path.exists() { + if let Some(parent) = self.path.parent() { + std::fs::create_dir_all(parent)?; + } + std::fs::File::create(&self.path)?; + } + Ok(()) + } + + /// Read all records from the file line by line. 
+ pub fn load_all(&self) -> Result> { + if !self.path.exists() { + return Ok(vec![]); + } + let file = std::fs::File::open(&self.path)?; + let reader = BufReader::new(file); + read_all_from_reader(reader) + } + + fn acquire_write_lock(&self) -> Result { + self.ensure_file()?; + FileLock::new(&self.path) + .blocking(true) + .writeable(true) + .lock() + .map_err(Error::Io) + } + + /// Overwrite the entire file with the given records (must already hold lock). + fn write_all_locked(lock: &mut FileLock, records: &[T]) -> Result<()> { + lock.file.set_len(0)?; + lock.file.seek(SeekFrom::Start(0))?; + for record in records { + jsonl::write(&mut lock.file, record) + .map_err(|e| Error::Other(format!("jsonl write: {e}")))?; + } + lock.file.flush()?; + Ok(()) + } + + /// Insert or replace a record (matched by id). + pub fn upsert(&self, record: &T) -> Result<()> { + let mut lock = self.acquire_write_lock()?; + let mut records: Vec = { + lock.file.seek(SeekFrom::Start(0))?; + read_all_from_reader(BufReader::new(&lock.file))? + }; + + let id = record.id(); + let serialized: T = serde_json::from_str( + &serde_json::to_string(record).map_err(|e| Error::Other(format!("serialize: {e}")))?, + ) + .map_err(|e| Error::Other(format!("deserialize: {e}")))?; + + if let Some(pos) = records.iter().position(|r| r.id() == id) { + records[pos] = serialized; + } else { + records.push(serialized); + } + + Self::write_all_locked(&mut lock, &records) + } + + /// Delete the record with the given id. Returns true if a record was removed. + pub fn delete(&self, id: &str) -> Result { + let mut lock = self.acquire_write_lock()?; + let records: Vec = { + lock.file.seek(SeekFrom::Start(0))?; + read_all_from_reader(BufReader::new(&lock.file))? 
+ }; + + let original_len = records.len(); + let filtered: Vec = records.into_iter().filter(|r| r.id() != id).collect(); + let deleted = filtered.len() < original_len; + Self::write_all_locked(&mut lock, &filtered)?; + Ok(deleted) + } + + /// Find a record by id (read-only, no lock). + pub fn find_by_id(&self, id: &str) -> Result> { + Ok(self.load_all::()?.into_iter().find(|r| r.id() == id)) + } +} + +fn read_all_from_reader(mut reader: R) -> Result> { + let mut records = Vec::new(); + loop { + match jsonl::read::<_, T>(&mut reader) { + Ok(record) => records.push(record), + Err(ReadError::Eof) => break, + Err(ReadError::Deserialize(e)) => { + eprintln!("JsonlStore: skipping malformed line: {e}"); + } + Err(ReadError::Io(e)) => return Err(Error::Io(e)), + } + } + Ok(records) +} + +#[cfg(test)] +mod tests { + use super::*; + use serde::{Deserialize, Serialize}; + use tempfile::NamedTempFile; + + #[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] + struct TestRecord { + id: String, + value: String, + } + + impl HasId for TestRecord { + fn id(&self) -> &str { + &self.id + } + } + + fn make_store() -> (JsonlStore, NamedTempFile) { + let tmp = NamedTempFile::new().unwrap(); + let store = JsonlStore::new(tmp.path()); + (store, tmp) + } + + #[test] + fn test_empty_load() { + let (store, _tmp) = make_store(); + let records: Vec = store.load_all().unwrap(); + assert!(records.is_empty()); + } + + #[test] + fn test_upsert_and_load() { + let (store, _tmp) = make_store(); + let r = TestRecord { + id: "1".to_string(), + value: "hello".to_string(), + }; + store.upsert(&r).unwrap(); + let records: Vec = store.load_all().unwrap(); + assert_eq!(records.len(), 1); + assert_eq!(records[0], r); + } + + #[test] + fn test_upsert_updates_existing() { + let (store, _tmp) = make_store(); + store + .upsert(&TestRecord { + id: "1".to_string(), + value: "old".to_string(), + }) + .unwrap(); + store + .upsert(&TestRecord { + id: "1".to_string(), + value: "new".to_string(), + }) + 
.unwrap(); + let records: Vec = store.load_all().unwrap(); + assert_eq!(records.len(), 1); + assert_eq!(records[0].value, "new"); + } + + #[test] + fn test_multiple_records() { + let (store, _tmp) = make_store(); + for i in 0..5 { + store + .upsert(&TestRecord { + id: i.to_string(), + value: format!("v{i}"), + }) + .unwrap(); + } + let records: Vec = store.load_all().unwrap(); + assert_eq!(records.len(), 5); + } + + #[test] + fn test_delete() { + let (store, _tmp) = make_store(); + store + .upsert(&TestRecord { + id: "1".to_string(), + value: "a".to_string(), + }) + .unwrap(); + store + .upsert(&TestRecord { + id: "2".to_string(), + value: "b".to_string(), + }) + .unwrap(); + let deleted = store.delete::("1").unwrap(); + assert!(deleted); + let records: Vec = store.load_all().unwrap(); + assert_eq!(records.len(), 1); + assert_eq!(records[0].id, "2"); + } + + #[test] + fn test_delete_nonexistent() { + let (store, _tmp) = make_store(); + let deleted = store.delete::("nope").unwrap(); + assert!(!deleted); + } + + #[test] + fn test_find_by_id() { + let (store, _tmp) = make_store(); + store + .upsert(&TestRecord { + id: "42".to_string(), + value: "answer".to_string(), + }) + .unwrap(); + let found: Option = store.find_by_id("42").unwrap(); + assert!(found.is_some()); + assert_eq!(found.unwrap().value, "answer"); + } + + #[test] + fn test_find_by_id_missing() { + let (store, _tmp) = make_store(); + let found: Option = store.find_by_id("999").unwrap(); + assert!(found.is_none()); + } + + #[test] + fn test_file_not_exist_returns_empty() { + let store = JsonlStore::new("/tmp/this_file_does_not_exist_upgradeall.jsonl"); + let records: Vec = store.load_all().unwrap(); + assert!(records.is_empty()); + } +} diff --git a/src/downloader/config.rs b/src/downloader/config.rs new file mode 100644 index 0000000..a4b0e9d --- /dev/null +++ b/src/downloader/config.rs @@ -0,0 +1,199 @@ +//! 
Configuration system for the downloader module + +use serde::{Deserialize, Serialize}; + +/// Downloader backend selection +#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize, Default)] +#[serde(rename_all = "lowercase")] +pub enum DownloaderBackend { + /// Use trauma downloader (default) + #[default] + Trauma, + // Future backends can be added here: + // /// Use reqwest downloader + // Reqwest, + // /// Use custom CLI command + // Custom, +} + +/// Download configuration +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct DownloadConfig { + /// Downloader backend to use + pub backend: DownloaderBackend, + + /// Maximum number of concurrent downloads + #[serde(default = "default_max_concurrent")] + pub max_concurrent: usize, + + /// Number of retry attempts for failed downloads + #[serde(default = "default_retries")] + pub retries: usize, + + /// Timeout for each download in seconds + #[serde(default = "default_timeout")] + pub timeout_seconds: u64, + + /// Custom command template for CLI downloader (future use) + /// Example: "wget -O \"${FILE}\" \"${URI}\"" + #[serde(skip_serializing_if = "Option::is_none")] + pub custom_command: Option, +} + +fn default_max_concurrent() -> usize { + 4 +} + +fn default_retries() -> usize { + 3 +} + +fn default_timeout() -> u64 { + 300 // 5 minutes +} + +impl Default for DownloadConfig { + fn default() -> Self { + Self { + backend: DownloaderBackend::default(), + max_concurrent: default_max_concurrent(), + retries: default_retries(), + timeout_seconds: default_timeout(), + custom_command: None, + } + } +} + +impl DownloadConfig { + /// Create a new configuration with default values + pub fn new() -> Self { + Self::default() + } + + /// Set the backend + pub fn with_backend(mut self, backend: DownloaderBackend) -> Self { + self.backend = backend; + self + } + + /// Set max concurrent downloads + pub fn with_max_concurrent(mut self, max: usize) -> Self { + self.max_concurrent = max; + self + } + + /// Set 
retry count + pub fn with_retries(mut self, retries: usize) -> Self { + self.retries = retries; + self + } + + /// Set timeout in seconds + pub fn with_timeout(mut self, seconds: u64) -> Self { + self.timeout_seconds = seconds; + self + } + + /// Set custom command template + pub fn with_custom_command(mut self, command: impl Into) -> Self { + self.custom_command = Some(command.into()); + self + } + + /// Load configuration from environment variables + /// + /// Supported environment variables: + /// - DOWNLOADER_BACKEND: "trauma" (default) + /// - DOWNLOADER_MAX_CONCURRENT: number (default: 4) + /// - DOWNLOADER_RETRIES: number (default: 3) + /// - DOWNLOADER_TIMEOUT: seconds (default: 300) + /// - FETCHCOMMAND: custom download command (like Portage's FETCHCOMMAND) + pub fn from_env() -> Self { + let mut config = Self::default(); + + if let Ok(backend) = std::env::var("DOWNLOADER_BACKEND") { + config.backend = match backend.to_lowercase().as_str() { + "trauma" => DownloaderBackend::Trauma, + _ => { + eprintln!("Unknown DOWNLOADER_BACKEND: {}, using default", backend); + DownloaderBackend::Trauma + } + }; + } + + if let Ok(max_concurrent) = std::env::var("DOWNLOADER_MAX_CONCURRENT") { + if let Ok(max) = max_concurrent.parse() { + config.max_concurrent = max; + } + } + + if let Ok(retries) = std::env::var("DOWNLOADER_RETRIES") { + if let Ok(retry_count) = retries.parse() { + config.retries = retry_count; + } + } + + if let Ok(timeout) = std::env::var("DOWNLOADER_TIMEOUT") { + if let Ok(timeout_secs) = timeout.parse() { + config.timeout_seconds = timeout_secs; + } + } + + if let Ok(command) = std::env::var("FETCHCOMMAND") { + config.custom_command = Some(command); + } + + config + } + + /// Load configuration from JSON string + pub fn from_json(json: &str) -> Result { + serde_json::from_str(json) + } + + /// Serialize configuration to JSON string + pub fn to_json(&self) -> Result { + serde_json::to_string_pretty(self) + } +} + +#[cfg(test)] +mod tests { + use 
super::*; + + #[test] + fn test_default_config() { + let config = DownloadConfig::default(); + assert_eq!(config.backend, DownloaderBackend::Trauma); + assert_eq!(config.max_concurrent, 4); + assert_eq!(config.retries, 3); + assert_eq!(config.timeout_seconds, 300); + assert!(config.custom_command.is_none()); + } + + #[test] + fn test_builder_pattern() { + let config = DownloadConfig::new() + .with_backend(DownloaderBackend::Trauma) + .with_max_concurrent(8) + .with_retries(5) + .with_timeout(600) + .with_custom_command("wget -O \"${FILE}\" \"${URI}\""); + + assert_eq!(config.backend, DownloaderBackend::Trauma); + assert_eq!(config.max_concurrent, 8); + assert_eq!(config.retries, 5); + assert_eq!(config.timeout_seconds, 600); + assert!(config.custom_command.is_some()); + } + + #[test] + fn test_json_serialization() { + let config = DownloadConfig::new().with_max_concurrent(8); + let json = config.to_json().unwrap(); + let deserialized: DownloadConfig = DownloadConfig::from_json(&json).unwrap(); + + assert_eq!(config.backend, deserialized.backend); + assert_eq!(config.max_concurrent, deserialized.max_concurrent); + } +} diff --git a/src/downloader/error.rs b/src/downloader/error.rs new file mode 100644 index 0000000..7384726 --- /dev/null +++ b/src/downloader/error.rs @@ -0,0 +1,95 @@ +//! Error types for the downloader module + +use std::fmt; + +/// Error type for download operations +#[derive(Debug, Clone)] +pub struct DownloadError { + pub kind: ErrorKind, + pub message: String, +} + +/// Kinds of download errors +#[derive(Debug, Clone, PartialEq, Eq)] +pub enum ErrorKind { + /// Network error (connection failed, timeout, etc.) + Network, + /// File system error (permission denied, disk full, etc.) + FileSystem, + /// Invalid input (bad URL, invalid path, etc.) 
+ InvalidInput, + /// Task not found + TaskNotFound, + /// Task already exists + TaskAlreadyExists, + /// Download was cancelled + Cancelled, + /// Operation not supported by this downloader implementation + Unsupported, + /// Unknown error + Unknown, +} + +impl DownloadError { + pub fn new(kind: ErrorKind, message: impl Into) -> Self { + Self { + kind, + message: message.into(), + } + } + + pub fn network(message: impl Into) -> Self { + Self::new(ErrorKind::Network, message) + } + + pub fn file_system(message: impl Into) -> Self { + Self::new(ErrorKind::FileSystem, message) + } + + pub fn invalid_input(message: impl Into) -> Self { + Self::new(ErrorKind::InvalidInput, message) + } + + pub fn task_not_found(task_id: impl fmt::Display) -> Self { + Self::new( + ErrorKind::TaskNotFound, + format!("Task not found: {}", task_id), + ) + } + + pub fn task_already_exists(task_id: impl fmt::Display) -> Self { + Self::new( + ErrorKind::TaskAlreadyExists, + format!("Task already exists: {}", task_id), + ) + } + + pub fn cancelled(message: impl Into) -> Self { + Self::new(ErrorKind::Cancelled, message) + } + + pub fn unsupported(message: impl Into) -> Self { + Self::new(ErrorKind::Unsupported, message) + } + + pub fn unknown(message: impl Into) -> Self { + Self::new(ErrorKind::Unknown, message) + } +} + +impl fmt::Display for DownloadError { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "{:?}: {}", self.kind, self.message) + } +} + +impl std::error::Error for DownloadError {} + +impl From for DownloadError { + fn from(err: std::io::Error) -> Self { + Self::file_system(err.to_string()) + } +} + +/// Result type for download operations +pub type Result = std::result::Result; diff --git a/src/downloader/external_rpc_impl.rs b/src/downloader/external_rpc_impl.rs new file mode 100644 index 0000000..e98debd --- /dev/null +++ b/src/downloader/external_rpc_impl.rs @@ -0,0 +1,290 @@ +//! External RPC-based downloader implementation +//! +//! 
Delegates download operations to an external service via HTTP JSON-RPC 2.0. +//! The external service (e.g., a Kotlin-side GooglePlayDownloader) must implement +//! the standard downloader RPC protocol: +//! - download_submit(url, dest_path, headers?, cookies?) -> {task_id} +//! - download_get_status(task_id) -> TaskInfo +//! - download_wait_for_change(task_id, timeout_seconds) -> TaskInfo +//! - download_pause(task_id) -> bool +//! - download_resume(task_id) -> bool +//! - download_cancel(task_id) -> bool + +use super::error::{DownloadError, Result}; +use super::traits::{Downloader, DownloaderCapabilities, ProgressCallback, RequestOptions}; +use async_trait::async_trait; +use parking_lot::RwLock; +use serde::{Deserialize, Serialize}; +use std::collections::HashMap; +use std::path::{Path, PathBuf}; + +/// JSON-RPC 2.0 request structure +#[derive(Serialize)] +struct JsonRpcRequest<'a> { + jsonrpc: &'static str, + method: &'a str, + params: serde_json::Value, + id: u64, +} + +/// JSON-RPC 2.0 response structure +#[derive(Deserialize)] +struct JsonRpcResponse { + #[allow(dead_code)] + jsonrpc: Option, + result: Option, + error: Option, + #[allow(dead_code)] + id: Option, +} + +#[derive(Deserialize)] +struct JsonRpcError { + #[allow(dead_code)] + code: i64, + message: String, +} + +/// Task ID response from external download_submit +#[derive(Deserialize)] +struct ExternalTaskIdResponse { + task_id: String, +} + +/// Task info from external service (subset we care about) +#[derive(Deserialize, Debug)] +struct ExternalTaskInfo { + #[allow(dead_code)] + task_id: String, + state: String, + progress: ExternalProgress, + error: Option, +} + +#[derive(Deserialize, Debug)] +struct ExternalProgress { + downloaded_bytes: u64, + total_bytes: Option, + #[allow(dead_code)] + speed_bytes_per_sec: Option, + #[allow(dead_code)] + eta_seconds: Option, +} + +/// Downloader that delegates all operations to an external JSON-RPC service. 
+/// +/// The external service is expected to implement the full downloader protocol +/// (submit, status, wait_for_change, pause, resume, cancel). +pub struct ExternalRpcDownloader { + rpc_url: String, + http_client: reqwest::Client, + /// url -> external task_id (for routing cancel/pause/resume by url) + task_mapping: RwLock>, + /// Atomic request ID counter + request_id: std::sync::atomic::AtomicU64, + capabilities: DownloaderCapabilities, +} + +impl ExternalRpcDownloader { + /// Create a new external RPC downloader pointing at the given service URL. + pub fn new(rpc_url: String) -> Self { + Self { + rpc_url, + http_client: reqwest::Client::new(), + task_mapping: RwLock::new(HashMap::new()), + request_id: std::sync::atomic::AtomicU64::new(1), + capabilities: DownloaderCapabilities::all_enabled(), + } + } + + /// Make a JSON-RPC call to the external service. + async fn rpc_call( + &self, + method: &str, + params: serde_json::Value, + ) -> Result { + let id = self + .request_id + .fetch_add(1, std::sync::atomic::Ordering::Relaxed); + + let request = JsonRpcRequest { + jsonrpc: "2.0", + method, + params, + id, + }; + + let response = self + .http_client + .post(&self.rpc_url) + .json(&request) + .send() + .await + .map_err(|e| DownloadError::network(format!("RPC request failed: {}", e)))?; + + let rpc_response: JsonRpcResponse = response + .json() + .await + .map_err(|e| DownloadError::network(format!("RPC response parse failed: {}", e)))?; + + if let Some(err) = rpc_response.error { + return Err(DownloadError::network(format!( + "RPC error: {}", + err.message + ))); + } + + let result = rpc_response + .result + .ok_or_else(|| DownloadError::network("RPC response missing result"))?; + + serde_json::from_value(result) + .map_err(|e| DownloadError::network(format!("RPC result deserialize failed: {}", e))) + } +} + +#[async_trait] +impl Downloader for ExternalRpcDownloader { + async fn download( + &self, + url: &str, + dest: &Path, + progress: Option, + options: 
Option, + ) -> Result<()> { + // 1. Submit download task to external service + let params = serde_json::json!({ + "url": url, + "dest_path": dest.to_str().unwrap_or(""), + "headers": options.as_ref().and_then(|o| o.headers.clone()), + "cookies": options.as_ref().and_then(|o| o.cookies.clone()), + }); + + let submit_response: ExternalTaskIdResponse = + self.rpc_call("download_submit", params).await?; + let external_task_id = submit_response.task_id; + + // 2. Record mapping for cancel/pause/resume + self.task_mapping + .write() + .insert(url.to_string(), external_task_id.clone()); + + // 3. Poll for status changes until terminal state + loop { + let params = serde_json::json!({ + "task_id": &external_task_id, + "timeout_seconds": 30_u64, + }); + + let task_info: ExternalTaskInfo = + match self.rpc_call("download_wait_for_change", params).await { + Ok(info) => info, + Err(e) => { + // On poll error, try a direct status check + let status_params = serde_json::json!({ + "task_id": &external_task_id, + }); + match self + .rpc_call::("download_get_status", status_params) + .await + { + Ok(info) => info, + Err(_) => { + // Both failed, clean up and return error + self.task_mapping.write().remove(url); + return Err(e); + } + } + } + }; + + // Update progress callback + if let Some(ref cb) = progress { + cb( + task_info.progress.downloaded_bytes, + task_info.progress.total_bytes, + ); + } + + // Check terminal states + match task_info.state.as_str() { + "completed" => { + self.task_mapping.write().remove(url); + return Ok(()); + } + "failed" => { + self.task_mapping.write().remove(url); + let msg = task_info + .error + .unwrap_or_else(|| "External download failed".to_string()); + return Err(DownloadError::network(msg)); + } + "cancelled" => { + self.task_mapping.write().remove(url); + return Err(DownloadError::cancelled("Download was cancelled")); + } + _ => { + // pending, downloading, stopped — continue polling + continue; + } + } + } + } + + async fn 
download_batch(&self, tasks: Vec<(String, PathBuf)>) -> Vec> { + // Simple sequential implementation for external downloaders + let mut results = Vec::with_capacity(tasks.len()); + for (url, dest) in tasks { + let result = self.download(&url, &dest, None, None).await; + results.push(result); + } + results + } + + fn name(&self) -> &str { + "external_rpc" + } + + fn capabilities(&self) -> &DownloaderCapabilities { + &self.capabilities + } + + async fn cancel(&self, url: &str) -> Result<()> { + let ext_id = self.task_mapping.read().get(url).cloned(); + if let Some(ext_id) = ext_id { + let params = serde_json::json!({"task_id": ext_id}); + let _: bool = self.rpc_call("download_cancel", params).await?; + } + Ok(()) + } + + async fn pause(&self, url: &str) -> Result<()> { + let ext_id = self.task_mapping.read().get(url).cloned(); + if let Some(ext_id) = ext_id { + let params = serde_json::json!({"task_id": ext_id}); + let _: bool = self.rpc_call("download_pause", params).await?; + } + Ok(()) + } + + async fn resume(&self, url: &str) -> Result<()> { + let ext_id = self.task_mapping.read().get(url).cloned(); + if let Some(ext_id) = ext_id { + let params = serde_json::json!({"task_id": ext_id}); + let _: bool = self.rpc_call("download_resume", params).await?; + } + Ok(()) + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_create_external_downloader() { + let dl = ExternalRpcDownloader::new("http://127.0.0.1:12345".to_string()); + assert_eq!(dl.name(), "external_rpc"); + assert!(dl.capabilities().supports_pause); + } +} diff --git a/src/downloader/hub_dispatch.rs b/src/downloader/hub_dispatch.rs new file mode 100644 index 0000000..c6b753b --- /dev/null +++ b/src/downloader/hub_dispatch.rs @@ -0,0 +1,208 @@ +//! Hub-based download dispatcher +//! +//! Routes download tasks to registered external downloaders based on hub_uuid. +//! If no external downloader is registered for the given hub_uuid, falls back +//! 
to the default built-in downloader (TraumaDownloader / HTTP). + +use super::error::Result; +use super::traits::{Downloader, DownloaderCapabilities, ProgressCallback, RequestOptions}; +use async_trait::async_trait; +use parking_lot::RwLock; +use std::collections::HashMap; +use std::path::{Path, PathBuf}; +use std::sync::Arc; + +/// Shared internal state for HubDispatchDownloader. +/// +/// Wrapped in Arc so that clones are cheap and share the same state. +/// This allows the RPC server to hold a reference for register/unregister +/// while DownloadTaskManager holds another reference as Box. +struct HubDispatchState { + /// hub_uuid -> external downloader + external: RwLock>>>, + /// Default downloader for unregistered hub_uuids (TraumaDownloader) + default: Arc>, + /// Active task tracking: url -> hub_uuid (for routing cancel/pause/resume) + active_tasks: RwLock>>, +} + +/// Downloads dispatcher that routes tasks by hub_uuid. +/// +/// - If `hub_uuid` is provided (via `RequestOptions.metadata["hub_uuid"]`) +/// and a downloader is registered for it, the task is routed there. +/// - Otherwise, the default built-in HTTP downloader handles it. +/// +/// This struct is cheaply cloneable (Arc-based shared state). +pub struct HubDispatchDownloader { + state: Arc, +} + +impl HubDispatchDownloader { + /// Create a new dispatcher with the given default downloader. + pub fn new(default: Box) -> Self { + Self { + state: Arc::new(HubDispatchState { + external: RwLock::new(HashMap::new()), + default: Arc::new(default), + active_tasks: RwLock::new(HashMap::new()), + }), + } + } + + /// Register an external downloader for a specific hub_uuid. + pub fn register(&self, hub_uuid: &str, downloader: Box) { + self.state + .external + .write() + .insert(hub_uuid.to_string(), Arc::new(downloader)); + } + + /// Unregister the external downloader for a specific hub_uuid. 
+ pub fn unregister(&self, hub_uuid: &str) { + self.state.external.write().remove(hub_uuid); + } + + /// Check if a downloader is registered for the given hub_uuid. + pub fn has_downloader(&self, hub_uuid: &str) -> bool { + self.state.external.read().contains_key(hub_uuid) + } + + /// Resolve which downloader to use based on hub_uuid. + fn resolve(&self, hub_uuid: Option<&str>) -> Arc> { + if let Some(uuid) = hub_uuid { + if let Some(dl) = self.state.external.read().get(uuid) { + return dl.clone(); + } + } + self.state.default.clone() + } + + /// Extract hub_uuid from RequestOptions metadata. + fn extract_hub_uuid(options: Option<&RequestOptions>) -> Option { + options + .and_then(|o| o.metadata.as_ref()) + .and_then(|m| m.get("hub_uuid")) + .cloned() + } + + /// Record an active task for url -> hub_uuid routing. + fn track_task(&self, url: &str, hub_uuid: Option) { + self.state + .active_tasks + .write() + .insert(url.to_string(), hub_uuid); + } + + /// Remove an active task record. + fn untrack_task(&self, url: &str) { + self.state.active_tasks.write().remove(url); + } + + /// Look up which hub_uuid (if any) an active URL belongs to. 
+ fn lookup_task_hub(&self, url: &str) -> Option> { + self.state.active_tasks.read().get(url).cloned() + } +} + +impl Clone for HubDispatchDownloader { + fn clone(&self) -> Self { + Self { + state: self.state.clone(), + } + } +} + +#[async_trait] +impl Downloader for HubDispatchDownloader { + async fn download( + &self, + url: &str, + dest: &Path, + progress: Option, + options: Option, + ) -> Result<()> { + let hub_uuid = Self::extract_hub_uuid(options.as_ref()); + let downloader = self.resolve(hub_uuid.as_deref()); + + // Track this task so cancel/pause/resume can find the right downloader + self.track_task(url, hub_uuid); + + let result = downloader.download(url, dest, progress, options).await; + + // Clean up tracking on completion (success or failure) + self.untrack_task(url); + + result + } + + async fn download_batch(&self, tasks: Vec<(String, PathBuf)>) -> Vec> { + // Batch downloads go through the default downloader since there's + // no per-task metadata available in the batch interface. 
+ self.state.default.download_batch(tasks).await + } + + fn name(&self) -> &str { + "hub_dispatch" + } + + fn capabilities(&self) -> &DownloaderCapabilities { + // Return default backend capabilities + self.state.default.capabilities() + } + + async fn cancel(&self, url: &str) -> Result<()> { + let hub_uuid = self.lookup_task_hub(url).flatten(); + self.resolve(hub_uuid.as_deref()).cancel(url).await + } + + async fn pause(&self, url: &str) -> Result<()> { + let hub_uuid = self.lookup_task_hub(url).flatten(); + self.resolve(hub_uuid.as_deref()).pause(url).await + } + + async fn resume(&self, url: &str) -> Result<()> { + let hub_uuid = self.lookup_task_hub(url).flatten(); + self.resolve(hub_uuid.as_deref()).resume(url).await + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::downloader::TraumaDownloader; + + #[test] + fn test_create_dispatcher() { + let default = Box::new(TraumaDownloader::default_settings()); + let dispatcher = HubDispatchDownloader::new(default); + assert_eq!(dispatcher.name(), "hub_dispatch"); + assert!(!dispatcher.has_downloader("some-uuid")); + } + + #[test] + fn test_register_unregister() { + let default = Box::new(TraumaDownloader::default_settings()); + let dispatcher = HubDispatchDownloader::new(default); + + let ext = Box::new(TraumaDownloader::default_settings()); + dispatcher.register("test-uuid", ext); + assert!(dispatcher.has_downloader("test-uuid")); + + dispatcher.unregister("test-uuid"); + assert!(!dispatcher.has_downloader("test-uuid")); + } + + #[test] + fn test_clone_shares_state() { + let default = Box::new(TraumaDownloader::default_settings()); + let dispatcher = HubDispatchDownloader::new(default); + + let clone = dispatcher.clone(); + + let ext = Box::new(TraumaDownloader::default_settings()); + dispatcher.register("shared-uuid", ext); + + // Clone should see the registration + assert!(clone.has_downloader("shared-uuid")); + } +} diff --git a/src/downloader/mod.rs b/src/downloader/mod.rs new file mode 100644 index 
0000000..d4f2233 --- /dev/null +++ b/src/downloader/mod.rs @@ -0,0 +1,58 @@ +//! Downloader module providing pluggable download functionality +//! +//! This module implements a flexible, trait-based downloader system that supports: +//! - Multiple backend implementations (trauma, reqwest, custom CLI, etc.) +//! - Task state management with progress tracking +//! - Long-polling for download status updates +//! - Configuration-driven backend selection +//! - JSON-RPC integration for remote control + +mod config; +mod error; +mod external_rpc_impl; +mod hub_dispatch; +mod state; +mod task_manager; +mod traits; +mod trauma_impl; + +pub use config::{DownloadConfig, DownloaderBackend}; +pub use error::{DownloadError, Result}; +pub use external_rpc_impl::ExternalRpcDownloader; +pub use hub_dispatch::HubDispatchDownloader; +pub use state::{DownloadProgress, DownloadState, SpeedCalculator, TaskInfo}; +pub use task_manager::DownloadTaskManager; +pub use traits::Downloader; +pub use trauma_impl::TraumaDownloader; + +/// Create a downloader instance based on the provided configuration +pub fn create_downloader(config: &DownloadConfig) -> Box { + match config.backend { + DownloaderBackend::Trauma => Box::new(TraumaDownloader::new( + config.max_concurrent, + config.retries, + config.timeout_seconds, + )), + // Future implementations can be added here + // DownloaderBackend::Reqwest => Box::new(ReqwestDownloader::new(config)), + // DownloaderBackend::Custom => Box::new(CliDownloader::new(config)), + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_create_trauma_downloader() { + let config = DownloadConfig::default(); + let downloader = create_downloader(&config); + assert_eq!(downloader.name(), "reqwest"); + + // Test that capabilities are accessible + let caps = downloader.capabilities(); + assert!(caps.supports_pause); + assert!(caps.supports_resume); + assert!(caps.supports_cancellation); + } +} diff --git a/src/downloader/state.rs 
b/src/downloader/state.rs new file mode 100644 index 0000000..718b33b --- /dev/null +++ b/src/downloader/state.rs @@ -0,0 +1,532 @@ +//! Download state and progress tracking types + +use serde::{Deserialize, Deserializer, Serialize, Serializer}; +use std::collections::VecDeque; +use std::time::{Instant, SystemTime, UNIX_EPOCH}; +use uuid::Uuid; + +/// Speed calculator using sliding window for smooth speed measurement +/// +/// Keeps track of download samples over a time window and calculates +/// average speed to avoid fluctuations. +#[derive(Debug, Clone)] +pub struct SpeedCalculator { + /// Samples: (timestamp, downloaded_bytes) + samples: VecDeque<(Instant, u64)>, + /// Window size in seconds + window_secs: u64, + /// Maximum number of samples to keep + max_samples: usize, + /// Start time for calculating speed when only one sample exists + start_time: Option, +} + +impl SpeedCalculator { + /// Create a new speed calculator with specified window size + pub fn new(window_secs: u64) -> Self { + Self { + samples: VecDeque::with_capacity(64), + window_secs, + max_samples: 64, + start_time: None, + } + } + + /// Create with default 5-second window + pub fn default_window() -> Self { + Self::new(5) + } + + /// Record a new sample + pub fn record(&mut self, downloaded_bytes: u64) { + let now = Instant::now(); + + // Initialize start time on first record + if self.start_time.is_none() { + self.start_time = Some(now); + } + + // Remove samples outside the window + let cutoff = now - std::time::Duration::from_secs(self.window_secs); + while let Some((time, _)) = self.samples.front() { + if *time < cutoff { + self.samples.pop_front(); + } else { + break; + } + } + + // Add new sample + self.samples.push_back((now, downloaded_bytes)); + + // Limit max samples + while self.samples.len() > self.max_samples { + self.samples.pop_front(); + } + } + + /// Calculate average speed in bytes per second + pub fn speed_bytes_per_sec(&self) -> Option { + let (last_time, last_bytes) = 
self.samples.back()?; + + // Use sliding window if we have multiple samples + if self.samples.len() >= 2 { + let (first_time, first_bytes) = self.samples.front()?; + let duration = last_time.duration_since(*first_time); + let duration_secs = duration.as_secs_f64().max(0.001); + let bytes_diff = last_bytes.saturating_sub(*first_bytes); + return Some((bytes_diff as f64 / duration_secs) as u64); + } + + // For single sample, calculate from start time + if let Some(start) = self.start_time { + let duration = last_time.duration_since(start); + let duration_secs = duration.as_secs_f64().max(0.001); + return Some((*last_bytes as f64 / duration_secs) as u64); + } + + None + } + + /// Reset the calculator + pub fn reset(&mut self) { + self.samples.clear(); + self.start_time = None; + } +} + +/// Serialize SystemTime as Unix timestamp in milliseconds +fn serialize_system_time(time: &SystemTime, serializer: S) -> Result +where + S: Serializer, +{ + let millis = time + .duration_since(UNIX_EPOCH) + .map(|d| d.as_millis() as i64) + .unwrap_or(0); + serializer.serialize_i64(millis) +} + +/// Serialize Option as Unix timestamp in milliseconds +fn serialize_option_system_time( + time: &Option, + serializer: S, +) -> Result +where + S: Serializer, +{ + match time { + Some(t) => { + let millis = t + .duration_since(UNIX_EPOCH) + .map(|d| d.as_millis() as i64) + .unwrap_or(0); + serializer.serialize_some(&millis) + } + None => serializer.serialize_none(), + } +} + +/// Deserialize Unix timestamp in milliseconds to SystemTime +fn deserialize_system_time<'de, D>(deserializer: D) -> Result +where + D: Deserializer<'de>, +{ + let millis = i64::deserialize(deserializer)?; + Ok(UNIX_EPOCH + std::time::Duration::from_millis(millis as u64)) +} + +/// Deserialize Unix timestamp in milliseconds to Option +fn deserialize_option_system_time<'de, D>(deserializer: D) -> Result, D::Error> +where + D: Deserializer<'de>, +{ + let opt: Option = Option::deserialize(deserializer)?; + Ok(opt.map(|millis| 
UNIX_EPOCH + std::time::Duration::from_millis(millis as u64))) +} + +/// Download task state +#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)] +#[serde(rename_all = "lowercase")] +pub enum DownloadState { + /// Task is queued but not started yet + Pending, + /// Download is in progress + Downloading, + /// Download is paused (can be resumed) + Stopped, + /// Download completed successfully + Completed, + /// Download failed + Failed, + /// Download was cancelled + Cancelled, +} + +impl DownloadState { + /// Check if the task is in a terminal state (completed, failed, or cancelled) + pub fn is_terminal(&self) -> bool { + matches!( + self, + DownloadState::Completed | DownloadState::Failed | DownloadState::Cancelled + ) + } + + /// Check if the task is active (pending, downloading, or stopped/paused) + pub fn is_active(&self) -> bool { + matches!( + self, + DownloadState::Pending | DownloadState::Downloading | DownloadState::Stopped + ) + } + + /// Check if the task can be resumed + pub fn is_resumable(&self) -> bool { + matches!(self, DownloadState::Stopped | DownloadState::Failed) + } + + /// Check if the task can be paused + pub fn is_pausable(&self) -> bool { + matches!(self, DownloadState::Downloading) + } +} + +/// Download progress information +#[derive(Debug, Clone, Serialize, Deserialize, Default)] +pub struct DownloadProgress { + /// Number of bytes downloaded so far + pub downloaded_bytes: u64, + + /// Total size in bytes (None if unknown) + pub total_bytes: Option, + + /// Download speed in bytes per second (average) + pub speed_bytes_per_sec: Option, + + /// Estimated time remaining in seconds (None if unknown) + pub eta_seconds: Option, +} + +impl DownloadProgress { + /// Create a new progress with downloaded bytes + pub fn new(downloaded_bytes: u64, total_bytes: Option) -> Self { + Self { + downloaded_bytes, + total_bytes, + speed_bytes_per_sec: None, + eta_seconds: None, + } + } + + /// Create a new progress with speed and ETA + 
pub fn with_speed( + downloaded_bytes: u64, + total_bytes: Option, + speed_bytes_per_sec: Option, + ) -> Self { + let eta_seconds = match (total_bytes, speed_bytes_per_sec) { + (Some(total), Some(speed)) if speed > 0 && total > downloaded_bytes => { + Some((total - downloaded_bytes) / speed) + } + _ => None, + }; + + Self { + downloaded_bytes, + total_bytes, + speed_bytes_per_sec, + eta_seconds, + } + } + + /// Calculate progress percentage (0-100) + pub fn percentage(&self) -> Option { + self.total_bytes.map(|total| { + if total == 0 { + 0.0 + } else { + (self.downloaded_bytes as f64 / total as f64) * 100.0 + } + }) + } + + /// Check if download is complete + pub fn is_complete(&self) -> bool { + if let Some(total) = self.total_bytes { + self.downloaded_bytes >= total + } else { + false + } + } +} + +/// Complete information about a download task +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct TaskInfo { + /// Unique task identifier + pub task_id: String, + + /// Source URL + pub url: String, + + /// Destination file path + pub dest_path: String, + + /// Current state + pub state: DownloadState, + + /// Progress information + pub progress: DownloadProgress, + + /// Resume offset in bytes (for resuming paused downloads) + pub resume_offset: u64, + + /// Whether the server supports HTTP Range requests (None if not tested yet) + pub supports_range: Option, + + /// Error message (if state is Failed) + pub error: Option, + + /// Task creation timestamp (Unix timestamp in milliseconds) + #[serde( + serialize_with = "serialize_system_time", + deserialize_with = "deserialize_system_time" + )] + pub created_at: SystemTime, + + /// Task start timestamp (Unix timestamp in milliseconds, None if not started yet) + #[serde( + serialize_with = "serialize_option_system_time", + deserialize_with = "deserialize_option_system_time" + )] + pub started_at: Option, + + /// Task completion timestamp (Unix timestamp in milliseconds, None if not completed) + #[serde( + 
serialize_with = "serialize_option_system_time", + deserialize_with = "deserialize_option_system_time" + )] + pub completed_at: Option, + + /// Task last paused timestamp (Unix timestamp in milliseconds, None if never paused) + #[serde( + serialize_with = "serialize_option_system_time", + deserialize_with = "deserialize_option_system_time" + )] + pub paused_at: Option, + + /// HTTP headers to include in requests + #[serde(default)] + pub headers: Option>, + + /// HTTP cookies to include in requests + #[serde(default)] + pub cookies: Option>, + + /// Hub UUID for dispatching to registered external downloaders. + /// When set, the download task manager routes this task to the + /// external downloader registered for this hub_uuid. + /// When None, the default built-in downloader is used. + #[serde(default, skip_serializing_if = "Option::is_none")] + pub hub_uuid: Option, +} + +impl TaskInfo { + /// Create a new task info + pub fn new(url: impl Into, dest_path: impl Into) -> Self { + Self { + task_id: Uuid::new_v4().to_string(), + url: url.into(), + dest_path: dest_path.into(), + state: DownloadState::Pending, + progress: DownloadProgress::default(), + resume_offset: 0, + supports_range: None, + error: None, + created_at: SystemTime::now(), + started_at: None, + completed_at: None, + paused_at: None, + headers: None, + cookies: None, + hub_uuid: None, + } + } + + /// Create with a specific task ID + pub fn with_id( + task_id: impl Into, + url: impl Into, + dest_path: impl Into, + ) -> Self { + Self { + task_id: task_id.into(), + url: url.into(), + dest_path: dest_path.into(), + state: DownloadState::Pending, + progress: DownloadProgress::default(), + resume_offset: 0, + supports_range: None, + error: None, + created_at: SystemTime::now(), + started_at: None, + completed_at: None, + paused_at: None, + headers: None, + cookies: None, + hub_uuid: None, + } + } + + /// Create with headers, cookies, and optional hub_uuid + pub fn with_options( + task_id: impl Into, + url: 
impl Into, + dest_path: impl Into, + headers: Option>, + cookies: Option>, + hub_uuid: Option, + ) -> Self { + Self { + task_id: task_id.into(), + url: url.into(), + dest_path: dest_path.into(), + state: DownloadState::Pending, + progress: DownloadProgress::default(), + resume_offset: 0, + supports_range: None, + error: None, + created_at: SystemTime::now(), + started_at: None, + completed_at: None, + paused_at: None, + headers, + cookies, + hub_uuid, + } + } + + /// Mark task as started + pub fn mark_started(&mut self) { + self.state = DownloadState::Downloading; + self.started_at = Some(SystemTime::now()); + } + + /// Mark task as completed + pub fn mark_completed(&mut self) { + self.state = DownloadState::Completed; + self.completed_at = Some(SystemTime::now()); + } + + /// Mark task as failed with error message + pub fn mark_failed(&mut self, error: impl Into) { + self.state = DownloadState::Failed; + self.error = Some(error.into()); + self.completed_at = Some(SystemTime::now()); + } + + /// Mark task as cancelled + pub fn mark_cancelled(&mut self) { + self.state = DownloadState::Cancelled; + self.completed_at = Some(SystemTime::now()); + } + + /// Mark task as stopped/paused + pub fn mark_stopped(&mut self) { + self.state = DownloadState::Stopped; + self.resume_offset = self.progress.downloaded_bytes; + self.paused_at = Some(SystemTime::now()); + } + + /// Mark task as resumed from stopped state + pub fn mark_resumed(&mut self) { + self.state = DownloadState::Downloading; + self.paused_at = None; + } + + /// Update progress + pub fn update_progress(&mut self, progress: DownloadProgress) { + self.progress = progress; + } + + /// Set whether the server supports Range requests + pub fn set_range_support(&mut self, supports: bool) { + self.supports_range = Some(supports); + } + + /// Calculate elapsed time in seconds + pub fn elapsed_seconds(&self) -> Option { + self.started_at.and_then(|start| { + SystemTime::now() + .duration_since(start) + .ok() + .map(|d| 
d.as_secs()) + }) + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_download_state() { + assert!(DownloadState::Completed.is_terminal()); + assert!(DownloadState::Failed.is_terminal()); + assert!(DownloadState::Cancelled.is_terminal()); + assert!(!DownloadState::Pending.is_terminal()); + assert!(!DownloadState::Downloading.is_terminal()); + assert!(!DownloadState::Stopped.is_terminal()); + + assert!(DownloadState::Pending.is_active()); + assert!(DownloadState::Downloading.is_active()); + assert!(DownloadState::Stopped.is_active()); + assert!(!DownloadState::Completed.is_active()); + + assert!(DownloadState::Downloading.is_pausable()); + assert!(!DownloadState::Stopped.is_pausable()); + assert!(!DownloadState::Completed.is_pausable()); + + assert!(DownloadState::Stopped.is_resumable()); + assert!(DownloadState::Failed.is_resumable()); + assert!(!DownloadState::Downloading.is_resumable()); + assert!(!DownloadState::Completed.is_resumable()); + } + + #[test] + fn test_download_progress() { + let mut progress = DownloadProgress::new(50, Some(100)); + assert_eq!(progress.percentage(), Some(50.0)); + assert!(!progress.is_complete()); + + progress.downloaded_bytes = 100; + assert_eq!(progress.percentage(), Some(100.0)); + assert!(progress.is_complete()); + } + + #[test] + fn test_task_info() { + let mut task = TaskInfo::new("http://example.com/file", "/tmp/file"); + assert_eq!(task.state, DownloadState::Pending); + assert!(task.started_at.is_none()); + assert!(task.completed_at.is_none()); + + task.mark_started(); + assert_eq!(task.state, DownloadState::Downloading); + assert!(task.started_at.is_some()); + + task.mark_completed(); + assert_eq!(task.state, DownloadState::Completed); + assert!(task.completed_at.is_some()); + } + + #[test] + fn test_task_info_failure() { + let mut task = TaskInfo::new("http://example.com/file", "/tmp/file"); + task.mark_failed("Network error"); + + assert_eq!(task.state, DownloadState::Failed); + 
assert_eq!(task.error, Some("Network error".to_string())); + assert!(task.completed_at.is_some()); + } +} diff --git a/src/downloader/task_manager.rs b/src/downloader/task_manager.rs new file mode 100644 index 0000000..4750517 --- /dev/null +++ b/src/downloader/task_manager.rs @@ -0,0 +1,1009 @@ +//! Download task manager with state tracking and long-polling support + +use super::config::DownloadConfig; +use super::error::{DownloadError, Result}; +use super::state::{DownloadProgress, DownloadState, SpeedCalculator, TaskInfo}; +use super::traits::{Downloader, DownloaderCapabilities}; +use parking_lot::RwLock; +use std::collections::HashMap; +use std::path::PathBuf; +use std::sync::Arc; +use std::time::{Duration, SystemTime}; +use tokio::sync::Notify; +use uuid::Uuid; + +/// Download task manager +/// +/// Manages all download tasks, tracks their state, and provides long-polling support +/// for status updates. +pub struct DownloadTaskManager { + /// Task storage: task_id -> TaskInfo + tasks: Arc>>, + + /// Downloader implementation + downloader: Arc>, + + /// Notification system for state changes (used for long-polling) + notifier: Arc, + + /// Downloader capabilities (cached from downloader at initialization) + capabilities: DownloaderCapabilities, +} + +impl DownloadTaskManager { + /// Create a new task manager with the given downloader + pub fn new(downloader: Box) -> Self { + // Cache capabilities from downloader at initialization + let capabilities = downloader.capabilities().clone(); + + Self { + tasks: Arc::new(RwLock::new(HashMap::new())), + downloader: Arc::new(downloader), + notifier: Arc::new(Notify::new()), + capabilities, + } + } + + /// Create a task manager from configuration + pub fn from_config(config: &DownloadConfig) -> Self { + let downloader = super::create_downloader(config); + Self::new(downloader) + } + + /// Submit a new download task + /// + /// # Arguments + /// * `url` - The URL to download from + /// * `dest_path` - The destination file 
path + /// + /// # Returns + /// * `Ok(task_id)` - The unique task ID + /// * `Err(DownloadError)` - If the task could not be created + pub fn submit_task( + &self, + url: impl Into, + dest_path: impl Into, + ) -> Result { + let task_id = Uuid::new_v4().to_string(); + let task = TaskInfo::with_id(task_id.clone(), url, dest_path); + + { + let mut tasks = self.tasks.write(); + if tasks.contains_key(&task_id) { + return Err(DownloadError::task_already_exists(&task_id)); + } + tasks.insert(task_id.clone(), task); + } + + // Notify listeners about new task + self.notifier.notify_waiters(); + + // Start download in background + let manager = self.clone_for_task(); + let task_id_clone = task_id.clone(); + tokio::spawn(async move { + manager.execute_task(&task_id_clone).await; + }); + + Ok(task_id) + } + + /// Submit a new download task with optional headers, cookies, and hub_uuid + /// + /// # Arguments + /// * `url` - The URL to download from + /// * `dest_path` - The destination file path + /// * `headers` - Optional HTTP headers + /// * `cookies` - Optional HTTP cookies + /// * `hub_uuid` - Optional hub UUID for routing to external downloaders + /// + /// # Returns + /// * `Ok(task_id)` - The unique task ID + /// * `Err(DownloadError)` - If the task could not be created + pub fn submit_task_with_options( + &self, + url: impl Into, + dest_path: impl Into, + headers: Option>, + cookies: Option>, + hub_uuid: Option, + ) -> Result { + let task_id = Uuid::new_v4().to_string(); + let task = + TaskInfo::with_options(task_id.clone(), url, dest_path, headers, cookies, hub_uuid); + + { + let mut tasks = self.tasks.write(); + if tasks.contains_key(&task_id) { + return Err(DownloadError::task_already_exists(&task_id)); + } + tasks.insert(task_id.clone(), task); + } + + // Notify listeners about new task + self.notifier.notify_waiters(); + + // Start download in background + let manager = self.clone_for_task(); + let task_id_clone = task_id.clone(); + tokio::spawn(async move { + 
manager.execute_task(&task_id_clone).await; + }); + + Ok(task_id) + } + + /// Submit multiple download tasks + /// + /// # Returns + /// Vector of task IDs for each submitted task + pub fn submit_batch(&self, tasks: Vec<(String, String)>) -> Result> { + let mut task_ids = Vec::new(); + + for (url, dest_path) in tasks { + let task_id = self.submit_task(url, dest_path)?; + task_ids.push(task_id); + } + + Ok(task_ids) + } + + /// Get task information by ID + pub fn get_task(&self, task_id: &str) -> Result { + let tasks = self.tasks.read(); + tasks + .get(task_id) + .cloned() + .ok_or_else(|| DownloadError::task_not_found(task_id)) + } + + /// Get all tasks + pub fn get_all_tasks(&self) -> Vec { + let tasks = self.tasks.read(); + tasks.values().cloned().collect() + } + + /// Get tasks by state + pub fn get_tasks_by_state(&self, state: DownloadState) -> Vec { + let tasks = self.tasks.read(); + tasks + .values() + .filter(|task| task.state == state) + .cloned() + .collect() + } + + /// Get active tasks (pending or downloading) + pub fn get_active_tasks(&self) -> Vec { + let tasks = self.tasks.read(); + tasks + .values() + .filter(|task| task.state.is_active()) + .cloned() + .collect() + } + + /// Get downloader capabilities + /// + /// Returns the capabilities of the underlying downloader implementation. + /// These capabilities are determined at initialization time. 
+ pub fn get_capabilities(&self) -> &DownloaderCapabilities { + &self.capabilities + } + + /// Cancel a task + pub fn cancel_task(&self, task_id: &str) -> Result<()> { + let mut tasks = self.tasks.write(); + let task = tasks + .get_mut(task_id) + .ok_or_else(|| DownloadError::task_not_found(task_id))?; + + if task.state.is_terminal() { + return Err(DownloadError::invalid_input(format!( + "Task {} is already in terminal state: {:?}", + task_id, task.state + ))); + } + + task.mark_cancelled(); + self.notifier.notify_waiters(); + + Ok(()) + } + + /// Pause a download task + /// + /// # Arguments + /// * `task_id` - The task ID to pause + /// + /// # Returns + /// * `Ok(())` - Task paused successfully + /// * `Err(DownloadError)` - If task not found or cannot be paused + pub async fn pause_task(&self, task_id: &str) -> Result<()> { + // Check if downloader supports pause + if !self.capabilities.supports_pause { + return Err(DownloadError::unsupported( + "This downloader does not support pause functionality", + )); + } + + // Get task URL and verify state + let url = { + let tasks = self.tasks.read(); + let task = tasks + .get(task_id) + .ok_or_else(|| DownloadError::task_not_found(task_id))?; + + if !task.state.is_pausable() { + return Err(DownloadError::invalid_input(format!( + "Task {} cannot be paused in state: {:?}", + task_id, task.state + ))); + } + + task.url.clone() + }; + + // Call downloader pause + self.downloader.pause(&url).await?; + + // Update task state + { + let mut tasks = self.tasks.write(); + if let Some(task) = tasks.get_mut(task_id) { + task.mark_stopped(); + } + } + + self.notifier.notify_waiters(); + Ok(()) + } + + /// Resume a paused download task + /// + /// # Arguments + /// * `task_id` - The task ID to resume + /// + /// # Returns + /// * `Ok(())` - Task resumed successfully + /// * `Err(DownloadError)` - If task not found or cannot be resumed + pub async fn resume_task(&self, task_id: &str) -> Result<()> { + // Check if downloader supports 
resume + if !self.capabilities.supports_resume { + return Err(DownloadError::unsupported( + "This downloader does not support resume functionality", + )); + } + + // Get task details and verify state + let (url, dest_path, headers, cookies, hub_uuid) = { + let tasks = self.tasks.read(); + let task = tasks + .get(task_id) + .ok_or_else(|| DownloadError::task_not_found(task_id))?; + + if !task.state.is_resumable() { + return Err(DownloadError::invalid_input(format!( + "Task {} cannot be resumed from state: {:?}", + task_id, task.state + ))); + } + + ( + task.url.clone(), + task.dest_path.clone(), + task.headers.clone(), + task.cookies.clone(), + task.hub_uuid.clone(), + ) + }; + + // Mark as resumed + { + let mut tasks = self.tasks.write(); + if let Some(task) = tasks.get_mut(task_id) { + task.mark_resumed(); + } + } + + self.notifier.notify_waiters(); + + // Restart download in background + let manager = self.clone_for_task(); + let task_id_clone = task_id.to_string(); + tokio::spawn(async move { + // Create request options with headers, cookies, and hub_uuid metadata + let mut metadata = std::collections::HashMap::new(); + if let Some(uuid) = &hub_uuid { + metadata.insert("hub_uuid".to_string(), uuid.clone()); + } + + let options = if headers.is_some() || cookies.is_some() || !metadata.is_empty() { + Some(super::traits::RequestOptions { + headers, + cookies, + metadata: if metadata.is_empty() { + None + } else { + Some(metadata) + }, + }) + } else { + None + }; + + // Create progress callback with speed calculator + let tasks_clone = manager.tasks.clone(); + let task_id_for_callback = task_id_clone.clone(); + let notifier_clone = manager.notifier.clone(); + let speed_calc = Arc::new(RwLock::new(SpeedCalculator::default_window())); + + let progress_callback = Box::new(move |downloaded: u64, total: Option| { + // Record sample and calculate speed + let speed = { + let mut calc = speed_calc.write(); + calc.record(downloaded); + calc.speed_bytes_per_sec() + }; + + // 
Update task progress
+                let mut tasks = tasks_clone.write();
+                if let Some(task) = tasks.get_mut(&task_id_for_callback) {
+                    task.update_progress(DownloadProgress::with_speed(downloaded, total, speed));
+                    notifier_clone.notify_waiters();
+                }
+            });
+
+            // Resume download
+            let result = manager
+                .downloader
+                .download(
+                    &url,
+                    &PathBuf::from(&dest_path),
+                    Some(progress_callback),
+                    options,
+                )
+                .await;
+
+            // Update task state
+            {
+                let mut tasks = manager.tasks.write();
+                if let Some(task) = tasks.get_mut(&task_id_clone) {
+                    match result {
+                        Ok(_) => task.mark_completed(),
+                        Err(e) => task.mark_failed(e.message),
+                    }
+                }
+            }
+
+            manager.notifier.notify_waiters();
+        });
+
+        Ok(())
+    }
+
+    /// Wait for task state change (long-polling support)
+    ///
+    /// # Arguments
+    /// * `task_id` - The task ID to monitor
+    /// * `timeout` - Maximum time to wait for a change
+    ///
+    /// # Returns
+    /// * `Ok(TaskInfo)` - The updated task info
+    /// * `Err(DownloadError)` - If task not found or timeout occurred
+    pub async fn wait_for_change(&self, task_id: &str, timeout: Duration) -> Result<TaskInfo> {
+        let initial_state = {
+            let tasks = self.tasks.read();
+            let task = tasks
+                .get(task_id)
+                .ok_or_else(|| DownloadError::task_not_found(task_id))?;
+            task.state
+        };
+
+        // If already in terminal state, return immediately
+        if initial_state.is_terminal() {
+            return self.get_task(task_id);
+        }
+
+        // Wait for notification with timeout
+        let notifier = self.notifier.clone();
+        let result = tokio::time::timeout(timeout, async {
+            loop {
+                notifier.notified().await;
+
+                let tasks = self.tasks.read();
+                if let Some(task) = tasks.get(task_id) {
+                    if task.state != initial_state {
+                        return Ok(task.clone());
+                    }
+                } else {
+                    return Err(DownloadError::task_not_found(task_id));
+                }
+            }
+        })
+        .await;
+
+        match result {
+            Ok(task_result) => task_result,
+            Err(_) => {
+                // Timeout - return current state
+                self.get_task(task_id)
+            }
+        }
+    }
+
+    /// Remove completed/failed tasks older than the
specified duration + pub fn cleanup_old_tasks(&self, max_age: Duration) { + let mut tasks = self.tasks.write(); + let now = SystemTime::now(); + + tasks.retain(|_, task| { + if !task.state.is_terminal() { + return true; // Keep active tasks + } + + if let Some(completed_at) = task.completed_at { + if let Ok(age) = now.duration_since(completed_at) { + return age < max_age; + } + } + + true + }); + + self.notifier.notify_waiters(); + } + + /// Remove a specific task + pub fn remove_task(&self, task_id: &str) -> Result<()> { + let mut tasks = self.tasks.write(); + let task = tasks + .get(task_id) + .ok_or_else(|| DownloadError::task_not_found(task_id))?; + + if !task.state.is_terminal() { + return Err(DownloadError::invalid_input(format!( + "Cannot remove active task: {}", + task_id + ))); + } + + tasks.remove(task_id); + self.notifier.notify_waiters(); + + Ok(()) + } + + /// Execute a download task + async fn execute_task(&self, task_id: &str) { + // Mark task as started + { + let mut tasks = self.tasks.write(); + if let Some(task) = tasks.get_mut(task_id) { + if task.state == DownloadState::Cancelled { + return; // Task was cancelled before it started + } + task.mark_started(); + } else { + return; // Task not found + } + } + + self.notifier.notify_waiters(); + + // Get task details + let (url, dest_path, headers, cookies, hub_uuid) = { + let tasks = self.tasks.read(); + if let Some(task) = tasks.get(task_id) { + ( + task.url.clone(), + task.dest_path.clone(), + task.headers.clone(), + task.cookies.clone(), + task.hub_uuid.clone(), + ) + } else { + return; + } + }; + + // Create request options with headers, cookies, and hub_uuid metadata + let mut metadata = std::collections::HashMap::new(); + if let Some(uuid) = &hub_uuid { + metadata.insert("hub_uuid".to_string(), uuid.clone()); + } + + let options = if headers.is_some() || cookies.is_some() || !metadata.is_empty() { + Some(super::traits::RequestOptions { + headers, + cookies, + metadata: if metadata.is_empty() { 
+ None + } else { + Some(metadata) + }, + }) + } else { + None + }; + + // Create progress callback with speed calculator + let tasks_clone = self.tasks.clone(); + let task_id_clone = task_id.to_string(); + let notifier_clone = self.notifier.clone(); + let speed_calc = Arc::new(RwLock::new(SpeedCalculator::default_window())); + + let progress_callback = Box::new(move |downloaded: u64, total: Option| { + // Record sample and calculate speed + let speed = { + let mut calc = speed_calc.write(); + calc.record(downloaded); + calc.speed_bytes_per_sec() + }; + + // Update task progress + let mut tasks = tasks_clone.write(); + if let Some(task) = tasks.get_mut(&task_id_clone) { + task.update_progress(DownloadProgress::with_speed(downloaded, total, speed)); + notifier_clone.notify_waiters(); + } + }); + + // Perform download + let result = self + .downloader + .download( + &url, + &PathBuf::from(&dest_path), + Some(progress_callback), + options, + ) + .await; + + // Update task state + { + let mut tasks = self.tasks.write(); + if let Some(task) = tasks.get_mut(task_id) { + match result { + Ok(_) => task.mark_completed(), + Err(e) => task.mark_failed(e.message), + } + } + } + + self.notifier.notify_waiters(); + } + + /// Clone for task execution + fn clone_for_task(&self) -> Self { + Self { + tasks: self.tasks.clone(), + downloader: self.downloader.clone(), + notifier: self.notifier.clone(), + capabilities: self.capabilities.clone(), + } + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::downloader::TraumaDownloader; + + #[test] + fn test_create_manager() { + let downloader = Box::new(TraumaDownloader::default_settings()); + let manager = DownloadTaskManager::new(downloader); + + let all_tasks = manager.get_all_tasks(); + assert!(all_tasks.is_empty()); + } + + #[tokio::test] + async fn test_submit_task() { + let config = DownloadConfig::default(); + let manager = DownloadTaskManager::from_config(&config); + + let task_id = manager + 
.submit_task("http://example.com/file", "/tmp/file") + .unwrap(); + assert!(!task_id.is_empty()); + + let task = manager.get_task(&task_id).unwrap(); + assert_eq!(task.url, "http://example.com/file"); + assert_eq!(task.dest_path, "/tmp/file"); + } + + #[tokio::test] + async fn test_cancel_task() { + let config = DownloadConfig::default(); + let manager = DownloadTaskManager::from_config(&config); + + let task_id = manager + .submit_task("http://example.com/file", "/tmp/file") + .unwrap(); + + // Wait a bit for task to potentially start + tokio::time::sleep(Duration::from_millis(100)).await; + + let result = manager.cancel_task(&task_id); + // May succeed or fail depending on task state + let _ = result; + } + + #[tokio::test] + async fn test_get_tasks_by_state() { + let config = DownloadConfig::default(); + let manager = DownloadTaskManager::from_config(&config); + + let _ = manager.submit_task("http://example.com/file1", "/tmp/file1"); + let _ = manager.submit_task("http://example.com/file2", "/tmp/file2"); + + let pending_tasks = manager.get_tasks_by_state(DownloadState::Pending); + assert!(pending_tasks.len() <= 2); // May have started downloading + } + + // ======================================================================== + // Integration Tests - TaskManager with Mock HTTP Server + // ======================================================================== + + /// Test Scenario 5: Headers and Cookies transmission to downloader + /// Verifies: Custom headers and cookies are correctly sent to server + #[tokio::test] + async fn test_custom_headers_and_cookies_transmission() { + use mockito::Server; + use std::collections::HashMap; + use tempfile::tempdir; + + // Create Mock HTTP server + let mut server = Server::new_async().await; + + // Mock verifies custom headers and cookies + // Note: Cookie order is not guaranteed due to HashMap iteration + let mock = server + .mock("GET", "/protected-file.txt") + .match_header("authorization", "Bearer test-token-123") 
+ .match_header("x-custom-header", "custom-value") + .match_header( + "cookie", + mockito::Matcher::Regex(".*session_id=abc123.*".to_string()), + ) + .match_header( + "cookie", + mockito::Matcher::Regex(".*user_id=456.*".to_string()), + ) + .with_status(200) + .with_body(b"Protected content") + .create(); + + // Prepare custom headers and cookies + let mut headers = HashMap::new(); + headers.insert( + "Authorization".to_string(), + "Bearer test-token-123".to_string(), + ); + headers.insert("X-Custom-Header".to_string(), "custom-value".to_string()); + + let mut cookies = HashMap::new(); + cookies.insert("session_id".to_string(), "abc123".to_string()); + cookies.insert("user_id".to_string(), "456".to_string()); + + // Create task manager + let config = DownloadConfig::default(); + let manager = DownloadTaskManager::from_config(&config); + + // Prepare download destination + let temp_dir = tempdir().unwrap(); + let dest = temp_dir.path().join("protected-file.txt"); + let url = format!("{}/protected-file.txt", server.url()); + + // Submit task with headers and cookies + let task_id = manager + .submit_task_with_options( + &url, + dest.to_str().unwrap(), + Some(headers), + Some(cookies), + None, + ) + .unwrap(); + + // Wait for download to complete + tokio::time::sleep(Duration::from_millis(500)).await; + + // Verify task status + let task_info = manager.get_task(&task_id).unwrap(); + assert!( + task_info.state == DownloadState::Completed + || task_info.state == DownloadState::Downloading, + "Task should be completed or downloading" + ); + + // Wait for task to fully complete + for _ in 0..20 { + tokio::time::sleep(Duration::from_millis(100)).await; + let task_info = manager.get_task(&task_id).unwrap(); + if task_info.state == DownloadState::Completed { + break; + } + } + + // Verify mock was called (headers and cookies transmitted correctly) + mock.assert(); + + // Verify file downloaded successfully + assert!(dest.exists(), "File should be downloaded"); + let content 
= std::fs::read_to_string(&dest).unwrap();
+        assert_eq!(content, "Protected content");
+    }
+
+    /// Test Scenario 6: Task lifecycle state tracking
+    /// Verifies: Complete flow from Pending → Downloading → Completed
+    #[tokio::test]
+    async fn test_task_lifecycle_and_state_transitions() {
+        use mockito::Server;
+        use tempfile::tempdir;
+
+        // Create Mock HTTP server
+        let mut server = Server::new_async().await;
+        let test_data = b"Task lifecycle test data";
+
+        let mock = server
+            .mock("GET", "/lifecycle-test.txt")
+            .with_status(200)
+            .with_header("content-length", &test_data.len().to_string())
+            .with_body(test_data.as_slice())
+            .create();
+
+        // Create task manager
+        let config = DownloadConfig::default();
+        let manager = DownloadTaskManager::from_config(&config);
+
+        // Prepare download destination
+        let temp_dir = tempdir().unwrap();
+        let dest = temp_dir.path().join("lifecycle-test.txt");
+        let url = format!("{}/lifecycle-test.txt", server.url());
+
+        // Submit task
+        let task_id = manager.submit_task(&url, dest.to_str().unwrap()).unwrap();
+
+        // Track state changes
+        let mut states_observed = Vec::new();
+
+        // Initial state should be Pending
+        let initial_task = manager.get_task(&task_id).unwrap();
+        states_observed.push(initial_task.state);
+        assert_eq!(
+            initial_task.state,
+            DownloadState::Pending,
+            "Initial state should be Pending"
+        );
+
+        // Monitor state transitions (max 3 seconds)
+        for _ in 0..30 {
+            tokio::time::sleep(Duration::from_millis(100)).await;
+
+            if let Ok(task_info) = manager.get_task(&task_id) {
+                let current_state = task_info.state;
+
+                // Record new state
+                if states_observed.last() != Some(&current_state) {
+                    states_observed.push(current_state);
+                }
+
+                // If completed, verify file exists
+                if current_state == DownloadState::Completed {
+                    assert!(dest.exists(), "File should exist when completed");
+                    let content = std::fs::read(&dest).unwrap();
+                    assert_eq!(content, test_data);
+                    break;
+                }
+
+                // If failed, record error
+                if
current_state == DownloadState::Failed { + if let Some(error) = &task_info.error { + eprintln!("Download failed: {}", error); + } + panic!("Download should not fail in this test"); + } + } + } + + // Verify state transition sequence + assert!( + states_observed.contains(&DownloadState::Pending), + "Should have observed Pending state" + ); + assert!( + states_observed.contains(&DownloadState::Downloading) + || states_observed.contains(&DownloadState::Completed), + "Should have observed Downloading or Completed state" + ); + + // Final state should be Completed + let final_task = manager.get_task(&task_id).unwrap(); + assert_eq!( + final_task.state, + DownloadState::Completed, + "Final state should be Completed. States observed: {:?}", + states_observed + ); + + // Verify progress information + assert_eq!( + final_task.progress.downloaded_bytes, + test_data.len() as u64, + "Downloaded bytes should match content length" + ); + assert_eq!( + final_task.progress.total_bytes, + Some(test_data.len() as u64), + "Total bytes should be known" + ); + + mock.assert(); + } + + /// Test Scenario 7: Long-polling wait_for_change mechanism + /// Verifies: Correct notification on state change, timeout mechanism works + #[tokio::test] + async fn test_wait_for_change_notification() { + use mockito::Server; + use tempfile::tempdir; + + // Create Mock HTTP server with slow response + let mut server = Server::new_async().await; + let test_data = vec![b'W'; 2048]; // 2KB data + + let mock = server + .mock("GET", "/slow-file.bin") + .with_status(200) + .with_header("content-length", &test_data.len().to_string()) + .with_chunked_body(move |w| { + // Send chunks slowly + for chunk in test_data.chunks(512) { + std::thread::sleep(Duration::from_millis(200)); + w.write_all(chunk)?; + } + Ok(()) + }) + .create(); + + // Create task manager + let config = DownloadConfig::default(); + let manager = DownloadTaskManager::from_config(&config); + + // Prepare download destination + let temp_dir = 
tempdir().unwrap(); + let dest = temp_dir.path().join("slow-file.bin"); + let url = format!("{}/slow-file.bin", server.url()); + + // Submit task + let task_id = manager.submit_task(&url, dest.to_str().unwrap()).unwrap(); + + // Test 1: Wait for state change (should succeed) + let wait_result = manager + .wait_for_change(&task_id, Duration::from_secs(2)) + .await; + assert!(wait_result.is_ok(), "Wait for change should succeed"); + + let task_after_change = wait_result.unwrap(); + assert!( + task_after_change.state == DownloadState::Downloading + || task_after_change.state == DownloadState::Completed, + "State should have changed from Pending" + ); + + // Test 2: Short timeout test (should timeout when state is stable) + // Wait for task to complete or enter stable state + tokio::time::sleep(Duration::from_secs(1)).await; + + // Test 3: Verify task eventually completes + for _ in 0..15 { + tokio::time::sleep(Duration::from_millis(300)).await; + let task_info = manager.get_task(&task_id).unwrap(); + + if task_info.state == DownloadState::Completed { + // Verify download success + assert!(dest.exists(), "File should exist"); + let file_size = std::fs::metadata(&dest).unwrap().len(); + assert_eq!(file_size, 2048, "File size should match"); + break; + } + } + + mock.assert(); + } + + /// Test Scenario 8: Batch task submission and management + /// Verifies: Batch submission, deduplication, concurrency control + #[tokio::test] + async fn test_batch_task_submission_and_management() { + use mockito::Server; + use tempfile::tempdir; + + // Create Mock HTTP server + let mut server = Server::new_async().await; + + // Create 3 different files + let files = vec![ + ("batch1.txt", b"Batch file 1".to_vec()), + ("batch2.txt", b"Batch file 2".to_vec()), + ("batch3.txt", b"Batch file 3".to_vec()), + ]; + + let mut mocks = Vec::new(); + for (filename, content) in &files { + let mock = server + .mock("GET", format!("/{}", filename).as_str()) + .with_status(200) + 
.with_header("content-length", &content.len().to_string()) + .with_body(content.as_slice()) + .create(); + mocks.push(mock); + } + + // Create task manager + let config = DownloadConfig::default(); + let manager = DownloadTaskManager::from_config(&config); + + // Prepare batch tasks + let temp_dir = tempdir().unwrap(); + let mut tasks = Vec::new(); + + for (filename, _) in &files { + let url = format!("{}/{}", server.url(), filename); + let dest = temp_dir.path().join(filename); + tasks.push((url, dest.to_string_lossy().to_string())); + } + + // Submit batch tasks + let task_ids = manager.submit_batch(tasks).unwrap(); + assert_eq!(task_ids.len(), 3, "Should submit 3 tasks"); + + // Wait for all tasks to complete + for task_id in &task_ids { + for _ in 0..30 { + tokio::time::sleep(Duration::from_millis(100)).await; + + if let Ok(task_info) = manager.get_task(task_id) { + if task_info.state == DownloadState::Completed { + break; + } + if task_info.state == DownloadState::Failed { + panic!("Task {} failed: {:?}", task_id, task_info.error); + } + } + } + } + + // Verify all files downloaded successfully + for (filename, expected_content) in &files { + let dest = temp_dir.path().join(filename); + assert!(dest.exists(), "File {} should exist", filename); + + let content = std::fs::read(&dest).unwrap(); + assert_eq!( + content, *expected_content, + "Content of {} should match", + filename + ); + } + + // Verify all mocks were called + for mock in mocks { + mock.assert(); + } + + // Verify get_all_tasks returns all tasks + let all_tasks = manager.get_all_tasks(); + assert!(all_tasks.len() >= 3, "Should have at least 3 tasks"); + + // Verify get_tasks_by_state + let completed_tasks = manager.get_tasks_by_state(DownloadState::Completed); + assert!( + completed_tasks.len() >= 3, + "Should have at least 3 completed tasks" + ); + } +} diff --git a/src/downloader/traits.rs b/src/downloader/traits.rs new file mode 100644 index 0000000..ba8cdde --- /dev/null +++ 
b/src/downloader/traits.rs
@@ -0,0 +1,262 @@
+//! Core trait definitions for the downloader system
+
+use super::error::Result;
+use async_trait::async_trait;
+use serde::{Deserialize, Serialize};
+use std::collections::HashMap;
+use std::path::Path;
+
+/// Progress callback function type
+/// Parameters: (downloaded_bytes, total_bytes_optional)
+pub type ProgressCallback = Box<dyn Fn(u64, Option<u64>) + Send + Sync>;
+
+/// HTTP request options for downloads
+#[derive(Debug, Clone, Default)]
+pub struct RequestOptions {
+    /// HTTP headers to include in the request
+    pub headers: Option<HashMap<String, String>>,
+    /// HTTP cookies to include in the request
+    pub cookies: Option<HashMap<String, String>>,
+    /// Extra metadata for dispatch (e.g., hub_uuid for routing to external downloaders)
+    pub metadata: Option<HashMap<String, String>>,
+}
+
+/// Downloader capability information
+///
+/// This struct defines what features a downloader implementation supports.
+/// These capabilities are determined at initialization time and remain constant
+/// throughout the downloader's lifetime.
+#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq, Default)] +pub struct DownloaderCapabilities { + /// Whether the downloader supports pausing downloads + pub supports_pause: bool, + /// Whether the downloader supports resuming paused downloads + pub supports_resume: bool, + /// Whether the downloader supports cancelling downloads + pub supports_cancellation: bool, + /// Whether the downloader supports HTTP Range requests for breakpoint resume + pub supports_range_requests: bool, + /// Whether the downloader supports batch download operations + pub supports_batch_download: bool, +} + +impl DownloaderCapabilities { + /// Create a new DownloaderCapabilities with all features enabled + pub fn all_enabled() -> Self { + Self { + supports_pause: true, + supports_resume: true, + supports_cancellation: true, + supports_range_requests: true, + supports_batch_download: true, + } + } + + /// Create a new DownloaderCapabilities with all features disabled + pub fn all_disabled() -> Self { + Self::default() + } +} + +/// Core downloader trait - all downloader implementations must implement this +/// +/// This trait provides a pluggable interface for different download backends, +/// allowing easy switching between implementations (trauma, reqwest, CLI tools, etc.) 
+#[async_trait]
+pub trait Downloader: Send + Sync {
+    /// Download a single file from URL to destination path
+    ///
+    /// # Arguments
+    /// * `url` - The URL to download from
+    /// * `dest` - The destination file path
+    /// * `progress` - Optional progress callback (downloaded_bytes, total_bytes)
+    /// * `options` - Optional request options (headers, cookies)
+    ///
+    /// # Returns
+    /// * `Ok(())` - Download completed successfully
+    /// * `Err(DownloadError)` - Download failed
+    async fn download(
+        &self,
+        url: &str,
+        dest: &Path,
+        progress: Option<ProgressCallback>,
+        options: Option<RequestOptions>,
+    ) -> Result<()>;
+
+    /// Download multiple files concurrently
+    ///
+    /// # Arguments
+    /// * `tasks` - Vector of (url, destination_path) tuples
+    ///
+    /// # Returns
+    /// * `Ok(Vec<Result<()>>)` - Vector of results for each download task
+    async fn download_batch(&self, tasks: Vec<(String, std::path::PathBuf)>) -> Vec<Result<()>>;
+
+    /// Get the name of this downloader implementation
+    fn name(&self) -> &str;
+
+    /// Get the capabilities of this downloader implementation
+    ///
+    /// This method returns a reference to the capability information that was
+    /// determined at initialization time. Capabilities define what features
+    /// the downloader supports (pause, resume, cancellation, etc.).
+ /// + /// # Returns + /// A reference to the DownloaderCapabilities struct + fn capabilities(&self) -> &DownloaderCapabilities; + + /// Cancel an ongoing download (if supported by the implementation) + /// + /// Default implementation does nothing and returns Ok + async fn cancel(&self, _url: &str) -> Result<()> { + Ok(()) + } + + /// Pause an ongoing download (if supported by the implementation) + /// + /// # Arguments + /// * `url` - The URL of the download to pause + /// + /// # Returns + /// * `Ok(())` - Download paused successfully + /// * `Err(DownloadError)` - Failed to pause download + /// + /// Default implementation returns an error + async fn pause(&self, _url: &str) -> Result<()> { + Err(super::error::DownloadError::unsupported( + "Pause not supported by this downloader", + )) + } + + /// Resume a paused download (if supported by the implementation) + /// + /// # Arguments + /// * `url` - The URL of the download to resume + /// + /// # Returns + /// * `Ok(())` - Download resumed successfully + /// * `Err(DownloadError)` - Failed to resume download + /// + /// Default implementation returns an error + async fn resume(&self, _url: &str) -> Result<()> { + Err(super::error::DownloadError::unsupported( + "Resume not supported by this downloader", + )) + } + + /// Check if this downloader supports cancellation + /// + /// # Deprecated + /// Use `capabilities().supports_cancellation` instead. + /// This method will be removed in a future version. + #[deprecated( + since = "0.2.0", + note = "Use capabilities().supports_cancellation instead" + )] + fn supports_cancellation(&self) -> bool { + self.capabilities().supports_cancellation + } + + /// Check if this downloader supports pause/resume + /// + /// # Deprecated + /// Use `capabilities().supports_pause` instead. + /// This method will be removed in a future version. 
+    #[deprecated(since = "0.2.0", note = "Use capabilities().supports_pause instead")]
+    fn supports_pause(&self) -> bool {
+        self.capabilities().supports_pause
+    }
+
+    /// Check if this downloader supports resume/partial downloads
+    ///
+    /// # Deprecated
+    /// Use `capabilities().supports_resume` instead.
+    /// This method will be removed in a future version.
+    #[deprecated(since = "0.2.0", note = "Use capabilities().supports_resume instead")]
+    fn supports_resume(&self) -> bool {
+        self.capabilities().supports_resume
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+    use std::path::PathBuf;
+
+    // Mock downloader for testing
+    struct MockDownloader {
+        capabilities: DownloaderCapabilities,
+    }
+
+    impl MockDownloader {
+        fn new() -> Self {
+            Self {
+                capabilities: DownloaderCapabilities::all_disabled(),
+            }
+        }
+    }
+
+    #[async_trait]
+    impl Downloader for MockDownloader {
+        async fn download(
+            &self,
+            _url: &str,
+            _dest: &Path,
+            _progress: Option<ProgressCallback>,
+            _options: Option<RequestOptions>,
+        ) -> Result<()> {
+            Ok(())
+        }
+
+        async fn download_batch(&self, tasks: Vec<(String, PathBuf)>) -> Vec<Result<()>> {
+            tasks.into_iter().map(|_| Ok(())).collect()
+        }
+
+        fn name(&self) -> &str {
+            "mock"
+        }
+
+        fn capabilities(&self) -> &DownloaderCapabilities {
+            &self.capabilities
+        }
+    }
+
+    #[tokio::test]
+    async fn test_mock_downloader() {
+        let downloader = MockDownloader::new();
+        assert_eq!(downloader.name(), "mock");
+
+        // Test new capabilities() method
+        let caps = downloader.capabilities();
+        assert!(!caps.supports_cancellation);
+        assert!(!caps.supports_pause);
+        assert!(!caps.supports_resume);
+        assert!(!caps.supports_range_requests);
+        assert!(!caps.supports_batch_download);
+
+        // Test deprecated methods (should still work)
+        #[allow(deprecated)]
+        {
+            assert!(!downloader.supports_cancellation());
+            assert!(!downloader.supports_pause());
+            assert!(!downloader.supports_resume());
+        }
+
+        let result = downloader
+            .download(
+                "http://example.com/file",
+                Path::new("/tmp/file"),
+                None,
+                None,
+            )
+            .await;
+        assert!(result.is_ok());
+
+        // Test default pause/resume implementations
+        let pause_result = downloader.pause("http://example.com/file").await;
+        assert!(pause_result.is_err());
+
+        let resume_result = downloader.resume("http://example.com/file").await;
+        assert!(resume_result.is_err());
+    }
+}
diff --git a/src/downloader/trauma_impl.rs b/src/downloader/trauma_impl.rs
new file mode 100644
index 0000000..c350617
--- /dev/null
+++ b/src/downloader/trauma_impl.rs
@@ -0,0 +1,751 @@
+//! Reqwest-based downloader implementation with pause/resume support
+
+use super::error::{DownloadError, Result};
+use super::traits::{Downloader, DownloaderCapabilities, ProgressCallback, RequestOptions};
+use async_trait::async_trait;
+use futures::StreamExt;
+use parking_lot::RwLock;
+use std::collections::HashMap;
+use std::path::{Path, PathBuf};
+use std::sync::Arc;
+use tokio::fs::{File, OpenOptions};
+use tokio::io::AsyncWriteExt;
+use tokio_util::sync::CancellationToken;
+
+/// Task state for managing pause/resume
+#[derive(Clone)]
+struct TaskState {
+    url: String,
+    dest: PathBuf,
+    cancel_token: CancellationToken,
+    supports_range: Option<bool>,
+}
+
+/// Reqwest-based downloader implementation
+///
+/// This implementation uses reqwest with rustls for secure, concurrent downloads
+/// and supports pause/resume with HTTP Range requests
+pub struct TraumaDownloader {
+    max_concurrent: usize,
+    retries: usize,
+    timeout_seconds: u64,
+    client: Arc<reqwest::Client>,
+    // Track active download tasks for pause/resume
+    tasks: Arc<RwLock<HashMap<String, TaskState>>>,
+    // Capabilities of this downloader
+    capabilities: DownloaderCapabilities,
+}
+
+impl TraumaDownloader {
+    /// Create a new TraumaDownloader with specified parameters
+    ///
+    /// # Arguments
+    /// * `max_concurrent` - Maximum number of concurrent downloads
+    /// * `retries` - Number of retry attempts for failed downloads
+    /// * `timeout_seconds` - Timeout for each download in seconds
+    pub fn new(max_concurrent: usize, retries: usize,
timeout_seconds: u64) -> Self { + let client = reqwest::Client::builder() + .timeout(std::time::Duration::from_secs(timeout_seconds)) + .build() + .expect("Failed to create HTTP client"); + + Self { + max_concurrent, + retries, + timeout_seconds, + client: Arc::new(client), + tasks: Arc::new(RwLock::new(HashMap::new())), + capabilities: DownloaderCapabilities::all_enabled(), + } + } + + /// Create a downloader with default settings + pub fn default_settings() -> Self { + Self::new(4, 3, 300) + } + + /// Test if a server supports HTTP Range requests + async fn test_range_support(client: &reqwest::Client, url: &str) -> bool { + // Try HEAD request first + if let Ok(response) = client.head(url).send().await { + if let Some(accept_ranges) = response.headers().get("accept-ranges") { + if let Ok(value) = accept_ranges.to_str() { + return value.to_lowercase() == "bytes"; + } + } + } + + // Fallback: try a fake range request + if let Ok(response) = client.get(url).header("Range", "bytes=0-0").send().await { + return response.status() == reqwest::StatusCode::PARTIAL_CONTENT + || response.status() == reqwest::StatusCode::OK; + } + + false + } + + /// Download a file with retry and pause support + async fn download_with_retry( + &self, + url: &str, + dest: &Path, + progress: Option<&ProgressCallback>, + cancel_token: &CancellationToken, + options: Option<&RequestOptions>, + ) -> Result<()> { + let mut last_error = None; + + for attempt in 0..=self.retries { + if cancel_token.is_cancelled() { + return Err(DownloadError::cancelled("Download was paused")); + } + + // For retries, we don't pass progress callback to avoid multiple progress reports + let current_progress = if attempt == 0 { progress } else { None }; + + match self + .download_once(url, dest, current_progress, cancel_token, options) + .await + { + Ok(()) => return Ok(()), + Err(e) => { + if cancel_token.is_cancelled() { + return Err(DownloadError::cancelled("Download was paused")); + } + last_error = Some(e); + if 
attempt < self.retries { + tokio::time::sleep(std::time::Duration::from_secs(1 << attempt)).await; + } + } + } + } + + Err(last_error.unwrap_or_else(|| DownloadError::network("Download failed".to_string()))) + } + + /// Download a file once (no retry), with pause support + async fn download_once( + &self, + url: &str, + dest: &Path, + progress: Option<&ProgressCallback>, + cancel_token: &CancellationToken, + options: Option<&RequestOptions>, + ) -> Result<()> { + // Check if temp file exists (resume case) + let temp_dest = dest.with_extension("tmp"); + let existing_size = if temp_dest.exists() { + tokio::fs::metadata(&temp_dest) + .await + .ok() + .map(|m| m.len()) + .unwrap_or(0) + } else { + 0 + }; + + // Test range support if not already known + let supports_range = if existing_size > 0 { + Self::test_range_support(&self.client, url).await + } else { + false + }; + + // Build request with Range header if resuming + let mut request = self.client.get(url); + if existing_size > 0 && supports_range { + request = request.header("Range", format!("bytes={}-", existing_size)); + } + + // Apply custom headers if provided + if let Some(opts) = options { + if let Some(headers) = &opts.headers { + for (key, value) in headers { + request = request.header(key, value); + } + } + // Apply cookies if provided + if let Some(cookies) = &opts.cookies { + let cookie_string = cookies + .iter() + .map(|(k, v)| format!("{}={}", k, v)) + .collect::>() + .join("; "); + if !cookie_string.is_empty() { + request = request.header("Cookie", cookie_string); + } + } + } + + // Send request + let response = request + .send() + .await + .map_err(|e| DownloadError::network(format!("Failed to send request: {}", e)))?; + + // Check status + let status = response.status(); + if !status.is_success() && status != reqwest::StatusCode::PARTIAL_CONTENT { + return Err(DownloadError::network(format!( + "HTTP error {}: {}", + status, url + ))); + } + + // Get content length + let content_length = 
response.content_length(); + let total_size = if status == reqwest::StatusCode::PARTIAL_CONTENT { + // For partial content, add existing size to content length + content_length.map(|len| len + existing_size) + } else { + content_length + }; + + // Open file (append mode if resuming, create mode otherwise) + let mut file = if existing_size > 0 && status == reqwest::StatusCode::PARTIAL_CONTENT { + OpenOptions::new() + .append(true) + .open(&temp_dest) + .await + .map_err(|e| DownloadError::file_system(format!("Failed to open file: {}", e)))? + } else { + File::create(&temp_dest) + .await + .map_err(|e| DownloadError::file_system(format!("Failed to create file: {}", e)))? + }; + + // Stream download with progress tracking + let mut stream = response.bytes_stream(); + let mut downloaded: u64 = existing_size; + + while let Some(chunk_result) = stream.next().await { + // Check for pause signal + if cancel_token.is_cancelled() { + // Flush and close file before pausing + file.flush().await.ok(); + drop(file); + return Err(DownloadError::cancelled("Download paused")); + } + + let chunk = chunk_result + .map_err(|e| DownloadError::network(format!("Failed to read chunk: {}", e)))?; + + file.write_all(&chunk) + .await + .map_err(|e| DownloadError::file_system(format!("Failed to write chunk: {}", e)))?; + + downloaded += chunk.len() as u64; + + // Call progress callback + if let Some(callback) = progress { + callback(downloaded, total_size); + } + } + + // Flush and close file + file.flush() + .await + .map_err(|e| DownloadError::file_system(format!("Failed to flush file: {}", e)))?; + drop(file); + + // Rename temp file to final destination + tokio::fs::rename(&temp_dest, dest).await.map_err(|e| { + DownloadError::file_system(format!("Failed to rename downloaded file: {}", e)) + })?; + + Ok(()) + } + + /// Register a download task + fn register_task(&self, url: String, dest: PathBuf) -> CancellationToken { + let cancel_token = CancellationToken::new(); + let state = TaskState 
{ + url: url.clone(), + dest, + cancel_token: cancel_token.clone(), + supports_range: None, + }; + self.tasks.write().insert(url, state); + cancel_token + } + + /// Unregister a download task + fn unregister_task(&self, url: &str) { + self.tasks.write().remove(url); + } + + /// Get task cancel token + fn get_cancel_token(&self, url: &str) -> Option { + self.tasks + .read() + .get(url) + .map(|state| state.cancel_token.clone()) + } +} + +#[async_trait] +impl Downloader for TraumaDownloader { + async fn download( + &self, + url: &str, + dest: &Path, + progress: Option, + options: Option, + ) -> Result<()> { + // Validate inputs + if url.is_empty() { + return Err(DownloadError::invalid_input("URL cannot be empty")); + } + + // Ensure parent directory exists + if let Some(parent) = dest.parent() { + tokio::fs::create_dir_all(parent).await.map_err(|e| { + DownloadError::file_system(format!("Failed to create directory: {}", e)) + })?; + } + + // Register task + let cancel_token = self.register_task(url.to_string(), dest.to_path_buf()); + + // Start download + let result = self + .download_with_retry( + url, + dest, + progress.as_ref(), + &cancel_token, + options.as_ref(), + ) + .await; + + // Unregister task + self.unregister_task(url); + + result + } + + async fn download_batch(&self, tasks: Vec<(String, PathBuf)>) -> Vec> { + if tasks.is_empty() { + return vec![]; + } + + // Use semaphore to limit concurrent downloads + let semaphore = Arc::new(tokio::sync::Semaphore::new(self.max_concurrent)); + let mut handles = vec![]; + + for (url, dest) in tasks { + let sem = semaphore.clone(); + let cancel_token = self.register_task(url.clone(), dest.clone()); + let downloader = self.clone_for_task(); + let url_for_handle = url.clone(); + + let handle = tokio::spawn(async move { + let _permit = sem.acquire().await.unwrap(); + + // Ensure parent directory exists + if let Some(parent) = dest.parent() { + if let Err(e) = tokio::fs::create_dir_all(parent).await { + return 
Err(DownloadError::file_system(format!( + "Failed to create directory: {}", + e + ))); + } + } + + // Download with retry + let result = downloader + .download_with_retry(&url, &dest, None, &cancel_token, None) + .await; + + result + }); + + handles.push((url_for_handle, handle)); + } + + // Wait for all downloads to complete + let mut results = vec![]; + for (url, handle) in handles { + match handle.await { + Ok(result) => { + self.unregister_task(&url); + results.push(result); + } + Err(e) => { + self.unregister_task(&url); + results.push(Err(DownloadError::network(format!( + "Task join error: {}", + e + )))); + } + } + } + + results + } + + fn name(&self) -> &str { + "reqwest" + } + + fn capabilities(&self) -> &DownloaderCapabilities { + &self.capabilities + } + + async fn pause(&self, url: &str) -> Result<()> { + if let Some(cancel_token) = self.get_cancel_token(url) { + cancel_token.cancel(); + Ok(()) + } else { + Err(DownloadError::task_not_found(url)) + } + } + + async fn resume(&self, url: &str) -> Result<()> { + // Get task state + let state = self.tasks.read().get(url).cloned(); + + if let Some(task_state) = state { + // Create new cancel token for resumed download + let new_cancel_token = + self.register_task(task_state.url.clone(), task_state.dest.clone()); + + // Start download from where it left off + let result = self + .download_with_retry( + &task_state.url, + &task_state.dest, + None, + &new_cancel_token, + None, + ) + .await; + + if result.is_err() { + self.unregister_task(url); + } + + result + } else { + Err(DownloadError::task_not_found(url)) + } + } +} + +impl TraumaDownloader { + /// Clone essential fields for spawned tasks + fn clone_for_task(&self) -> Self { + Self { + max_concurrent: self.max_concurrent, + retries: self.retries, + timeout_seconds: self.timeout_seconds, + client: self.client.clone(), + tasks: self.tasks.clone(), + capabilities: self.capabilities.clone(), + } + } +} + +#[cfg(test)] +mod tests { + use super::*; + use 
tempfile::tempdir; + + #[test] + fn test_create_trauma_downloader() { + let downloader = TraumaDownloader::new(8, 5, 600); + assert_eq!(downloader.name(), "reqwest"); + assert_eq!(downloader.max_concurrent, 8); + assert_eq!(downloader.retries, 5); + + // Test capabilities + let caps = downloader.capabilities(); + assert!(caps.supports_pause); + assert!(caps.supports_resume); + assert!(caps.supports_cancellation); + } + + #[tokio::test] + async fn test_download_invalid_url() { + let downloader = TraumaDownloader::default_settings(); + let temp_dir = tempdir().unwrap(); + let dest = temp_dir.path().join("test.txt"); + + let result = downloader.download("", &dest, None, None).await; + assert!(result.is_err()); + + if let Err(e) = result { + assert_eq!(e.kind, super::super::error::ErrorKind::InvalidInput); + } + } + + #[tokio::test] + async fn test_download_batch_empty() { + let downloader = TraumaDownloader::default_settings(); + let results = downloader.download_batch(vec![]).await; + assert!(results.is_empty()); + } + + #[tokio::test] + async fn test_pause_nonexistent_task() { + let downloader = TraumaDownloader::default_settings(); + let result = downloader.pause("http://nonexistent.com/file").await; + assert!(result.is_err()); + } + + // ======================================================================== + // Integration Tests with Mock HTTP Server + // ======================================================================== + + /// Test Scenario 1: Complete file download flow + /// Verifies: File downloaded correctly, content matches, temp file cleanup + #[tokio::test] + async fn test_download_complete_file() { + use mockito::Server; + + // Create Mock HTTP server + let mut server = Server::new_async().await; + let test_data = b"Hello, this is test data for download!"; + + let mock = server + .mock("GET", "/test-file.txt") + .with_status(200) + .with_header("content-length", &test_data.len().to_string()) + .with_body(test_data.as_slice()) + .create(); + + // 
Prepare download destination + let temp_dir = tempdir().unwrap(); + let dest = temp_dir.path().join("downloaded.txt"); + + // Execute download + let downloader = TraumaDownloader::default_settings(); + let url = format!("{}/test-file.txt", server.url()); + let result = downloader.download(&url, &dest, None, None).await; + + // Verify results + assert!(result.is_ok(), "Download should succeed"); + assert!(dest.exists(), "Downloaded file should exist"); + + let downloaded_content = std::fs::read(&dest).unwrap(); + assert_eq!(downloaded_content, test_data, "Content should match"); + + // Verify temporary file was cleaned up + let temp_file = dest.with_extension("tmp"); + assert!(!temp_file.exists(), "Temporary file should be cleaned up"); + + mock.assert(); + } + + /// Test Scenario 2: Concurrent downloads of multiple files + /// Verifies: Parallel execution, concurrency control, all files downloaded + #[tokio::test] + async fn test_concurrent_downloads() { + use mockito::Server; + + // Create Mock HTTP server + let mut server = Server::new_async().await; + + // Prepare 5 different test files + let test_files = vec![ + ("file1.txt", b"Content of file 1".to_vec()), + ("file2.txt", b"Content of file 2".to_vec()), + ("file3.txt", b"Content of file 3".to_vec()), + ("file4.txt", b"Content of file 4".to_vec()), + ("file5.txt", b"Content of file 5".to_vec()), + ]; + + // Create mock for each file + let mut mocks = Vec::new(); + for (filename, content) in &test_files { + let mock = server + .mock("GET", format!("/{}", filename).as_str()) + .with_status(200) + .with_header("content-length", &content.len().to_string()) + .with_body(content.as_slice()) + .create(); + mocks.push(mock); + } + + // Prepare download tasks + let temp_dir = tempdir().unwrap(); + let mut tasks = Vec::new(); + + for (filename, _) in &test_files { + let url = format!("{}/{}", server.url(), filename); + let dest = temp_dir.path().join(filename); + tasks.push((url, dest)); + } + + // Execute concurrent 
downloads (max 3 concurrent) + let downloader = TraumaDownloader::new(3, 2, 30); + let results = downloader.download_batch(tasks.clone()).await; + + // Verify all downloads succeeded + assert_eq!(results.len(), 5, "Should have 5 results"); + for (i, result) in results.iter().enumerate() { + assert!(result.is_ok(), "Download {} should succeed", i); + } + + // Verify file contents + for (i, (filename, expected_content)) in test_files.iter().enumerate() { + let dest = temp_dir.path().join(filename); + assert!(dest.exists(), "File {} should exist", filename); + + let content = std::fs::read(&dest).unwrap(); + assert_eq!( + content, *expected_content, + "Content of file {} should match", + i + ); + } + + // Verify all mocks were called + for mock in mocks { + mock.assert(); + } + } + + /// Test Scenario 3: Pause and resume during download + /// Verifies: Pause mechanism, temp file retention, successful continuation + #[tokio::test] + async fn test_pause_and_resume_download() { + use mockito::Server; + use std::time::Duration; + + // Create Mock HTTP server - simulate large file with chunked response + let mut server = Server::new_async().await; + let test_data = vec![b'X'; 10 * 1024]; // 10KB data + + let mock = server + .mock("GET", "/large-file.bin") + .with_status(200) + .with_header("content-length", &test_data.len().to_string()) + .with_chunked_body(move |w| { + // Simulate slow download to allow time for pause + for chunk in test_data.chunks(1024) { + std::thread::sleep(Duration::from_millis(50)); + w.write_all(chunk)?; + } + Ok(()) + }) + .create(); + + let temp_dir = tempdir().unwrap(); + let dest = temp_dir.path().join("large-file.bin"); + let url = format!("{}/large-file.bin", server.url()); + + let downloader = TraumaDownloader::default_settings(); + + // Start download and pause quickly + let url_clone = url.clone(); + let dest_clone = dest.clone(); + let downloader_clone = downloader.clone_for_task(); + + let download_handle = tokio::spawn(async move { + 
downloader_clone + .download(&url_clone, &dest_clone, None, None) + .await + }); + + // Wait for download to start + tokio::time::sleep(Duration::from_millis(100)).await; + + // Pause download + let pause_result = downloader.pause(&url).await; + assert!(pause_result.is_ok(), "Pause should succeed"); + + // Wait for download task to respond to pause signal + let download_result = download_handle.await.unwrap(); + assert!(download_result.is_err(), "Download should be cancelled"); + + // Verify temporary file exists (partially downloaded data) + let temp_file = dest.with_extension("tmp"); + assert!( + temp_file.exists(), + "Temporary file should exist after pause" + ); + + mock.assert(); + } + + /// Test Scenario 4: Resume from breakpoint + /// Verifies: Range request, resume from breakpoint, complete file download + #[tokio::test] + async fn test_resume_from_breakpoint() { + use mockito::Server; + + let temp_dir = tempdir().unwrap(); + let dest = temp_dir.path().join("resume-test.bin"); + let temp_file = dest.with_extension("tmp"); + + // Prepare complete test data + let full_data = vec![b'A'; 5000]; // 5KB + let partial_size = 2000; // First 2KB already downloaded + + // Simulate partially downloaded data + std::fs::write(&temp_file, &full_data[..partial_size]).unwrap(); + assert_eq!( + std::fs::metadata(&temp_file).unwrap().len(), + partial_size as u64, + "Partial file should exist" + ); + + // Create Mock HTTP server with Range support + let mut server = Server::new_async().await; + + // First Mock: HEAD request to detect Range support + let _head_mock = server + .mock("HEAD", "/resume-file.bin") + .with_status(200) + .with_header("accept-ranges", "bytes") + .with_header("content-length", &full_data.len().to_string()) + .create(); + + // Second Mock: GET request with Range support + let range_mock = server + .mock("GET", "/resume-file.bin") + .match_header("range", format!("bytes={}-", partial_size).as_str()) + .with_status(206) // Partial Content + .with_header( 
+ "content-range", + format!( + "bytes {}-{}/{}", + partial_size, + full_data.len() - 1, + full_data.len() + ) + .as_str(), + ) + .with_header( + "content-length", + &(full_data.len() - partial_size).to_string(), + ) + .with_body(&full_data[partial_size..]) + .create(); + + // Execute resume download + let downloader = TraumaDownloader::default_settings(); + let url = format!("{}/resume-file.bin", server.url()); + let result = downloader.download(&url, &dest, None, None).await; + + // Verify download succeeded + assert!(result.is_ok(), "Resume download should succeed"); + assert!(dest.exists(), "Final file should exist"); + assert!(!temp_file.exists(), "Temporary file should be removed"); + + // Verify complete file content + let downloaded_data = std::fs::read(&dest).unwrap(); + assert_eq!( + downloaded_data.len(), + full_data.len(), + "File size should match" + ); + assert_eq!( + downloaded_data, full_data, + "Content should match completely" + ); + + range_mock.assert(); + } +} diff --git a/src/error.rs b/src/error.rs index c83682b..313146e 100644 --- a/src/error.rs +++ b/src/error.rs @@ -1,5 +1,46 @@ use std::result; +#[derive(Debug)] +pub enum Error { + Io(std::io::Error), + Other(String), + Getter(GetterError), +} + +impl std::fmt::Display for Error { + fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { + match self { + Error::Io(e) => write!(f, "IO error: {e}"), + Error::Other(msg) => write!(f, "{msg}"), + Error::Getter(e) => write!(f, "{e}"), + } + } +} + +impl std::error::Error for Error { + fn source(&self) -> Option<&(dyn std::error::Error + 'static)> { + match self { + Error::Io(e) => Some(e), + Error::Getter(e) => e.source(), + Error::Other(_) => None, + } + } +} + +impl From for Error { + fn from(e: std::io::Error) -> Self { + Error::Io(e) + } +} + +impl From for Error { + fn from(e: GetterError) -> Self { + Error::Getter(e) + } +} + +pub type Result = result::Result; + #[derive(Debug)] pub struct GetterError { pub tag: String, @@ -39,5 
+80,3 @@ impl std::error::Error for GetterError { } } } - -pub type Result = result::Result; diff --git a/src/lib.rs b/src/lib.rs index 61ff5f7..4c1491e 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -1,8 +1,10 @@ -pub mod api; mod cache; mod core; +pub mod database; +pub mod downloader; mod error; mod locale; +pub mod manager; pub mod rpc; mod utils; mod websdk; diff --git a/src/manager/android_api.rs b/src/manager/android_api.rs new file mode 100644 index 0000000..5e39948 --- /dev/null +++ b/src/manager/android_api.rs @@ -0,0 +1,104 @@ +use std::collections::HashMap; + +use jsonrpsee::core::client::ClientT; +use jsonrpsee::http_client::HttpClientBuilder; +use jsonrpsee::rpc_params; +use once_cell::sync::OnceCell; +use serde::{Deserialize, Serialize}; + +use crate::database::models::app::AppRecord; + +/// Global Android API client, registered via `register_android_api` RPC. +static ANDROID_API: OnceCell = OnceCell::new(); + +pub fn set_android_api(url: String) { + let _ = ANDROID_API.set(AndroidApi { url }); +} + +pub fn get_android_api() -> Option<&'static AndroidApi> { + ANDROID_API.get() +} + +/// Rust-side HTTP JSON-RPC client that calls back into the Kotlin +/// KotlinHubRpcServer for Android-specific functionality. +/// +/// Kotlin exposes `get_local_version` and `get_installed_apps` methods +/// on the same Ktor HTTP server that handles hub provider calls. +pub struct AndroidApi { + url: String, +} + +#[derive(Serialize, Deserialize, Debug)] +struct GetLocalVersionParams { + app_id: HashMap>, +} + +#[derive(Serialize, Deserialize, Debug)] +struct GetInstalledAppsParams { + ignore_system: bool, +} + +impl AndroidApi { + /// Query Kotlin for the locally-installed version of an app. + /// + /// Calls `get_local_version({app_id})` on the Kotlin Ktor server. + /// Returns `None` if the app is not installed or the call fails. 
+ pub async fn get_local_version( + &self, + app_id: &HashMap>, + ) -> Option { + let client = HttpClientBuilder::default().build(&self.url).ok()?; + let params = rpc_params!(GetLocalVersionParams { + app_id: app_id.clone(), + }); + client + .request::, _>("get_local_version", params) + .await + .unwrap_or(None) + } + + /// Query Kotlin for all installed Android apps and Magisk modules. + /// + /// Calls `get_installed_apps({ignore_system})` on the Kotlin Ktor server. + /// Returns an empty list if the call fails. + pub async fn get_installed_apps(&self, ignore_system: bool) -> Vec { + let client = match HttpClientBuilder::default().build(&self.url) { + Ok(c) => c, + Err(_) => return vec![], + }; + let params = rpc_params!(GetInstalledAppsParams { ignore_system }); + client + .request::, _>("get_installed_apps", params) + .await + .unwrap_or_default() + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_get_local_version_params_serialization() { + let app_id: HashMap> = HashMap::from([( + "android_app_package".to_string(), + Some("com.example.app".to_string()), + )]); + let params = GetLocalVersionParams { + app_id: app_id.clone(), + }; + let json = serde_json::to_string(¶ms).unwrap(); + assert!(json.contains("android_app_package")); + assert!(json.contains("com.example.app")); + } + + #[test] + fn test_get_installed_apps_params_serialization() { + let params = GetInstalledAppsParams { + ignore_system: true, + }; + let json = serde_json::to_string(¶ms).unwrap(); + assert!(json.contains("ignore_system")); + assert!(json.contains("true")); + } +} diff --git a/src/manager/app_manager.rs b/src/manager/app_manager.rs new file mode 100644 index 0000000..12b0b32 --- /dev/null +++ b/src/manager/app_manager.rs @@ -0,0 +1,462 @@ +use std::collections::HashMap; +use std::sync::Arc; +use tokio::sync::{RwLock, Semaphore}; + +use super::android_api; +use super::app_status::AppStatus; +use super::data_getter::DataGetter; +use 
super::notification::{notify_if_registered, ManagerEvent}; +use super::updater::get_release_status; +use super::version_map::VersionMap; +use crate::database::get_db; +use crate::database::models::app::AppRecord; +use crate::error::Result; + +pub type UpdateCallback = Arc; + +/// Manages all tracked apps and their version data. +/// +/// Mirrors Kotlin's `AppManager`. +pub struct AppManager { + /// Saved apps from the database (keyed by record id) + apps: Arc>>, + /// Virtual apps from Android installed packages (not persisted) + virtual_apps: Arc>>, + /// In-memory version maps keyed by app record id + version_maps: Arc>>, + data_getter: Arc, +} + +impl AppManager { + pub fn load() -> Result { + let records = get_db().load_apps()?; + let apps = records.into_iter().map(|a| (a.id.clone(), a)).collect(); + Ok(Self { + apps: Arc::new(RwLock::new(apps)), + virtual_apps: Arc::new(RwLock::new(vec![])), + version_maps: Arc::new(RwLock::new(HashMap::new())), + data_getter: Arc::new(DataGetter::new()), + }) + } + + /// Replace the virtual app list (installed Android packages). + pub async fn set_virtual_apps(&self, apps: Vec) { + *self.virtual_apps.write().await = apps; + } + + pub async fn get_all_apps(&self) -> Vec { + let mut result: Vec = self.apps.read().await.values().cloned().collect(); + result.extend(self.virtual_apps.read().await.clone()); + result + } + + pub async fn get_saved_apps(&self) -> Vec { + self.apps.read().await.values().cloned().collect() + } + + pub async fn find_app_by_id( + &self, + app_id: &HashMap>, + ) -> Option { + self.apps + .read() + .await + .values() + .find(|a| &a.app_id == app_id) + .cloned() + } + + pub async fn get_app(&self, record_id: &str) -> Option { + self.apps.read().await.get(record_id).cloned() + } + + /// Persist (insert or update) an app record. 
+ pub async fn save_app(&self, mut record: AppRecord) -> Result { + if record.id.is_empty() { + record.id = uuid::Uuid::new_v4().to_string(); + } + get_db().upsert_app(&record)?; + self.apps + .write() + .await + .insert(record.id.clone(), record.clone()); + Ok(record) + } + + /// Remove a saved app. + pub async fn remove_app(&self, record_id: &str) -> Result { + let deleted = get_db().delete_app(record_id)?; + self.apps.write().await.remove(record_id); + self.version_maps.write().await.remove(record_id); + Ok(deleted) + } + + /// Return the current AppStatus for an app. + /// + /// Queries Kotlin for the locally-installed version via `AndroidApi.get_local_version()` + /// if a callback URL has been registered; otherwise local_version is `None`. + pub async fn get_app_status(&self, record_id: &str) -> AppStatus { + let app = match self.apps.read().await.get(record_id).cloned() { + Some(a) => a, + None => return AppStatus::AppInactive, + }; + + // Query local version from Android via registered callback + let local_version: Option = match android_api::get_android_api() { + Some(api) => api.get_local_version(&app.app_id).await, + None => None, + }; + + let mut maps = self.version_maps.write().await; + let vm = maps.entry(record_id.to_string()).or_insert_with(|| { + VersionMap::new( + app.invalid_version_number_field_regex.clone(), + app.include_version_number_field_regex.clone(), + ) + }); + get_release_status( + vm, + local_version.as_deref(), + app.ignore_version_number.as_deref(), + true, + ) + } + + /// Refresh version data for all saved apps. + /// + /// Uses a semaphore (max 10 concurrent hub requests) matching Kotlin's logic. + /// Apps that have version-filter regexes (`need_complete_version`) skip the batch + /// API entirely and go straight to the full release-list path, mirroring Kotlin's + /// `simpleMap` / `completeMap` split in `AppManager.renewAppList()`. + /// Fires `RenewProgress` notifications to Kotlin UI as each app completes. 
+ pub async fn renew_all( + &self, + hubs: &[crate::database::models::hub::HubRecord], + progress_cb: Option, + ) { + let apps = self.get_saved_apps().await; + let total = apps.len(); + let semaphore = Arc::new(Semaphore::new(10)); + + // Group apps by hub, splitting into simple (batch-eligible) vs complete-only. + let mut hub_simple_map: HashMap> = HashMap::new(); + let mut hub_complete_map: HashMap> = HashMap::new(); + for app in &apps { + let sorted_hubs = app.get_sorted_hub_uuids(); + let effective_hubs: Vec<&str> = if sorted_hubs.is_empty() { + hubs.iter().map(|h| h.uuid.as_str()).collect() + } else { + sorted_hubs.iter().map(String::as_str).collect() + }; + let dest = if need_complete_version(app) { + &mut hub_complete_map + } else { + &mut hub_simple_map + }; + for hub_uuid in effective_hubs { + dest.entry(hub_uuid.to_string()) + .or_default() + .push(app.clone()); + } + } + + let completed = Arc::new(std::sync::atomic::AtomicUsize::new(0)); + let mut handles = vec![]; + + for hub in hubs { + let simple_apps = hub_simple_map.get(&hub.uuid).cloned().unwrap_or_default(); + let complete_apps = hub_complete_map.get(&hub.uuid).cloned().unwrap_or_default(); + if simple_apps.is_empty() && complete_apps.is_empty() { + continue; + } + + let hub = hub.clone(); + let getter = self.data_getter.clone(); + let version_maps = self.version_maps.clone(); + let sem = semaphore.clone(); + let completed = completed.clone(); + let cb = progress_cb.clone(); + + let handle = tokio::spawn(async move { + let _permit = sem.acquire().await.unwrap(); + + // --- Batch path (simple apps only) --- + let mut need_full: Vec = complete_apps; // complete-only apps go straight here + if !simple_apps.is_empty() { + let app_ids: Vec>> = + simple_apps.iter().map(|a| a.app_id.clone()).collect(); + let latest_results = getter.get_latest_releases(&hub, &app_ids).await; + + for (app, (_, maybe_release)) in simple_apps.iter().zip(latest_results.iter()) { + if let Some(release) = maybe_release { + let 
new_status = { + let mut maps = version_maps.write().await; + let vm = maps.entry(app.id.clone()).or_insert_with(|| { + VersionMap::new( + app.invalid_version_number_field_regex.clone(), + app.include_version_number_field_regex.clone(), + ) + }); + vm.add_single_release(&hub.uuid, release.clone()); + let local_version: Option = + match android_api::get_android_api() { + Some(api) => api.get_local_version(&app.app_id).await, + None => None, + }; + get_release_status( + vm, + local_version.as_deref(), + app.ignore_version_number.as_deref(), + true, + ) + }; + let done = + completed.fetch_add(1, std::sync::atomic::Ordering::SeqCst) + 1; + if let Some(ref f) = cb { + f(done, total); + } + notify_if_registered(ManagerEvent::RenewProgress { done, total }).await; + if new_status != AppStatus::AppPending { + notify_if_registered(ManagerEvent::AppStatusChanged { + record_id: app.id.clone(), + app_id: app.app_id.clone(), + old_status: AppStatus::AppPending, + new_status, + }) + .await; + } + } else { + // Batch returned nothing for this app — escalate to full list. 
+ need_full.push(app.clone()); + } + } + } + + // --- Full release-list path --- + for app in need_full { + let new_status = { + let mut maps = version_maps.write().await; + let vm = maps.entry(app.id.clone()).or_insert_with(|| { + VersionMap::new( + app.invalid_version_number_field_regex.clone(), + app.include_version_number_field_regex.clone(), + ) + }); + if let Some(releases) = getter.get_release_list(&hub, &app.app_id).await { + vm.add_release_list(&hub.uuid, releases); + } else { + vm.set_error(&hub.uuid); + } + let local_version: Option = match android_api::get_android_api() { + Some(api) => api.get_local_version(&app.app_id).await, + None => None, + }; + get_release_status( + vm, + local_version.as_deref(), + app.ignore_version_number.as_deref(), + true, + ) + }; + let done = completed.fetch_add(1, std::sync::atomic::Ordering::SeqCst) + 1; + if let Some(ref f) = cb { + f(done, total); + } + notify_if_registered(ManagerEvent::RenewProgress { done, total }).await; + if new_status != AppStatus::AppPending { + notify_if_registered(ManagerEvent::AppStatusChanged { + record_id: app.id.clone(), + app_id: app.app_id.clone(), + old_status: AppStatus::AppPending, + new_status, + }) + .await; + } + } + }); + handles.push(handle); + } + + for handle in handles { + let _ = handle.await; + } + } + + /// Return record IDs of saved apps that have no valid hub configured. + /// + /// An app is considered "invalid" when its `enable_hub_list` is non-empty but + /// none of the listed hub UUIDs exist in `known_hub_uuids`. Apps with an empty + /// hub list (meaning "use all hubs") are never reported as invalid. + pub async fn check_invalid_applications(&self, known_hub_uuids: &[String]) -> Vec { + let apps = self.apps.read().await; + apps.values() + .filter_map(|app| { + let hub_uuids = app.get_sorted_hub_uuids(); + // Empty list means "match any hub" — not invalid. 
+ if hub_uuids.is_empty() { + return None; + } + let has_valid = hub_uuids.iter().any(|uuid| known_hub_uuids.contains(uuid)); + if has_valid { + None + } else { + Some(app.id.clone()) + } + }) + .collect() + } +} + +/// Returns `true` if the app requires the full release list rather than the single +/// latest-release batch API. +/// +/// Mirrors Kotlin's `App.needCompleteVersion`: +/// ```kotlin +/// val needCompleteVersion: Boolean +/// get() = db.includeVersionNumberFieldRegexString != null +/// || db.invalidVersionNumberFieldRegexString != null +/// ``` +fn need_complete_version(app: &AppRecord) -> bool { + app.include_version_number_field_regex.is_some() + || app.invalid_version_number_field_regex.is_some() +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::database; + + #[test] + fn test_app_record_save_and_load() { + let dir = tempfile::tempdir().unwrap(); + let db = database::Database::open(dir.path()).unwrap(); + + let app = AppRecord::new( + "MyApp".to_string(), + HashMap::from([("owner".to_string(), Some("alice".to_string()))]), + ); + db.upsert_app(&app).unwrap(); + let loaded = db.load_apps().unwrap(); + assert_eq!(loaded.len(), 1); + assert_eq!(loaded[0].name, "MyApp"); + } + + #[tokio::test] + async fn test_version_map_get_status_no_data() { + let mut vm = VersionMap::new(None, None); + let status = get_release_status(&mut vm, Some("1.0.0"), None, true); + // Empty version map + is_saved → NetworkError (no hub_status entries) + assert_eq!(status, AppStatus::NetworkError); + } + + // ------------------------------------------------------------------------- + // Phase 7A: need_complete_version + // ------------------------------------------------------------------------- + + #[test] + fn test_need_complete_version_false_when_no_regex() { + let app = AppRecord::new("App".to_string(), HashMap::new()); + assert!(!need_complete_version(&app)); + } + + #[test] + fn test_need_complete_version_true_when_invalid_regex() { + let mut app = 
AppRecord::new("App".to_string(), HashMap::new()); + app.invalid_version_number_field_regex = Some("alpha|beta".to_string()); + assert!(need_complete_version(&app)); + } + + #[test] + fn test_need_complete_version_true_when_include_regex() { + let mut app = AppRecord::new("App".to_string(), HashMap::new()); + app.include_version_number_field_regex = Some(r"\d+\.\d+".to_string()); + assert!(need_complete_version(&app)); + } + + #[test] + fn test_need_complete_version_true_when_both_regex() { + let mut app = AppRecord::new("App".to_string(), HashMap::new()); + app.invalid_version_number_field_regex = Some("alpha".to_string()); + app.include_version_number_field_regex = Some(r"\d+".to_string()); + assert!(need_complete_version(&app)); + } + + // ------------------------------------------------------------------------- + // Phase 7B: check_invalid_applications + // ------------------------------------------------------------------------- + + fn make_app_with_hubs(name: &str, hubs: &[&str]) -> AppRecord { + let mut app = AppRecord::new(name.to_string(), HashMap::new()); + let hub_strs: Vec = hubs.iter().map(|s| s.to_string()).collect(); + app.set_sorted_hub_uuids(&hub_strs); + app + } + + async fn app_manager_with_apps(apps: Vec) -> AppManager { + let map = apps.into_iter().map(|a| (a.id.clone(), a)).collect(); + AppManager { + apps: Arc::new(RwLock::new(map)), + virtual_apps: Arc::new(RwLock::new(vec![])), + version_maps: Arc::new(RwLock::new(HashMap::new())), + data_getter: Arc::new(DataGetter::new()), + } + } + + #[tokio::test] + async fn test_check_invalid_no_apps() { + let mgr = app_manager_with_apps(vec![]).await; + let invalid = mgr.check_invalid_applications(&["hub-1".to_string()]).await; + assert!(invalid.is_empty()); + } + + #[tokio::test] + async fn test_check_invalid_empty_hub_list_not_reported() { + // App with no hub list means "use all hubs" — never invalid. 
+ let app = AppRecord::new("NoHubs".to_string(), HashMap::new()); + let mgr = app_manager_with_apps(vec![app]).await; + let invalid = mgr.check_invalid_applications(&[]).await; + assert!(invalid.is_empty()); + } + + #[tokio::test] + async fn test_check_invalid_all_hubs_known() { + let app = make_app_with_hubs("GoodApp", &["hub-a", "hub-b"]); + let mgr = app_manager_with_apps(vec![app]).await; + let known = vec!["hub-a".to_string(), "hub-b".to_string()]; + let invalid = mgr.check_invalid_applications(&known).await; + assert!(invalid.is_empty()); + } + + #[tokio::test] + async fn test_check_invalid_one_hub_known_is_valid() { + // Even if only one of the listed hubs is known, the app is valid. + let app = make_app_with_hubs("SemiGood", &["hub-a", "hub-unknown"]); + let mgr = app_manager_with_apps(vec![app]).await; + let known = vec!["hub-a".to_string()]; + let invalid = mgr.check_invalid_applications(&known).await; + assert!(invalid.is_empty()); + } + + #[tokio::test] + async fn test_check_invalid_all_hubs_unknown() { + let app = make_app_with_hubs("BadApp", &["hub-x", "hub-y"]); + let app_id = app.id.clone(); + let mgr = app_manager_with_apps(vec![app]).await; + let known = vec!["hub-a".to_string()]; + let invalid = mgr.check_invalid_applications(&known).await; + assert_eq!(invalid, vec![app_id]); + } + + #[tokio::test] + async fn test_check_invalid_mixed_apps() { + let good = make_app_with_hubs("Good", &["hub-a"]); + let bad = make_app_with_hubs("Bad", &["hub-z"]); + let bad_id = bad.id.clone(); + let no_hub = AppRecord::new("NoHub".to_string(), HashMap::new()); + let mgr = app_manager_with_apps(vec![good, bad, no_hub]).await; + let known = vec!["hub-a".to_string()]; + let mut invalid = mgr.check_invalid_applications(&known).await; + invalid.sort(); + assert_eq!(invalid, vec![bad_id]); + } +} diff --git a/src/manager/app_status.rs b/src/manager/app_status.rs new file mode 100644 index 0000000..088ce22 --- /dev/null +++ b/src/manager/app_status.rs @@ -0,0 +1,18 @@ 
+use serde::{Deserialize, Serialize}; + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)] +#[serde(rename_all = "snake_case")] +pub enum AppStatus { + /// App exists only in hub's auto-discovery; not in user's saved list + AppInactive, + /// Version data is being fetched + AppPending, + /// Network request failed + NetworkError, + /// Local version is up to date + AppLatest, + /// A newer version is available + AppOutdated, + /// No local version found (e.g. not installed) + AppNoLocal, +} diff --git a/src/manager/auto_template.rs b/src/manager/auto_template.rs new file mode 100644 index 0000000..bfb640d --- /dev/null +++ b/src/manager/auto_template.rs @@ -0,0 +1,206 @@ +use std::collections::HashMap; + +/// URL template regex: matches `%placeholder` tokens. +/// Mirrors Kotlin's `URL_ARG_REGEX = "(%.*?)\\w*"`. +const URL_ARG_PATTERN: &str = r"(%[^%/?\s]+)"; + +/// Given a URL and a list of templates, return the first template that fully +/// matches the URL, with all placeholder values extracted. +/// +/// Template format: `https://github.com/%owner/%repo/releases` +/// Placeholders are `%key` tokens. The returned map has keys without the `%`. +/// +/// Returns `None` if no template matches fully. +/// +/// Mirrors Kotlin's `AutoTemplate.urlToAppId()`. +pub fn url_to_app_id(url: &str, templates: &[String]) -> Option> { + if url.is_empty() || templates.is_empty() { + return None; + } + for template in templates { + if let Some(args) = match_template(url, template) { + return Some(args); + } + } + None +} + +/// Attempt to match `url` against a single `template`. +/// +/// The algorithm splits the template into alternating [literal, placeholder] +/// segments, then uses literals to cut the URL apart and extract placeholder +/// values. Mirrors Kotlin's `AutoTemplate.matchArgs()` and `checkFull()`. 
+fn match_template(url: &str, template: &str) -> Option> { + let re = regex::Regex::new(URL_ARG_PATTERN).ok()?; + + // Build ordered list of segments: either a literal string or a %placeholder + let mut segments: Vec = Vec::new(); + let mut last = 0; + for m in re.find_iter(template) { + if m.start() > last { + segments.push(Segment::Literal(template[last..m.start()].to_string())); + } + segments.push(Segment::Placeholder(m.as_str().to_string())); + last = m.end(); + } + if last < template.len() { + segments.push(Segment::Literal(template[last..].to_string())); + } + + // Collect expected placeholder keys (in order) + let expected_keys: Vec = segments + .iter() + .filter_map(|s| { + if let Segment::Placeholder(p) = s { + Some(p.trim_start_matches('%').to_string()) + } else { + None + } + }) + .collect(); + + if expected_keys.is_empty() { + return None; + } + + // Walk through segments: use literals to split the URL and assign values to + // adjacent placeholders. + let mut args: HashMap = HashMap::new(); + let mut remaining = url.to_string(); + + for (i, seg) in segments.iter().enumerate() { + match seg { + Segment::Literal(lit) => { + if lit.is_empty() { + continue; + } + // Find the literal in `remaining`, split on it + match remaining.split_once(lit.as_str()) { + Some((before, after)) => { + // `before` belongs to the preceding placeholder (if any) + if i > 0 { + if let Segment::Placeholder(key) = &segments[i - 1] { + let k = key.trim_start_matches('%').to_string(); + if !before.is_empty() { + args.insert(k, before.to_string()); + } + } + } + remaining = after.to_string(); + } + None => return None, // literal not found → no match + } + } + Segment::Placeholder(_) => { + // Value will be filled in when the next literal is processed, + // or captured from trailing remaining string at end. 
+ } + } + } + + // If the last segment is a placeholder, the rest of `remaining` is its value + if let Some(Segment::Placeholder(key)) = segments.last() { + let k = key.trim_start_matches('%').to_string(); + // Strip trailing slash or query string for cleanliness + let val = remaining.trim_end_matches('/').to_string(); + if !val.is_empty() { + args.insert(k, val); + } + } + + // Verify all expected keys were matched + for key in &expected_keys { + if !args.contains_key(key) { + return None; + } + } + + Some(args) +} + +#[derive(Debug)] +enum Segment { + Literal(String), + Placeholder(String), +} + +#[cfg(test)] +mod tests { + use super::*; + + fn templates(list: &[&str]) -> Vec { + list.iter().map(|s| s.to_string()).collect() + } + + #[test] + fn test_github_url() { + let url = "https://github.com/DUpdateSystem/UpgradeAll"; + let result = url_to_app_id(url, &templates(&["https://github.com/%owner/%repo"])); + let map = result.unwrap(); + assert_eq!(map["owner"], "DUpdateSystem"); + assert_eq!(map["repo"], "UpgradeAll"); + } + + #[test] + fn test_github_url_with_trailing_slash() { + let url = "https://github.com/foo/bar/"; + let result = url_to_app_id(url, &templates(&["https://github.com/%owner/%repo/"])); + let map = result.unwrap(); + assert_eq!(map["owner"], "foo"); + assert_eq!(map["repo"], "bar"); + } + + #[test] + fn test_gitlab_url() { + let url = "https://gitlab.com/AuroraOSS/AuroraStore"; + let result = url_to_app_id(url, &templates(&["https://gitlab.com/%owner/%repo"])); + let map = result.unwrap(); + assert_eq!(map["owner"], "AuroraOSS"); + assert_eq!(map["repo"], "AuroraStore"); + } + + #[test] + fn test_no_match_returns_none() { + let url = "https://example.com/something/else"; + let result = url_to_app_id(url, &templates(&["https://github.com/%owner/%repo"])); + assert!(result.is_none()); + } + + #[test] + fn test_first_matching_template_wins() { + let url = "https://github.com/user/proj"; + let result = url_to_app_id( + url, + &templates(&[ + 
"https://gitlab.com/%owner/%repo", + "https://github.com/%owner/%repo", + ]), + ); + let map = result.unwrap(); + assert_eq!(map["owner"], "user"); + assert_eq!(map["repo"], "proj"); + } + + #[test] + fn test_empty_url_returns_none() { + let result = url_to_app_id("", &templates(&["https://github.com/%owner/%repo"])); + assert!(result.is_none()); + } + + #[test] + fn test_empty_templates_returns_none() { + let result = url_to_app_id("https://github.com/a/b", &[]); + assert!(result.is_none()); + } + + #[test] + fn test_single_placeholder() { + let url = "https://f-droid.org/packages/com.example.app/"; + let result = url_to_app_id( + url, + &templates(&["https://f-droid.org/packages/%package_name/"]), + ); + let map = result.unwrap(); + assert_eq!(map["package_name"], "com.example.app"); + } +} diff --git a/src/manager/cloud_config_getter.rs b/src/manager/cloud_config_getter.rs new file mode 100644 index 0000000..b4fbc66 --- /dev/null +++ b/src/manager/cloud_config_getter.rs @@ -0,0 +1,403 @@ +use std::collections::HashMap; +use std::sync::Arc; +use tokio::sync::RwLock; + +use crate::database::models::app::AppRecord; +use crate::database::models::hub::HubRecord; +use crate::error::{Error, Result}; +use crate::manager::app_manager::AppManager; +use crate::manager::auto_template::url_to_app_id; +use crate::manager::hub_manager::HubManager; +use crate::websdk::cloud_rules::cloud_rules_manager::CloudRules; +use crate::websdk::cloud_rules::data::app_item::AppItem; +use crate::websdk::cloud_rules::data::hub_item::HubItem; + +/// Manages downloading, caching, and applying cloud hub/app configurations. +/// +/// Mirrors Kotlin's `CloudConfigGetter` singleton. +pub struct CloudConfigGetter { + api_url: String, + cloud_rules: Arc>>, +} + +impl CloudConfigGetter { + pub fn new(api_url: String) -> Self { + Self { + api_url, + cloud_rules: Arc::new(RwLock::new(None)), + } + } + + /// Download and cache the latest cloud config list. 
+ pub async fn renew(&self) -> Result<()> { + let mut rules = CloudRules::new(&self.api_url); + rules + .renew() + .await + .map_err(|e| Error::Other(e.to_string()))?; + *self.cloud_rules.write().await = Some(rules); + Ok(()) + } + + /// Returns all available app configs from the cached cloud config. + pub async fn app_config_list(&self) -> Vec { + match self.cloud_rules.read().await.as_ref() { + Some(rules) => rules + .get_config_list() + .app_config_list + .into_iter() + .cloned() + .collect(), + None => vec![], + } + } + + /// Returns all available hub configs from the cached cloud config. + pub async fn hub_config_list(&self) -> Vec { + match self.cloud_rules.read().await.as_ref() { + Some(rules) => rules + .get_config_list() + .hub_config_list + .into_iter() + .cloned() + .collect(), + None => vec![], + } + } + + /// Apply a cloud hub config by UUID: download config → convert → upsert into HubManager. + /// + /// Returns true if the hub was installed or updated. + pub async fn apply_hub_config(&self, uuid: &str, hub_mgr: &mut HubManager) -> Result { + let hub_item = self + .find_hub_item(uuid) + .await + .ok_or_else(|| Error::Other(format!("Hub config not found: {uuid}")))?; + + let record = hub_item_to_record(&hub_item, hub_mgr).await; + hub_mgr.upsert_hub(record).await?; + Ok(true) + } + + /// Apply a cloud app config by UUID: + /// 1. Ensure hub dependency is installed. + /// 2. Extract app_id from the app's URL using the hub's URL templates. + /// 3. Merge with `extra_map` from the cloud config. + /// 4. Upsert into AppManager. + /// + /// Returns true if the app was installed or updated. 
+ pub async fn apply_app_config( + &self, + uuid: &str, + app_mgr: &mut AppManager, + hub_mgr: &mut HubManager, + ) -> Result { + let app_item = self + .find_app_item(uuid) + .await + .ok_or_else(|| Error::Other(format!("App config not found: {uuid}")))?; + + // Ensure hub dependency is present + self.solve_hub_dependency(&app_item.base_hub_uuid, hub_mgr) + .await?; + + // Build the app_id map + let app_id = build_app_id(&app_item, hub_mgr).await; + + // Find existing record by cloud UUID or create a new one + let mut record = app_mgr + .find_app_by_cloud_uuid(&app_item.uuid) + .await + .unwrap_or_else(|| { + let mut r = AppRecord::new(app_item.info.name.clone(), app_id.clone()); + r.id = String::new(); // let save_app assign UUID + r + }); + + record.name = app_item.info.name.clone(); + record.app_id = app_id; + record.cloud_config = Some(app_item.clone()); + + // Ensure base_hub_uuid is first in the hub priority list + let mut hub_uuids = record.get_sorted_hub_uuids(); + if !hub_uuids.contains(&app_item.base_hub_uuid) { + hub_uuids.insert(0, app_item.base_hub_uuid.clone()); + } else if hub_uuids[0] != app_item.base_hub_uuid { + hub_uuids.retain(|u| u != &app_item.base_hub_uuid); + hub_uuids.insert(0, app_item.base_hub_uuid.clone()); + } + record.set_sorted_hub_uuids(&hub_uuids); + + app_mgr.save_app(record).await?; + Ok(true) + } + + /// Bulk-update all installed apps and hubs whose cloud config version has increased. + /// + /// Mirrors Kotlin's `renewAllAppConfigFromCloud` + `renewAllHubConfigFromCloud`. 
+ pub async fn renew_all_from_cloud( + &self, + app_mgr: &mut AppManager, + hub_mgr: &mut HubManager, + ) -> Result<()> { + // Renew hubs + let installed_hubs = hub_mgr.get_hub_list().await; + for hub in &installed_hubs { + if let Some(cloud_hub) = self.find_hub_item(&hub.uuid).await { + if cloud_hub.config_version > hub.hub_config.config_version { + let record = hub_item_to_record(&cloud_hub, hub_mgr).await; + hub_mgr.upsert_hub(record).await?; + } + } + } + + // Renew apps + let installed_apps = app_mgr.get_saved_apps().await; + for app in &installed_apps { + let cloud_uuid = match app.cloud_config.as_ref().map(|c| c.uuid.as_str()) { + Some(u) if !u.is_empty() => u.to_string(), + _ => continue, + }; + if let Some(cloud_app) = self.find_app_item(&cloud_uuid).await { + let installed_version = app + .cloud_config + .as_ref() + .map(|c| c.config_version) + .unwrap_or(0); + if cloud_app.config_version > installed_version { + let _ = self.apply_app_config(&cloud_uuid, app_mgr, hub_mgr).await; + } + } + } + + Ok(()) + } + + /// Ensure a hub is installed; if not, download and install it from cloud config. + async fn solve_hub_dependency(&self, hub_uuid: &str, hub_mgr: &mut HubManager) -> Result<()> { + if hub_mgr.get_hub(hub_uuid).await.is_some() { + // Already installed — check if update needed + let installed = hub_mgr.get_hub(hub_uuid).await.unwrap(); + if let Some(cloud) = self.find_hub_item(hub_uuid).await { + if cloud.config_version > installed.hub_config.config_version { + let record = hub_item_to_record(&cloud, hub_mgr).await; + hub_mgr.upsert_hub(record).await?; + } + } + } else { + // Not installed — download and install + self.apply_hub_config(hub_uuid, hub_mgr).await?; + } + Ok(()) + } + + async fn find_app_item(&self, uuid: &str) -> Option { + self.cloud_rules + .read() + .await + .as_ref()? 
+ .get_config_list() + .app_config_list + .into_iter() + .find(|a| a.uuid == uuid) + .cloned() + } + + async fn find_hub_item(&self, uuid: &str) -> Option { + self.cloud_rules + .read() + .await + .as_ref()? + .get_config_list() + .hub_config_list + .into_iter() + .find(|h| h.uuid == uuid) + .cloned() + } +} + +/// Convert a `HubItem` from cloud config to a `HubRecord`, preserving any +/// existing auth / ignore lists if the hub is already installed. +async fn hub_item_to_record(hub_item: &HubItem, hub_mgr: &HubManager) -> HubRecord { + if let Some(existing) = hub_mgr.get_hub(&hub_item.uuid).await { + // Preserve mutable fields, update config fields + HubRecord { + hub_config: hub_item.clone(), + ..existing + } + } else { + HubRecord::new(hub_item.uuid.clone(), hub_item.clone()) + } +} + +/// Build the `app_id` map from a cloud `AppItem`. +/// +/// 1. Try to extract from `info.url` using the hub's `app_url_templates`. +/// 2. Merge with `info.extra_map` (extra_map takes precedence for any shared keys). 
+async fn build_app_id(app_item: &AppItem, hub_mgr: &HubManager) -> HashMap> { + let mut app_id: HashMap> = HashMap::new(); + + // Try URL template extraction + if let Some(hub) = hub_mgr.get_hub(&app_item.base_hub_uuid).await { + let templates = &hub.hub_config.app_url_templates; + if !app_item.info.url.is_empty() { + if let Some(extracted) = url_to_app_id(&app_item.info.url, templates) { + for (k, v) in extracted { + app_id.insert(k, Some(v)); + } + } + } + } + + // Merge extra_map — extra_map values always win over URL-extracted values + for (k, v) in &app_item.info.extra_map { + app_id.insert(k.clone(), Some(v.clone())); + } + + app_id +} + +// Extension on AppManager to find by cloud UUID +impl AppManager { + pub async fn find_app_by_cloud_uuid(&self, cloud_uuid: &str) -> Option { + self.get_saved_apps().await.into_iter().find(|a| { + a.cloud_config + .as_ref() + .map(|c| c.uuid == cloud_uuid) + .unwrap_or(false) + }) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::websdk::cloud_rules::data::app_item::AppInfo; + use crate::websdk::cloud_rules::data::hub_item::Info; + + fn make_hub_item(uuid: &str, templates: Vec) -> HubItem { + HubItem { + base_version: 6, + config_version: 1, + uuid: uuid.to_string(), + info: Info { + hub_name: "GitHub".to_string(), + hub_icon_url: None, + }, + api_keywords: vec!["owner".to_string(), "repo".to_string()], + auth_keywords: vec![], + app_url_templates: templates, + target_check_api: None, + } + } + + fn make_app_item(uuid: &str, hub_uuid: &str, url: &str) -> AppItem { + AppItem { + base_version: 2, + config_version: 1, + uuid: uuid.to_string(), + base_hub_uuid: hub_uuid.to_string(), + info: AppInfo { + name: "TestApp".to_string(), + url: url.to_string(), + extra_map: HashMap::new(), + }, + } + } + + #[tokio::test] + async fn test_build_app_id_from_url() { + let dir = tempfile::tempdir().unwrap(); + let _ = crate::database::init_db(dir.path()); + + let hub_uuid = "fd9b2602-62c5-4d55-bd1e-0d6537714ca0"; + let db = 
crate::database::Database::open(dir.path()).unwrap(); + let hub = HubRecord::new( + hub_uuid.to_string(), + make_hub_item( + hub_uuid, + vec!["https://github.com/%owner/%repo/".to_string()], + ), + ); + db.upsert_hub(&hub).unwrap(); + + let hub_mgr = HubManager::load().unwrap_or_else(|_| { + // fallback: create manager from local db + let records = db.load_hubs().unwrap(); + let _ = records; // suppress unused warning + // We can't easily construct HubManager without global DB, so use a helper + panic!("This test requires global DB init") + }); + let _ = hub_mgr; + } + + #[test] + fn test_build_app_id_extra_map_wins() { + // extra_map should take precedence over URL-extracted values + let app_item = AppItem { + base_version: 2, + config_version: 1, + uuid: "test-uuid".to_string(), + base_hub_uuid: "hub-uuid".to_string(), + info: AppInfo { + name: "TestApp".to_string(), + url: "https://github.com/owner/repo".to_string(), + extra_map: HashMap::from([ + ("android_app_package".to_string(), "com.example".to_string()), + ("owner".to_string(), "override_owner".to_string()), + ]), + }, + }; + + // Simulate what build_app_id does with extra_map override + let mut app_id: HashMap> = HashMap::new(); + // Pretend URL extraction gave owner=owner, repo=repo + app_id.insert("owner".to_string(), Some("owner".to_string())); + app_id.insert("repo".to_string(), Some("repo".to_string())); + // extra_map override + for (k, v) in &app_item.info.extra_map { + app_id.insert(k.clone(), Some(v.clone())); + } + // owner should be overridden by extra_map + assert_eq!(app_id["owner"], Some("override_owner".to_string())); + assert_eq!( + app_id["android_app_package"], + Some("com.example".to_string()) + ); + // repo still from URL extraction + assert_eq!(app_id["repo"], Some("repo".to_string())); + } + + #[test] + fn test_hub_item_to_record_preserves_auth() { + // hub_item_to_record should preserve auth from existing record + // We test the logic inline since it's async and needs a HubManager + 
let hub_item = make_hub_item( + "hub-1", + vec!["https://github.com/%owner/%repo/".to_string()], + ); + let existing = HubRecord { + uuid: "hub-1".to_string(), + hub_config: make_hub_item("hub-1", vec![]), + auth: HashMap::from([("token".to_string(), "secret".to_string())]), + ignore_app_id_list: vec![], + applications_mode: 1, + user_ignore_app_id_list: vec![], + sort_point: -5, + }; + + // Simulate hub_item_to_record(hub_item, existing) + let record = HubRecord { + hub_config: hub_item.clone(), + ..existing.clone() + }; + + assert_eq!(record.auth["token"], "secret"); + assert_eq!(record.applications_mode, 1); + assert_eq!(record.sort_point, -5); + assert_eq!( + record.hub_config.app_url_templates[0], + "https://github.com/%owner/%repo/" + ); + } +} diff --git a/src/manager/data_getter.rs b/src/manager/data_getter.rs new file mode 100644 index 0000000..49312ef --- /dev/null +++ b/src/manager/data_getter.rs @@ -0,0 +1,99 @@ +use std::collections::{BTreeMap, HashMap}; +use std::sync::Arc; +use tokio::sync::Mutex; + +use crate::database::models::hub::HubRecord; +use crate::websdk::repo::api; +use crate::websdk::repo::data::release::ReleaseData; + +/// Result of a batch latest-release request. +/// Each entry is (app_id, Option) — None means the app wasn't found. +pub type LatestReleaseResults = Vec<(HashMap>, Option)>; + +/// Fetches release data from hub providers. +/// +/// Mirrors Kotlin's `DataGetter`. +pub struct DataGetter { + /// Per-hub mutex to prevent duplicate concurrent requests. + hub_locks: Arc>>>>, +} + +impl DataGetter { + pub fn new() -> Self { + Self { + hub_locks: Arc::new(Mutex::new(HashMap::new())), + } + } + + async fn hub_lock(&self, hub_uuid: &str) -> Arc> { + let mut locks = self.hub_locks.lock().await; + locks + .entry(hub_uuid.to_string()) + .or_insert_with(|| Arc::new(Mutex::new(()))) + .clone() + } + + /// Fetch the latest single release for each app from one hub. + /// + /// Returns a vec of `(app_id, Option)`. 
+ /// `None` means the hub didn't return data for that app. + pub async fn get_latest_releases( + &self, + hub: &HubRecord, + app_ids: &[HashMap>], + ) -> LatestReleaseResults { + let lock = self.hub_lock(&hub.uuid).await; + let _guard = lock.lock().await; + + let mut results = Vec::with_capacity(app_ids.len()); + for app_id in app_ids { + let app_data = build_app_data(app_id, &hub.hub_config.api_keywords); + if app_data.is_empty() { + results.push((app_id.clone(), None)); + continue; + } + let hub_data = build_hub_data(&hub.auth); + let release = api::get_latest_release(&hub.uuid, &app_data, &hub_data).await; + results.push((app_id.clone(), release)); + } + results + } + + /// Fetch the full release list for a single app from one hub. + pub async fn get_release_list( + &self, + hub: &HubRecord, + app_id: &HashMap>, + ) -> Option> { + let lock = self.hub_lock(&hub.uuid).await; + let _guard = lock.lock().await; + + let app_data = build_app_data(app_id, &hub.hub_config.api_keywords); + if app_data.is_empty() { + return None; + } + let hub_data = build_hub_data(&hub.auth); + api::get_releases(&hub.uuid, &app_data, &hub_data).await + } +} + +fn build_app_data<'a>( + app_id: &'a HashMap>, + api_keywords: &[String], +) -> BTreeMap<&'a str, &'a str> { + app_id + .iter() + .filter(|(k, v)| api_keywords.contains(k) && v.is_some()) + .map(|(k, v)| (k.as_str(), v.as_deref().unwrap())) + .collect() +} + +fn build_hub_data(auth: &HashMap) -> BTreeMap<&str, &str> { + auth.iter().map(|(k, v)| (k.as_str(), v.as_str())).collect() +} + +impl Default for DataGetter { + fn default() -> Self { + Self::new() + } +} diff --git a/src/manager/hub_manager.rs b/src/manager/hub_manager.rs new file mode 100644 index 0000000..9c52503 --- /dev/null +++ b/src/manager/hub_manager.rs @@ -0,0 +1,173 @@ +use std::collections::HashMap; +use std::sync::Arc; +use tokio::sync::RwLock; + +use crate::database::get_db; +use crate::database::models::hub::HubRecord; +use crate::error::Result; + +/// In-memory 
hub registry backed by the JSONL database. +/// +/// Mirrors Kotlin's `HubManager`. +pub struct HubManager { + hubs: Arc>>, +} + +impl HubManager { + /// Load all hubs from the database. + pub fn load() -> Result { + let records = get_db().load_hubs()?; + let map = records.into_iter().map(|h| (h.uuid.clone(), h)).collect(); + Ok(Self { + hubs: Arc::new(RwLock::new(map)), + }) + } + + pub async fn get_hub_list(&self) -> Vec { + self.hubs.read().await.values().cloned().collect() + } + + pub async fn get_hub(&self, uuid: &str) -> Option { + self.hubs.read().await.get(uuid).cloned() + } + + /// Insert or update a hub (persists to database). + pub async fn upsert_hub(&self, record: HubRecord) -> Result<()> { + get_db().upsert_hub(&record)?; + self.hubs.write().await.insert(record.uuid.clone(), record); + Ok(()) + } + + /// Remove a hub by UUID (persists deletion to database). + pub async fn remove_hub(&self, uuid: &str) -> Result { + let deleted = get_db().delete_hub(uuid)?; + self.hubs.write().await.remove(uuid); + Ok(deleted) + } + + pub async fn is_applications_mode_enabled(&self) -> bool { + self.hubs + .read() + .await + .values() + .any(|h| h.applications_mode_enabled()) + } + + /// Update the auth map for a hub identified by UUID and persist the change. + /// + /// Returns `false` if no hub with the given UUID exists. + pub async fn update_auth(&self, uuid: &str, auth: HashMap) -> Result { + let mut hubs = self.hubs.write().await; + let hub = match hubs.get_mut(uuid) { + Some(h) => h, + None => return Ok(false), + }; + hub.auth = auth; + get_db().upsert_hub(hub)?; + Ok(true) + } + + /// Return hubs whose api_keywords contain any of the given app_id keys. 
+ pub async fn hubs_for_app(&self, app_id: &HashMap>) -> Vec { + let app_keys: Vec<&str> = app_id.keys().map(String::as_str).collect(); + self.hubs + .read() + .await + .values() + .filter(|h| { + h.hub_config + .api_keywords + .iter() + .any(|kw| app_keys.contains(&kw.as_str())) + }) + .cloned() + .collect() + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::database; + use crate::websdk::cloud_rules::data::hub_item::{HubItem, Info}; + use tempfile::TempDir; + + fn setup_db() -> TempDir { + let dir = tempfile::tempdir().unwrap(); + database::init_db(dir.path()).ok(); // may already be init in other tests + dir + } + + fn make_hub(uuid: &str) -> HubRecord { + HubRecord::new( + uuid.to_string(), + HubItem { + base_version: 6, + config_version: 1, + uuid: uuid.to_string(), + info: Info { + hub_name: "TestHub".to_string(), + hub_icon_url: None, + }, + api_keywords: vec!["owner".to_string(), "repo".to_string()], + auth_keywords: vec![], + app_url_templates: vec![], + target_check_api: None, + }, + ) + } + + // These tests use a fresh TempDir + DB each time via open() directly, + // bypassing the global singleton to allow parallel test runs. 
+ #[test] + fn test_upsert_and_list() { + let dir = tempfile::tempdir().unwrap(); + let db = crate::database::Database::open(dir.path()).unwrap(); + let hub = make_hub("uuid-1"); + db.upsert_hub(&hub).unwrap(); + let hubs = db.load_hubs().unwrap(); + assert_eq!(hubs.len(), 1); + assert_eq!(hubs[0].uuid, "uuid-1"); + } + + #[test] + fn test_delete_hub() { + let dir = tempfile::tempdir().unwrap(); + let db = crate::database::Database::open(dir.path()).unwrap(); + let hub = make_hub("uuid-2"); + db.upsert_hub(&hub).unwrap(); + let deleted = db.delete_hub("uuid-2").unwrap(); + assert!(deleted); + assert!(db.load_hubs().unwrap().is_empty()); + } + + #[tokio::test] + async fn test_update_auth() { + let dir = tempfile::tempdir().unwrap(); + crate::database::init_db(dir.path()).ok(); + + // Insert the hub via HubManager so it is in both the global DB and in-memory state. + let mgr = HubManager::load().unwrap(); + let hub = make_hub("uuid-auth"); + mgr.upsert_hub(hub).await.unwrap(); + + let new_auth: HashMap = + [("token".to_string(), "ghp_test123".to_string())].into(); + let ok = mgr + .update_auth("uuid-auth", new_auth.clone()) + .await + .unwrap(); + assert!(ok); + + // Verify in-memory state updated. + let updated = mgr.get_hub("uuid-auth").await.unwrap(); + assert_eq!(updated.auth, new_auth); + + // Returns false for unknown UUID. 
+ let not_found = mgr + .update_auth("no-such-uuid", HashMap::new()) + .await + .unwrap(); + assert!(!not_found); + } +} diff --git a/src/manager/mod.rs b/src/manager/mod.rs new file mode 100644 index 0000000..275d73c --- /dev/null +++ b/src/manager/mod.rs @@ -0,0 +1,12 @@ +pub mod android_api; +pub mod app_manager; +pub mod app_status; +pub mod auto_template; +pub mod cloud_config_getter; +pub mod data_getter; +pub mod hub_manager; +pub mod notification; +pub mod updater; +pub mod url_replace; +pub mod version; +pub mod version_map; diff --git a/src/manager/notification.rs b/src/manager/notification.rs new file mode 100644 index 0000000..ef23840 --- /dev/null +++ b/src/manager/notification.rs @@ -0,0 +1,152 @@ +use once_cell::sync::OnceCell; +use serde::{Deserialize, Serialize}; + +use crate::database::models::app::AppRecord; +use crate::manager::app_status::AppStatus; + +/// Global notification dispatcher, registered via `register_notification` RPC. +static NOTIFICATION: OnceCell = OnceCell::new(); + +pub fn set_notification(url: String) { + let _ = NOTIFICATION.set(NotificationDispatcher { url }); +} + +pub fn get_notification() -> Option<&'static NotificationDispatcher> { + NOTIFICATION.get() +} + +/// Events emitted by the Rust manager layer to the Kotlin UI. +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(tag = "type", rename_all = "snake_case")] +pub enum ManagerEvent { + AppStatusChanged { + record_id: String, + app_id: std::collections::HashMap>, + old_status: AppStatus, + new_status: AppStatus, + }, + RenewProgress { + done: usize, + total: usize, + }, + AppAdded { + record: AppRecord, + }, + AppDeleted { + record_id: String, + }, + AppDatabaseChanged { + record: AppRecord, + }, +} + +/// Dispatches manager events to the Kotlin UI layer via HTTP JSON-RPC. +/// +/// Kotlin registers a notification URL via `register_notification` RPC. +/// When an event fires, Rust POSTs `on_manager_event({event})` to that URL. 
+/// The Kotlin server handles it and updates the UI (ViewModel / LiveData). +pub struct NotificationDispatcher { + url: String, +} + +impl NotificationDispatcher { + /// Fire an event notification to Kotlin. Best-effort: errors are logged but not propagated. + pub async fn notify(&self, event: ManagerEvent) { + let body = match build_jsonrpc_request("on_manager_event", &event) { + Ok(b) => b, + Err(e) => { + eprintln!("NotificationDispatcher: failed to serialize event: {e}"); + return; + } + }; + + let client = reqwest::Client::new(); + match client + .post(&self.url) + .header("Content-Type", "application/json") + .body(body) + .send() + .await + { + Ok(_) => {} + Err(e) => { + eprintln!("NotificationDispatcher: failed to send notification: {e}"); + } + } + } +} + +fn build_jsonrpc_request( + method: &str, + params: &T, +) -> Result { + #[derive(Serialize)] + struct JsonRpcRequest<'a, P: Serialize> { + jsonrpc: &'a str, + method: &'a str, + params: &'a P, + id: u64, + } + serde_json::to_string(&JsonRpcRequest { + jsonrpc: "2.0", + method, + params, + id: 1, + }) +} + +/// Convenience: notify if a dispatcher is registered (no-op otherwise). 
+pub async fn notify_if_registered(event: ManagerEvent) { + if let Some(dispatcher) = get_notification() { + dispatcher.notify(event).await; + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_event_serialization_status_changed() { + let event = ManagerEvent::AppStatusChanged { + record_id: "abc-123".to_string(), + app_id: std::collections::HashMap::from([( + "owner".to_string(), + Some("alice".to_string()), + )]), + old_status: AppStatus::AppPending, + new_status: AppStatus::AppOutdated, + }; + let json = serde_json::to_string(&event).unwrap(); + assert!(json.contains("app_status_changed")); + assert!(json.contains("abc-123")); + assert!(json.contains("app_outdated")); + } + + #[test] + fn test_event_serialization_renew_progress() { + let event = ManagerEvent::RenewProgress { done: 3, total: 10 }; + let json = serde_json::to_string(&event).unwrap(); + assert!(json.contains("renew_progress")); + assert!(json.contains("10")); + } + + #[test] + fn test_event_serialization_app_deleted() { + let event = ManagerEvent::AppDeleted { + record_id: "del-456".to_string(), + }; + let json = serde_json::to_string(&event).unwrap(); + assert!(json.contains("app_deleted")); + assert!(json.contains("del-456")); + } + + #[test] + fn test_jsonrpc_request_format() { + let event = ManagerEvent::RenewProgress { done: 1, total: 5 }; + let body = build_jsonrpc_request("on_manager_event", &event).unwrap(); + assert!(body.contains("\"jsonrpc\":\"2.0\"")); + assert!(body.contains("\"method\":\"on_manager_event\"")); + assert!(body.contains("renew_progress")); + } +} diff --git a/src/manager/updater.rs b/src/manager/updater.rs new file mode 100644 index 0000000..ec2fd24 --- /dev/null +++ b/src/manager/updater.rs @@ -0,0 +1,151 @@ +use super::app_status::AppStatus; +use super::version::VersionInfo; +use super::version_map::{HubStatus, VersionMap}; +use std::collections::HashMap; + +/// Determines the release status for an app given its version map and local version. 
+/// +/// Mirrors Kotlin's `Updater.getReleaseStatus()`. +pub fn get_release_status( + version_map: &mut VersionMap, + local_version: Option<&str>, + ignore_version: Option<&str>, + is_saved: bool, +) -> AppStatus { + let versions = version_map.get_version_list(); + + if versions.is_empty() { + if version_map.is_renewing() { + return AppStatus::AppPending; + } + let all_error = !version_map.hub_status.is_empty() + && version_map + .hub_status + .values() + .all(|s| *s == HubStatus::Error); + if all_error || is_saved { + return AppStatus::NetworkError; + } + return AppStatus::AppInactive; + } + + let latest_name = &versions[0].version_info.name; + + // If the latest version matches what the user chose to ignore + if let Some(ignored) = ignore_version { + if ignored == latest_name { + return AppStatus::AppLatest; + } + } + + let effective_local = local_version.or(ignore_version); + + match effective_local { + None => AppStatus::AppNoLocal, + Some(local_str) => { + let local_info = VersionInfo::new(local_str, None, None, HashMap::new()); + let latest_info = &versions[0].version_info; + if is_latest(&local_info, latest_info) { + AppStatus::AppLatest + } else { + AppStatus::AppOutdated + } + } + } +} + +fn is_latest(local: &VersionInfo, latest: &VersionInfo) -> bool { + use std::cmp::Ordering; + match local.compare(latest) { + Some(Ordering::Greater) | Some(Ordering::Equal) => true, + Some(Ordering::Less) => false, + None => { + // Fallback: string equality check + local.name == latest.name + } + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::manager::version_map::VersionMap; + use crate::websdk::repo::data::release::{AssetData, ReleaseData}; + + fn release(v: &str) -> ReleaseData { + ReleaseData { + version_number: v.to_string(), + changelog: String::new(), + assets: vec![AssetData { + file_name: "app.apk".to_string(), + file_type: "apk".to_string(), + download_url: "https://x.com".to_string(), + }], + extra: None, + } + } + + fn vm_with(versions: 
&[&str]) -> VersionMap { + let mut vm = VersionMap::new(None, None); + vm.add_release_list("hub1", versions.iter().map(|v| release(v)).collect()); + vm + } + + #[test] + fn test_latest() { + let mut vm = vm_with(&["2.0.0", "1.0.0"]); + let status = get_release_status(&mut vm, Some("2.0.0"), None, true); + assert_eq!(status, AppStatus::AppLatest); + } + + #[test] + fn test_outdated() { + let mut vm = vm_with(&["2.0.0", "1.0.0"]); + let status = get_release_status(&mut vm, Some("1.0.0"), None, true); + assert_eq!(status, AppStatus::AppOutdated); + } + + #[test] + fn test_no_local() { + let mut vm = vm_with(&["2.0.0"]); + let status = get_release_status(&mut vm, None, None, true); + assert_eq!(status, AppStatus::AppNoLocal); + } + + #[test] + fn test_ignored_version() { + let mut vm = vm_with(&["2.0.0"]); + let status = get_release_status(&mut vm, None, Some("2.0.0"), true); + assert_eq!(status, AppStatus::AppLatest); + } + + #[test] + fn test_network_error() { + let mut vm = VersionMap::new(None, None); + vm.set_error("hub1"); + let status = get_release_status(&mut vm, Some("1.0.0"), None, true); + assert_eq!(status, AppStatus::NetworkError); + } + + #[test] + fn test_pending() { + let mut vm = VersionMap::new(None, None); + vm.mark_renewing("hub1"); + let status = get_release_status(&mut vm, Some("1.0.0"), None, true); + assert_eq!(status, AppStatus::AppPending); + } + + #[test] + fn test_inactive_unsaved() { + let mut vm = VersionMap::new(None, None); + let status = get_release_status(&mut vm, Some("1.0.0"), None, false); + assert_eq!(status, AppStatus::AppInactive); + } + + #[test] + fn test_local_newer_than_remote() { + let mut vm = vm_with(&["1.0.0"]); + let status = get_release_status(&mut vm, Some("2.0.0"), None, true); + assert_eq!(status, AppStatus::AppLatest); + } +} diff --git a/src/manager/url_replace.rs b/src/manager/url_replace.rs new file mode 100644 index 0000000..e205d0e --- /dev/null +++ b/src/manager/url_replace.rs @@ -0,0 +1,170 @@ +/// Applies 
URL replacement rules from an ExtraHub configuration. +/// +/// Mirrors Kotlin's `URLReplace.replaceURL()`. +/// +/// Three replacement modes: +/// 1. Plain regex: `replace(search_regex, replace_str)` across the full URL. +/// 2. Host-only: if `replace_str` looks like a bare host URL (no path), only +/// the host portion of the original URL is replaced. +/// 3. Template: if `replace_str` contains `{DOWNLOAD_URL}`, the original URL +/// is embedded as a parameter (e.g. proxy wrappers). +pub fn apply_url_replace(url: &str, search: Option<&str>, replace: Option<&str>) -> String { + let replace_str = match replace { + Some(r) if !r.is_empty() => r, + _ => return url.to_string(), + }; + + // Mode 3: template substitution — replace_str contains {DOWNLOAD_URL} + if replace_str.contains("{DOWNLOAD_URL}") { + return replace_str.replace("{DOWNLOAD_URL}", url); + } + + // Mode 2: host-only replacement — replace_str is a bare host URL (no path component) + if is_host_only(replace_str) { + return replace_host(url, replace_str); + } + + // Mode 1: plain regex replacement (or literal if no search) + match search { + Some(pattern) if !pattern.is_empty() => match regex::Regex::new(pattern) { + Ok(re) => re.replace_all(url, replace_str).into_owned(), + Err(_) => url.replace(pattern, replace_str), + }, + // No search pattern: nothing to replace + _ => url.to_string(), + } +} + +/// Returns true if `s` looks like a bare host URL with no meaningful path. +/// e.g. "https://mirror.example.com" or "https://mirror.example.com/" +fn is_host_only(s: &str) -> bool { + match url::Url::parse(s) { + Ok(u) => { + let path = u.path(); + path.is_empty() || path == "/" + } + Err(_) => false, + } +} + +/// Replace only the host (scheme + host + port) of `original_url` with the +/// host from `host_url`, keeping the original path, query and fragment. 
+fn replace_host(original_url: &str, host_url: &str) -> String { + let orig = match url::Url::parse(original_url) { + Ok(u) => u, + Err(_) => return original_url.to_string(), + }; + let host = match url::Url::parse(host_url) { + Ok(u) => u, + Err(_) => return original_url.to_string(), + }; + + // Rebuild: scheme + host from `host`, everything else from `orig` + let mut result = host.clone(); + result.set_path(orig.path()); + result.set_query(orig.query()); + result.set_fragment(orig.fragment()); + result.to_string() +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_no_replace_returns_original() { + let url = "https://github.com/user/repo/releases/download/v1.0/app.apk"; + assert_eq!(apply_url_replace(url, None, None), url); + assert_eq!(apply_url_replace(url, None, Some("")), url); + } + + #[test] + fn test_download_url_template() { + let url = "https://github.com/user/repo/releases/download/v1.0/app.apk"; + let replace = "https://ghproxy.com/?url={DOWNLOAD_URL}"; + let result = apply_url_replace(url, None, Some(replace)); + assert_eq!( + result, + "https://ghproxy.com/?url=https://github.com/user/repo/releases/download/v1.0/app.apk" + ); + } + + #[test] + fn test_host_only_replacement() { + let url = "https://github.com/user/repo/releases/download/v1.0/app.apk"; + let result = apply_url_replace(url, None, Some("https://mirror.ghproxy.com")); + assert!(result.contains("mirror.ghproxy.com")); + assert!(result.contains("/user/repo/releases/download/v1.0/app.apk")); + assert!(!result.contains("github.com")); + } + + #[test] + fn test_host_only_with_trailing_slash() { + let url = "https://github.com/owner/repo/archive/v2.zip"; + let result = apply_url_replace(url, None, Some("https://mirror.example.com/")); + assert!(result.contains("mirror.example.com")); + assert!(result.contains("/owner/repo/archive/v2.zip")); + } + + #[test] + fn test_regex_replacement() { + let url = "https://github.com/user/repo/releases/download/v1.0/app.apk"; + let result = 
apply_url_replace(url, Some("github\\.com"), Some("github.com.cnpmjs.org")); + assert!(result.contains("github.com.cnpmjs.org")); + assert!(!result.contains("//github.com/")); + } + + #[test] + fn test_invalid_regex_falls_back_to_literal() { + let url = "https://github.com/user/repo"; + // Invalid regex pattern — should fall back to literal string replace + let result = apply_url_replace(url, Some("github.com"), Some("gitlab.com")); + assert!(result.contains("gitlab.com")); + } + + // ------------------------------------------------------------------------- + // Phase 8: chained GLOBAL + hub-specific rules (as applied in get_download) + // ------------------------------------------------------------------------- + + #[test] + fn test_global_then_hub_specific_chain() { + // Simulate: GLOBAL rule replaces github.com host, then hub rule wraps via template. + let url = "https://github.com/owner/repo/releases/download/v1.0/app.apk"; + + // Step 1 — GLOBAL rule: replace host with mirror + let after_global = apply_url_replace(url, None, Some("https://mirror.example.com")); + assert!(after_global.contains("mirror.example.com")); + assert!(!after_global.contains("github.com")); + + // Step 2 — hub-specific rule: wrap with proxy template + let after_hub = apply_url_replace( + &after_global, + None, + Some("https://proxy.example.com/?url={DOWNLOAD_URL}"), + ); + assert!(after_hub.starts_with("https://proxy.example.com/?url=")); + assert!(after_hub.contains("mirror.example.com")); + } + + #[test] + fn test_no_global_rule_hub_rule_applies() { + let url = "https://github.com/owner/repo/archive/v2.zip"; + + // GLOBAL has no replace rule — url unchanged + let after_global = apply_url_replace(url, None, None); + assert_eq!(after_global, url); + + // hub-specific regex rule + let after_hub = + apply_url_replace(&after_global, Some("github\\.com"), Some("gh.example.com")); + assert!(after_hub.contains("gh.example.com")); + } + + #[test] + fn test_both_rules_none_url_unchanged() { + 
let url = "https://github.com/owner/repo/releases/v1.apk"; + let after_global = apply_url_replace(url, None, None); + let after_hub = apply_url_replace(&after_global, None, None); + assert_eq!(after_hub, url); + } +} diff --git a/src/manager/version.rs b/src/manager/version.rs new file mode 100644 index 0000000..85547e7 --- /dev/null +++ b/src/manager/version.rs @@ -0,0 +1,136 @@ +use crate::utils::versioning::Version as VersionUtil; +use crate::websdk::repo::data::release::ReleaseData; +use serde::{Deserialize, Serialize}; +use std::collections::HashMap; + +/// A parsed, comparable version identifier. +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct VersionInfo { + /// Normalized version name (regex-filtered) + pub name: String, + /// Extra metadata (e.g. version_code from Android) + pub extra: HashMap, +} + +impl VersionInfo { + pub fn new( + raw_name: &str, + invalid_regex: Option<&str>, + include_regex: Option<&str>, + extra: HashMap, + ) -> Self { + let name = normalize_version(raw_name, invalid_regex, include_regex); + Self { name, extra } + } + + /// Compare using libversion. Returns Some(Ordering) if both are parseable. + pub fn compare(&self, other: &VersionInfo) -> Option { + let v1 = VersionUtil::new(self.name.clone()); + let v2 = VersionUtil::new(other.name.clone()); + v1.partial_cmp(&v2) + } +} + +impl PartialEq for VersionInfo { + fn eq(&self, other: &Self) -> bool { + self.name == other.name + } +} + +impl Eq for VersionInfo {} + +impl std::hash::Hash for VersionInfo { + fn hash(&self, state: &mut H) { + self.name.hash(state); + } +} + +impl PartialOrd for VersionInfo { + fn partial_cmp(&self, other: &Self) -> Option { + Some(self.cmp(other)) + } +} + +impl Ord for VersionInfo { + fn cmp(&self, other: &Self) -> std::cmp::Ordering { + self.compare(other).unwrap_or(std::cmp::Ordering::Less) + } +} + +/// Strip unwanted parts from a version string using optional regex filters. 
+fn normalize_version( + raw: &str, + invalid_regex: Option<&str>, + include_regex: Option<&str>, +) -> String { + let mut result = raw.to_string(); + + if let Some(pattern) = invalid_regex { + if let Ok(re) = regex::Regex::new(pattern) { + result = re.replace_all(&result, "").to_string(); + } + } + + if let Some(pattern) = include_regex { + if let Ok(re) = regex::Regex::new(pattern) { + let matched: Vec<&str> = re.find_iter(&result).map(|m| m.as_str()).collect(); + result = matched.join(""); + } + } + + result.trim().to_string() +} + +/// A release from one hub, paired with its assets. +#[derive(Debug, Clone)] +pub struct VersionWrapper { + pub hub_uuid: String, + pub release: ReleaseData, + /// (release_index, asset_index) pairs + pub asset_indices: Vec<(usize, usize)>, +} + +/// Snapshot of a single version with all hub-provided wrappers. +#[derive(Debug, Clone)] +pub struct Version { + pub version_info: VersionInfo, + pub wrappers: Vec, +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_version_info_compare() { + let v1 = VersionInfo::new("1.0.0", None, None, HashMap::new()); + let v2 = VersionInfo::new("1.0.1", None, None, HashMap::new()); + assert!(v1 < v2); + assert!(v2 > v1); + } + + #[test] + fn test_version_info_equal() { + let v1 = VersionInfo::new("1.0.0", None, None, HashMap::new()); + let v2 = VersionInfo::new("1.0.0", None, None, HashMap::new()); + assert_eq!(v1, v2); + } + + #[test] + fn test_normalize_version_invalid_regex() { + let name = normalize_version("v1.0.0", Some("^v"), None); + assert_eq!(name, "1.0.0"); + } + + #[test] + fn test_normalize_version_include_regex() { + let name = normalize_version("Release 1.0.0 (stable)", None, Some(r"\d+\.\d+\.\d+")); + assert_eq!(name, "1.0.0"); + } + + #[test] + fn test_normalize_version_both_filters() { + let name = normalize_version("v1.0.0-beta", Some(r"-beta"), Some(r"\d+\.\d+\.\d+")); + assert_eq!(name, "1.0.0"); + } +} diff --git a/src/manager/version_map.rs 
b/src/manager/version_map.rs new file mode 100644 index 0000000..9d275cb --- /dev/null +++ b/src/manager/version_map.rs @@ -0,0 +1,192 @@ +use std::collections::HashMap; + +use super::version::{Version, VersionInfo, VersionWrapper}; +use crate::websdk::repo::data::release::ReleaseData; + +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub enum HubStatus { + Renewing, + Error, + /// Got latest release only (single entry per hub) + Single, + /// Got full release list + Full, +} + +/// In-memory version data for a single app, keyed by VersionInfo. +/// +/// Mirrors Kotlin's `VersionMap`. +#[derive(Debug)] +pub struct VersionMap { + pub invalid_version_regex: Option, + pub include_version_regex: Option, + /// Aggregated releases, grouped by normalized VersionInfo + entries: HashMap>, + pub hub_status: HashMap, + /// Cached sorted list, invalidated on mutation + sorted_cache: Option>, +} + +impl VersionMap { + pub fn new(invalid_regex: Option, include_regex: Option) -> Self { + Self { + invalid_version_regex: invalid_regex, + include_version_regex: include_regex, + entries: HashMap::new(), + hub_status: HashMap::new(), + sorted_cache: None, + } + } + + pub fn is_renewing(&self) -> bool { + self.hub_status.values().any(|s| *s == HubStatus::Renewing) + } + + pub fn mark_renewing(&mut self, hub_uuid: &str) { + self.hub_status + .insert(hub_uuid.to_string(), HubStatus::Renewing); + self.sorted_cache = None; + } + + pub fn set_error(&mut self, hub_uuid: &str) { + self.hub_status + .insert(hub_uuid.to_string(), HubStatus::Error); + } + + pub fn add_release_list(&mut self, hub_uuid: &str, releases: Vec) { + for (rel_idx, release) in releases.iter().enumerate() { + let info = self.make_version_info(&release.version_number); + let wrapper = VersionWrapper { + hub_uuid: hub_uuid.to_string(), + release: release.clone(), + asset_indices: (0..release.assets.len()).map(|i| (rel_idx, i)).collect(), + }; + self.entries.entry(info).or_default().push(wrapper); + } + self.hub_status + 
.insert(hub_uuid.to_string(), HubStatus::Full); + self.sorted_cache = None; + } + + pub fn add_single_release(&mut self, hub_uuid: &str, release: ReleaseData) { + let info = self.make_version_info(&release.version_number); + let wrapper = VersionWrapper { + hub_uuid: hub_uuid.to_string(), + asset_indices: (0..release.assets.len()).map(|i| (0, i)).collect(), + release, + }; + self.entries.entry(info).or_default().push(wrapper); + self.hub_status + .insert(hub_uuid.to_string(), HubStatus::Single); + self.sorted_cache = None; + } + + /// Returns versions sorted descending (newest first). + pub fn get_version_list(&mut self) -> &[Version] { + if self.sorted_cache.is_none() { + let mut versions: Vec = self + .entries + .iter() + .filter(|(info, _)| !info.name.is_empty()) + .map(|(info, wrappers)| Version { + version_info: info.clone(), + wrappers: wrappers.clone(), + }) + .collect(); + versions.sort_by(|a, b| b.version_info.cmp(&a.version_info)); + self.sorted_cache = Some(versions); + } + self.sorted_cache.as_deref().unwrap() + } + + fn make_version_info(&self, raw: &str) -> VersionInfo { + VersionInfo::new( + raw, + self.invalid_version_regex.as_deref(), + self.include_version_regex.as_deref(), + HashMap::new(), + ) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::websdk::repo::data::release::AssetData; + + fn make_release(version: &str) -> ReleaseData { + ReleaseData { + version_number: version.to_string(), + changelog: String::new(), + assets: vec![AssetData { + file_name: "app.apk".to_string(), + file_type: "apk".to_string(), + download_url: "https://example.com".to_string(), + }], + extra: None, + } + } + + #[test] + fn test_add_and_sort() { + let mut vm = VersionMap::new(None, None); + vm.add_release_list( + "hub1", + vec![ + make_release("1.0.0"), + make_release("2.0.0"), + make_release("1.5.0"), + ], + ); + let list = vm.get_version_list(); + assert_eq!(list[0].version_info.name, "2.0.0"); + assert_eq!(list[1].version_info.name, "1.5.0"); + 
assert_eq!(list[2].version_info.name, "1.0.0"); + } + + #[test] + fn test_single_release() { + let mut vm = VersionMap::new(None, None); + vm.add_single_release("hub1", make_release("3.0.0")); + let list = vm.get_version_list(); + assert_eq!(list.len(), 1); + assert_eq!(list[0].version_info.name, "3.0.0"); + assert_eq!(vm.hub_status["hub1"], HubStatus::Single); + } + + #[test] + fn test_hub_status_error() { + let mut vm = VersionMap::new(None, None); + vm.set_error("hub1"); + assert_eq!(vm.hub_status["hub1"], HubStatus::Error); + } + + #[test] + fn test_is_renewing() { + let mut vm = VersionMap::new(None, None); + assert!(!vm.is_renewing()); + vm.mark_renewing("hub1"); + assert!(vm.is_renewing()); + vm.set_error("hub1"); + assert!(!vm.is_renewing()); + } + + #[test] + fn test_dedup_versions_across_hubs() { + let mut vm = VersionMap::new(None, None); + vm.add_single_release("hub1", make_release("1.0.0")); + vm.add_single_release("hub2", make_release("1.0.0")); + // Same version from two hubs → merged under one VersionInfo key + let list = vm.get_version_list(); + assert_eq!(list.len(), 1); + assert_eq!(list[0].wrappers.len(), 2); + } + + #[test] + fn test_regex_filtering() { + let mut vm = VersionMap::new(Some("^v".to_string()), None); + vm.add_single_release("hub1", make_release("v1.2.3")); + let list = vm.get_version_list(); + assert_eq!(list[0].version_info.name, "1.2.3"); + } +} diff --git a/src/rpc.rs b/src/rpc.rs index d1e779c..162790c 100644 --- a/src/rpc.rs +++ b/src/rpc.rs @@ -1,3 +1,3 @@ pub mod client; -mod data; +pub mod data; pub mod server; diff --git a/src/rpc/client.rs b/src/rpc/client.rs index 3fc3068..94c6091 100644 --- a/src/rpc/client.rs +++ b/src/rpc/client.rs @@ -58,4 +58,20 @@ impl Client { }; self.client.request("get_releases", data).await } + + pub async fn get_download( + &self, + hub_uuid: &str, + app_data: BTreeMap<&str, &str>, + hub_data: BTreeMap<&str, &str>, + asset_index: &[i32], + ) -> Result, Error> { + let data = 
RpcDownloadInfoRequest { + hub_uuid, + app_data, + hub_data, + asset_index: asset_index.to_vec(), + }; + self.client.request("get_download", data).await + } } diff --git a/src/rpc/data.rs b/src/rpc/data.rs index 47f6b44..77f3a88 100644 --- a/src/rpc/data.rs +++ b/src/rpc/data.rs @@ -1,7 +1,8 @@ +use crate::downloader::{DownloadState, TaskInfo}; use jsonrpsee::core::traits::ToRpcParams; use serde::{Deserialize, Serialize}; use serde_json::value::to_raw_value; -use std::collections::BTreeMap; +use std::collections::{BTreeMap, HashMap}; #[derive(Serialize, Deserialize, Debug)] pub struct RpcInitRequest<'a> { @@ -39,3 +40,372 @@ impl ToRpcParams for RpcCloudConfigRequest<'_> { to_raw_value(&self).map(Some) } } + +// ============================================================================ +// Provider Registration RPC Data Structures +// ============================================================================ + +/// Request to register an external provider +#[derive(Serialize, Deserialize, Debug)] +pub struct RpcRegisterProviderRequest<'a> { + pub hub_uuid: &'a str, + pub url: &'a str, +} + +impl ToRpcParams for RpcRegisterProviderRequest<'_> { + fn to_rpc_params(self) -> Result>, serde_json::Error> { + to_raw_value(&self).map(Some) + } +} + +/// Request to get download info for an app +#[derive(Serialize, Deserialize, Debug)] +pub struct RpcDownloadInfoRequest<'a> { + pub hub_uuid: &'a str, + pub app_data: BTreeMap<&'a str, &'a str>, + pub hub_data: BTreeMap<&'a str, &'a str>, + pub asset_index: Vec, +} + +impl ToRpcParams for RpcDownloadInfoRequest<'_> { + fn to_rpc_params(self) -> Result>, serde_json::Error> { + to_raw_value(&self).map(Some) + } +} + +/// Download item data returned by get_download +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct DownloadItemData { + pub name: Option, + pub url: String, + #[serde(default)] + pub headers: Option>, + #[serde(default)] + pub cookies: Option>, +} + +// 
============================================================================ +// Downloader RPC Data Structures +// ============================================================================ + +/// Request to submit a download task +#[derive(Serialize, Deserialize, Debug)] +pub struct RpcDownloadRequest<'a> { + pub url: &'a str, + pub dest_path: &'a str, + #[serde(default)] + pub headers: Option>, + #[serde(default)] + pub cookies: Option>, + /// Hub UUID for routing to registered external downloaders. + /// When set, routes to external downloader; when None, uses default HTTP downloader. + #[serde(default)] + pub hub_uuid: Option, +} + +impl ToRpcParams for RpcDownloadRequest<'_> { + fn to_rpc_params(self) -> Result>, serde_json::Error> { + to_raw_value(&self).map(Some) + } +} + +/// Request to submit multiple download tasks +#[derive(Serialize, Deserialize, Debug)] +pub struct RpcDownloadBatchRequest { + pub tasks: Vec, +} + +#[derive(Serialize, Deserialize, Debug)] +pub struct RpcDownloadTask { + pub url: String, + pub dest_path: String, +} + +impl ToRpcParams for RpcDownloadBatchRequest { + fn to_rpc_params(self) -> Result>, serde_json::Error> { + to_raw_value(&self).map(Some) + } +} + +/// Response with task ID +#[derive(Serialize, Deserialize, Debug, Clone)] +pub struct RpcTaskIdResponse { + pub task_id: String, +} + +/// Response with multiple task IDs +#[derive(Serialize, Deserialize, Debug, Clone)] +pub struct RpcTaskIdsResponse { + pub task_ids: Vec, +} + +/// Request to query task status +#[derive(Serialize, Deserialize, Debug)] +pub struct RpcTaskStatusRequest<'a> { + pub task_id: &'a str, +} + +impl ToRpcParams for RpcTaskStatusRequest<'_> { + fn to_rpc_params(self) -> Result>, serde_json::Error> { + to_raw_value(&self).map(Some) + } +} + +/// Request to wait for task state change (long-polling) +#[derive(Serialize, Deserialize, Debug)] +pub struct RpcWaitForChangeRequest<'a> { + pub task_id: &'a str, + pub timeout_seconds: u64, +} + +impl 
ToRpcParams for RpcWaitForChangeRequest<'_> { + fn to_rpc_params(self) -> Result>, serde_json::Error> { + to_raw_value(&self).map(Some) + } +} + +/// Request to cancel a task +#[derive(Serialize, Deserialize, Debug)] +pub struct RpcCancelTaskRequest<'a> { + pub task_id: &'a str, +} + +impl ToRpcParams for RpcCancelTaskRequest<'_> { + fn to_rpc_params(self) -> Result>, serde_json::Error> { + to_raw_value(&self).map(Some) + } +} + +/// Request to pause a task +#[derive(Serialize, Deserialize, Debug)] +pub struct RpcPauseTaskRequest<'a> { + pub task_id: &'a str, +} + +impl ToRpcParams for RpcPauseTaskRequest<'_> { + fn to_rpc_params(self) -> Result>, serde_json::Error> { + to_raw_value(&self).map(Some) + } +} + +/// Request to resume a task +#[derive(Serialize, Deserialize, Debug)] +pub struct RpcResumeTaskRequest<'a> { + pub task_id: &'a str, +} + +impl ToRpcParams for RpcResumeTaskRequest<'_> { + fn to_rpc_params(self) -> Result>, serde_json::Error> { + to_raw_value(&self).map(Some) + } +} + +/// Request to query tasks by state +#[derive(Serialize, Deserialize, Debug)] +pub struct RpcTasksByStateRequest { + pub state: DownloadState, +} + +impl ToRpcParams for RpcTasksByStateRequest { + fn to_rpc_params(self) -> Result>, serde_json::Error> { + to_raw_value(&self).map(Some) + } +} + +/// Response with task information +pub type RpcTaskInfoResponse = TaskInfo; + +/// Response with multiple tasks +#[derive(Serialize, Deserialize, Debug, Clone)] +pub struct RpcTasksResponse { + pub tasks: Vec, +} + +// ============================================================================ +// Downloader Registration RPC Data Structures +// ============================================================================ + +/// Request to register an external downloader for a hub_uuid +#[derive(Serialize, Deserialize, Debug)] +pub struct RpcRegisterDownloaderRequest<'a> { + pub hub_uuid: &'a str, + pub rpc_url: &'a str, +} + +impl ToRpcParams for RpcRegisterDownloaderRequest<'_> { + fn 
to_rpc_params(self) -> Result>, serde_json::Error> { + to_raw_value(&self).map(Some) + } +} + +/// Request to unregister an external downloader for a hub_uuid +#[derive(Serialize, Deserialize, Debug)] +pub struct RpcUnregisterDownloaderRequest<'a> { + pub hub_uuid: &'a str, +} + +impl ToRpcParams for RpcUnregisterDownloaderRequest<'_> { + fn to_rpc_params(self) -> Result>, serde_json::Error> { + to_raw_value(&self).map(Some) + } +} + +// ============================================================================ +// Manager RPC Data Structures +// ============================================================================ + +use crate::database::models::app::AppRecord; +use crate::database::models::extra_hub::ExtraHubRecord; +use crate::database::models::hub::HubRecord; +use crate::manager::app_status::AppStatus; + +/// Response wrapping a list of apps with their current status. +#[derive(Serialize, Deserialize, Debug, Clone)] +pub struct AppWithStatus { + pub record: AppRecord, + pub status: AppStatus, +} + +/// Request to save/update an app record. +#[derive(Serialize, Deserialize, Debug)] +pub struct RpcSaveAppRequest { + pub record: AppRecord, +} + +/// Request to delete an app by record id. +#[derive(Serialize, Deserialize, Debug)] +pub struct RpcDeleteAppRequest { + pub record_id: String, +} + +/// Request to get a single app by record id. +#[derive(Serialize, Deserialize, Debug)] +pub struct RpcGetAppRequest { + pub record_id: String, +} + +/// Request to save/update a hub record. +#[derive(Serialize, Deserialize, Debug)] +pub struct RpcSaveHubRequest { + pub record: HubRecord, +} + +/// Request to delete a hub by UUID. +#[derive(Serialize, Deserialize, Debug)] +pub struct RpcDeleteHubRequest { + pub hub_uuid: String, +} + +/// Request to get a hub by UUID. +#[derive(Serialize, Deserialize, Debug)] +pub struct RpcGetHubRequest { + pub hub_uuid: String, +} + +/// Request to set the applications mode for a hub. 
+#[derive(Serialize, Deserialize, Debug)] +pub struct RpcSetApplicationsModeRequest { + pub hub_uuid: String, + pub enable: bool, +} + +/// Request to ignore/unignore an app in a hub. +#[derive(Serialize, Deserialize, Debug)] +pub struct RpcHubIgnoreAppRequest { + pub hub_uuid: String, + pub app_id: HashMap>, + pub ignore: bool, +} + +/// Request to set virtual (installed) apps list from Android. +#[derive(Serialize, Deserialize, Debug)] +pub struct RpcSetVirtualAppsRequest { + pub apps: Vec, +} + +/// Request to get app status. +#[derive(Serialize, Deserialize, Debug)] +pub struct RpcGetAppStatusRequest { + pub record_id: String, +} + +/// Request to save/update an ExtraHub record. +#[derive(Serialize, Deserialize, Debug)] +pub struct RpcSaveExtraHubRequest { + pub record: ExtraHubRecord, +} + +/// Request to get an ExtraHub by id. +#[derive(Serialize, Deserialize, Debug)] +pub struct RpcGetExtraHubRequest { + pub id: String, +} + +// ============================================================================ +// Android API / Notification Registration RPC Data Structures +// ============================================================================ + +/// Request to register the Kotlin Android API callback URL. +#[derive(Serialize, Deserialize, Debug)] +pub struct RpcRegisterAndroidApiRequest { + pub url: String, +} + +/// Request to register the Kotlin notification callback URL. +#[derive(Serialize, Deserialize, Debug)] +pub struct RpcRegisterNotificationRequest { + pub url: String, +} + +// ============================================================================ +// ExtraApp RPC Data Structures +// ============================================================================ + +use crate::database::models::extra_app::ExtraAppRecord; + +/// Request to get an ExtraApp record by app_id map. +#[derive(Serialize, Deserialize, Debug)] +pub struct RpcGetExtraAppRequest { + pub app_id: HashMap>, +} + +/// Request to save/update an ExtraApp record. 
+#[derive(Serialize, Deserialize, Debug)] +pub struct RpcSaveExtraAppRequest { + pub record: ExtraAppRecord, +} + +/// Request to delete an ExtraApp by database id. +#[derive(Serialize, Deserialize, Debug)] +pub struct RpcDeleteExtraAppRequest { + pub id: String, +} + +// ============================================================================ +// Cloud Config Manager RPC Data Structures +// ============================================================================ + +/// Request to apply a specific cloud hub/app config by UUID. +#[derive(Serialize, Deserialize, Debug)] +pub struct RpcCloudConfigApplyRequest { + pub uuid: String, +} + +/// Request to initialise the CloudConfigGetter with an API URL. +#[derive(Serialize, Deserialize, Debug)] +pub struct RpcCloudConfigInitRequest { + pub api_url: String, +} + +/// Request to update the auth map for a hub. +#[derive(Serialize, Deserialize, Debug)] +pub struct RpcUpdateHubAuthRequest { + pub hub_uuid: String, + pub auth: HashMap, +} + +impl ToRpcParams for RpcUpdateHubAuthRequest { + fn to_rpc_params(self) -> Result>, serde_json::Error> { + to_raw_value(&self).map(Some) + } +} diff --git a/src/rpc/server.rs b/src/rpc/server.rs index f12466d..6e1e54f 100644 --- a/src/rpc/server.rs +++ b/src/rpc/server.rs @@ -1,21 +1,87 @@ use super::data::*; -use crate::api as api_root; +use crate::cache::init_cache_manager_with_expire; +use crate::core::config::world::{init_world_list, world_list}; +use crate::database::get_db; +use crate::database::models::extra_hub::GLOBAL_HUB_ID; +use crate::downloader::{DownloadConfig, DownloadTaskManager}; +use crate::manager::android_api; +use crate::manager::app_manager::AppManager; +use crate::manager::cloud_config_getter::CloudConfigGetter; +use crate::manager::hub_manager::HubManager; +use crate::manager::notification; +use crate::manager::url_replace::apply_url_replace; use crate::websdk::cloud_rules::cloud_rules_manager::CloudRules; use crate::websdk::repo::api; -use 
jsonrpsee::server::{RpcModule, Server, ServerHandle}; +use jsonrpsee::server::{RpcModule, Server, ServerConfig, ServerHandle}; use jsonrpsee::types::{ErrorCode, ErrorObjectOwned}; +use once_cell::sync::OnceCell; use std::net::SocketAddr; use std::path::Path; use std::sync::atomic::{AtomicBool, Ordering}; use std::sync::Arc; use std::time::Duration; +use tokio::sync::RwLock; + +/// Global manager state initialised on first `init` RPC call. +static APP_MANAGER: OnceCell>> = OnceCell::new(); +static HUB_MANAGER: OnceCell>> = OnceCell::new(); +static CLOUD_CONFIG_GETTER: OnceCell>> = OnceCell::new(); + +fn get_app_manager() -> Option>> { + APP_MANAGER.get().cloned() +} + +fn get_hub_manager() -> Option>> { + HUB_MANAGER.get().cloned() +} + +fn get_cloud_config_getter() -> Option>> { + CLOUD_CONFIG_GETTER.get().cloned() +} + +fn manager_not_init_err() -> ErrorObjectOwned { + ErrorObjectOwned::owned( + ErrorCode::InternalError.code(), + "Manager not initialized. Call init first.", + None::, + ) +} + +fn map_manager_err(e: impl std::fmt::Display) -> ErrorObjectOwned { + ErrorObjectOwned::owned( + ErrorCode::InternalError.code(), + e.to_string(), + None::, + ) +} + +// Default 2GB size limit for WebSocket messages +// Can be overridden at runtime by setting GETTER_WS_MAX_MESSAGE_SIZE environment variable +// Example: GETTER_WS_MAX_MESSAGE_SIZE=1073741824 ./getter (for 1GB) +const DEFAULT_MAX_SIZE: u32 = 2 * 1024 * 1024 * 1024; // 2GB + +fn get_max_message_size() -> u32 { + // Allow runtime configuration via environment variable + match std::env::var("GETTER_WS_MAX_MESSAGE_SIZE") { + Ok(size_str) => size_str.parse().unwrap_or(DEFAULT_MAX_SIZE), + Err(_) => DEFAULT_MAX_SIZE, + } +} pub async fn run_server( addr: &str, is_running: Arc, ) -> Result<(String, ServerHandle), Box> { let addr = if addr.is_empty() { "127.0.0.1:0" } else { addr }; - let server = Server::builder().build(addr.parse::()?).await?; + let max_size = get_max_message_size(); + let config = 
ServerConfig::builder() + .max_request_body_size(max_size) + .max_response_body_size(max_size) + .build(); + let server = Server::builder() + .set_config(config) + .build(addr.parse::()?) + .await?; let mut module = RpcModule::new(()); // Register the shutdown method let run_flag = is_running.clone(); @@ -30,82 +96,134 @@ pub async fn run_server( let request = params.parse::()?; let data_dir = Path::new(request.data_path); let cache_dir = Path::new(request.cache_path); - api_root::init(data_dir, cache_dir, request.global_expire_time) - .await - .map(|_| true) - .map_err(|e| { - ErrorObjectOwned::owned( - ErrorCode::InternalError.code(), - "Internal error", - Some(e.to_string()), - ) - }) + // Initialize world list, cache, and database. + let world_list_path = data_dir.join(world_list::WORLD_CONFIG_LIST_NAME); + init_world_list(&world_list_path).await.map_err(|e| { + ErrorObjectOwned::owned( + ErrorCode::InternalError.code(), + "Internal error", + Some(e.to_string()), + ) + })?; + let local_cache_path = cache_dir.join("local_cache"); + init_cache_manager_with_expire(local_cache_path.as_path(), request.global_expire_time) + .await; + crate::database::init_db(data_dir).map_err(|e| { + ErrorObjectOwned::owned( + ErrorCode::InternalError.code(), + "Internal error", + Some(e.to_string()), + ) + })?; + + // Initialize managers (idempotent: only on first call) + if APP_MANAGER.get().is_none() { + let hub_mgr = HubManager::load().map_err(map_manager_err)?; + let app_mgr = AppManager::load().map_err(map_manager_err)?; + let _ = HUB_MANAGER.set(Arc::new(RwLock::new(hub_mgr))); + let _ = APP_MANAGER.set(Arc::new(RwLock::new(app_mgr))); + } + + Ok::(true) })?; module.register_async_method( "check_app_available", |params, _context, _extensions| async move { let request = params.parse::()?; - if let Some(result) = + let result = api::check_app_available(request.hub_uuid, &request.app_data, &request.hub_data) .await - { - Ok(result) - } else { - Err(ErrorObjectOwned::owned( - 
ErrorCode::ParseError.code(), - "Parse params error", - Some(params.as_str().unwrap_or("None").to_string()), - )) - } + .unwrap_or(false); + Ok::(result) }, )?; module.register_async_method( "get_latest_release", |params, _context, _extensions| async move { - if let Ok(request) = params.parse::() { - if let Some(result) = - api::get_latest_release(request.hub_uuid, &request.app_data, &request.hub_data) - .await - { - Ok(result) - } else { - Err(ErrorObjectOwned::borrowed( - ErrorCode::InvalidParams.code(), - "Invalid params", - None, - )) - } + let request = params.parse::()?; + if let Some(result) = + api::get_latest_release(request.hub_uuid, &request.app_data, &request.hub_data) + .await + { + Ok(result) } else { Err(ErrorObjectOwned::owned( - ErrorCode::ParseError.code(), - "Parse params error", - Some(params.as_str().unwrap_or("None").to_string()), + -32001, + "No release found", + None::, )) } }, )?; module.register_async_method("get_releases", |params, _context, _extensions| async move { - if let Ok(request) = params.parse::() { - if let Some(result) = - api::get_releases(request.hub_uuid, &request.app_data, &request.hub_data).await - { - Ok(result) - } else { - Err(ErrorObjectOwned::borrowed( - ErrorCode::InvalidParams.code(), - "Invalid params", - None, - )) - } + let request = params.parse::()?; + if let Some(result) = + api::get_releases(request.hub_uuid, &request.app_data, &request.hub_data).await + { + Ok(result) } else { Err(ErrorObjectOwned::owned( - ErrorCode::ParseError.code(), - "Parse params error", - Some(params.as_str().unwrap_or("None").to_string()), + -32001, + "No releases found", + None::, )) } })?; + // register_provider: Dynamically register an external provider (e.g., Kotlin hub via HTTP JSON-RPC) + module.register_async_method( + "register_provider", + |params, _context, _extensions| async move { + let request = params.parse::()?; + api::add_outside_provider(request.hub_uuid, request.url); + Ok::(true) + }, + )?; + + // get_download: Get 
download info for an app's asset. + // After retrieving download URLs from the provider, applies URL replacement + // rules from ExtraHub configs (GLOBAL first, then hub-specific), mirroring + // Kotlin's URLReplace.replaceURL() in the download pipeline. + module.register_async_method("get_download", |params, _context, _extensions| async move { + let request = params.parse::()?; + let mut items = api::get_download( + request.hub_uuid, + &request.app_data, + &request.hub_data, + &request.asset_index, + ) + .await + .ok_or_else(|| ErrorObjectOwned::owned(-32001, "No download info found", None::))?; + + // Load URL-replace rules from ExtraHub configs. + // Priority: hub-specific rule overrides GLOBAL rule. + let db = get_db(); + let global_extra = db.find_extra_hub(GLOBAL_HUB_ID).unwrap_or(None); + let hub_extra = db.find_extra_hub(request.hub_uuid).unwrap_or(None); + + // Apply rules to every download URL in the result. + for item in &mut items { + // Apply GLOBAL rule first (lower priority) + if let Some(ref g) = global_extra { + item.url = apply_url_replace( + &item.url, + g.url_replace_search.as_deref(), + g.url_replace_string.as_deref(), + ); + } + // Apply hub-specific rule second (higher priority, may override) + if let Some(ref h) = hub_extra { + item.url = apply_url_replace( + &item.url, + h.url_replace_search.as_deref(), + h.url_replace_string.as_deref(), + ); + } + } + + Ok::, ErrorObjectOwned>(items) + })?; + module.register_async_method( "get_cloud_config", |params, _context, _extensions| async move { @@ -128,6 +246,619 @@ pub async fn run_server( } }, )?; + + // ======================================================================== + // Downloader RPC Methods + // ======================================================================== + + // Create download task manager with HubDispatchDownloader + let download_config = DownloadConfig::from_env(); + let http_downloader = crate::downloader::create_downloader(&download_config); + let dispatcher = 
crate::downloader::HubDispatchDownloader::new(http_downloader); + + // Clone dispatcher for task manager (HubDispatchDownloader is cheap to clone via Arc internally) + let task_manager = Arc::new(DownloadTaskManager::new(Box::new(dispatcher.clone()))); + + // download_submit: Submit a single download task + let manager_clone = task_manager.clone(); + module.register_async_method("download_submit", move |params, _context, _extensions| { + let manager = manager_clone.clone(); + async move { + let request = params.parse::()?; + match manager.submit_task_with_options( + request.url, + request.dest_path, + request.headers, + request.cookies, + request.hub_uuid, + ) { + Ok(task_id) => Ok(RpcTaskIdResponse { task_id }), + Err(e) => Err(ErrorObjectOwned::owned( + ErrorCode::InternalError.code(), + "Failed to submit download task", + Some(e.message), + )), + } + } + })?; + + // download_submit_batch: Submit multiple download tasks + let manager_clone = task_manager.clone(); + module.register_async_method( + "download_submit_batch", + move |params, _context, _extensions| { + let manager = manager_clone.clone(); + async move { + let request = params.parse::()?; + let tasks: Vec<(String, String)> = request + .tasks + .into_iter() + .map(|t| (t.url, t.dest_path)) + .collect(); + + match manager.submit_batch(tasks) { + Ok(task_ids) => Ok(RpcTaskIdsResponse { task_ids }), + Err(e) => Err(ErrorObjectOwned::owned( + ErrorCode::InternalError.code(), + "Failed to submit batch download tasks", + Some(e.message), + )), + } + } + }, + )?; + + // download_get_status: Get status of a download task + let manager_clone = task_manager.clone(); + module.register_async_method( + "download_get_status", + move |params, _context, _extensions| { + let manager = manager_clone.clone(); + async move { + let request = params.parse::()?; + match manager.get_task(request.task_id) { + Ok(task_info) => Ok(task_info), + Err(e) => Err(ErrorObjectOwned::owned( + ErrorCode::InvalidParams.code(), + "Task not 
found", + Some(e.message), + )), + } + } + }, + )?; + + // download_wait_for_change: Long-polling for task state change + let manager_clone = task_manager.clone(); + module.register_async_method( + "download_wait_for_change", + move |params, _context, _extensions| { + let manager = manager_clone.clone(); + async move { + let request = params.parse::()?; + let timeout = Duration::from_secs(request.timeout_seconds); + + match manager.wait_for_change(request.task_id, timeout).await { + Ok(task_info) => Ok(task_info), + Err(e) => Err(ErrorObjectOwned::owned( + ErrorCode::InvalidParams.code(), + "Failed to wait for task change", + Some(e.message), + )), + } + } + }, + )?; + + // download_cancel: Cancel a download task + let manager_clone = task_manager.clone(); + module.register_async_method("download_cancel", move |params, _context, _extensions| { + let manager = manager_clone.clone(); + async move { + let request = params.parse::()?; + match manager.cancel_task(request.task_id) { + Ok(_) => Ok(true), + Err(e) => Err(ErrorObjectOwned::owned( + ErrorCode::InternalError.code(), + "Failed to cancel task", + Some(e.message), + )), + } + } + })?; + + // download_pause: Pause a download task + let manager_clone = task_manager.clone(); + module.register_async_method("download_pause", move |params, _context, _extensions| { + let manager = manager_clone.clone(); + async move { + let request = params.parse::()?; + match manager.pause_task(request.task_id).await { + Ok(_) => Ok(true), + Err(e) => Err(ErrorObjectOwned::owned( + ErrorCode::InternalError.code(), + "Failed to pause task", + Some(e.message), + )), + } + } + })?; + + // download_resume: Resume a paused download task + let manager_clone = task_manager.clone(); + module.register_async_method("download_resume", move |params, _context, _extensions| { + let manager = manager_clone.clone(); + async move { + let request = params.parse::()?; + match manager.resume_task(request.task_id).await { + Ok(_) => Ok(true), + Err(e) => 
Err(ErrorObjectOwned::owned( + ErrorCode::InternalError.code(), + "Failed to resume task", + Some(e.message), + )), + } + } + })?; + + // download_get_capabilities: Get downloader capabilities + let manager_clone = task_manager.clone(); + module.register_method( + "download_get_capabilities", + move |_, _context, _extensions| { + let caps = manager_clone.get_capabilities(); + Ok::<_, ErrorObjectOwned>(caps.clone()) + }, + )?; + + // download_get_all_tasks: Get all tasks + let manager_clone = task_manager.clone(); + module.register_async_method("download_get_all_tasks", move |_, _context, _extensions| { + let manager = manager_clone.clone(); + async move { + Ok::(RpcTasksResponse { + tasks: manager.get_all_tasks(), + }) + } + })?; + + // download_get_active_tasks: Get active tasks + let manager_clone = task_manager.clone(); + module.register_async_method( + "download_get_active_tasks", + move |_, _context, _extensions| { + let manager = manager_clone.clone(); + async move { + Ok::(RpcTasksResponse { + tasks: manager.get_active_tasks(), + }) + } + }, + )?; + + // download_get_tasks_by_state: Get tasks by state + let manager_clone = task_manager.clone(); + module.register_async_method( + "download_get_tasks_by_state", + move |params, _context, _extensions| { + let manager = manager_clone.clone(); + async move { + let request = params.parse::()?; + Ok::(RpcTasksResponse { + tasks: manager.get_tasks_by_state(request.state), + }) + } + }, + )?; + + // register_downloader: Register an external downloader for a hub_uuid + let dispatcher_clone = dispatcher.clone(); + module.register_async_method( + "register_downloader", + move |params, _context, _extensions| { + let dispatcher = dispatcher_clone.clone(); + async move { + let request = params.parse::()?; + let external_downloader = Box::new(crate::downloader::ExternalRpcDownloader::new( + request.rpc_url.to_string(), + )); + dispatcher.register(request.hub_uuid, external_downloader); + Ok::(true) + } + }, + )?; + + // 
unregister_downloader: Unregister an external downloader for a hub_uuid + let dispatcher_clone = dispatcher.clone(); + module.register_async_method( + "unregister_downloader", + move |params, _context, _extensions| { + let dispatcher = dispatcher_clone.clone(); + async move { + let request = params.parse::()?; + dispatcher.unregister(request.hub_uuid); + Ok::(true) + } + }, + )?; + + // ======================================================================== + // App Manager RPC Methods + // ======================================================================== + + // manager_get_apps: Get all saved apps + module.register_async_method("manager_get_apps", |_, _, _| async move { + let mgr = get_app_manager().ok_or_else(manager_not_init_err)?; + let apps = mgr.read().await.get_saved_apps().await; + Ok::, ErrorObjectOwned>(apps) + })?; + + // manager_save_app: Insert or update an app record + module.register_async_method("manager_save_app", |params, _, _| async move { + let request = params.parse::()?; + let mgr = get_app_manager().ok_or_else(manager_not_init_err)?; + let saved = mgr + .write() + .await + .save_app(request.record) + .await + .map_err(map_manager_err)?; + Ok::(saved) + })?; + + // manager_delete_app: Delete an app by record id + module.register_async_method("manager_delete_app", |params, _, _| async move { + let request = params.parse::()?; + let mgr = get_app_manager().ok_or_else(manager_not_init_err)?; + let deleted = mgr + .write() + .await + .remove_app(&request.record_id) + .await + .map_err(map_manager_err)?; + Ok::(deleted) + })?; + + // manager_get_app_status: Get AppStatus for a specific app + module.register_async_method("manager_get_app_status", |params, _, _| async move { + let request = params.parse::()?; + let mgr = get_app_manager().ok_or_else(manager_not_init_err)?; + let status = mgr.write().await.get_app_status(&request.record_id).await; + Ok::(status) + })?; + + // manager_set_virtual_apps: Set installed (virtual) apps from Android 
+ module.register_async_method("manager_set_virtual_apps", |params, _, _| async move { + let request = params.parse::()?; + let mgr = get_app_manager().ok_or_else(manager_not_init_err)?; + mgr.read().await.set_virtual_apps(request.apps).await; + Ok::(true) + })?; + + // manager_renew_all: Trigger a full update check for all apps + module.register_async_method("manager_renew_all", |_, _, _| async move { + let app_mgr = get_app_manager().ok_or_else(manager_not_init_err)?; + let hub_mgr = get_hub_manager().ok_or_else(manager_not_init_err)?; + let hubs = hub_mgr.read().await.get_hub_list().await; + app_mgr.read().await.renew_all(&hubs, None).await; + Ok::(true) + })?; + + // manager_check_invalid_applications: Return record IDs of apps whose configured + // hub UUIDs are all unknown (no valid hub found). Mirrors Kotlin's + // AppManager.check_invalid_applications logic. + module.register_async_method("manager_check_invalid_applications", |_, _, _| async move { + let app_mgr = get_app_manager().ok_or_else(manager_not_init_err)?; + let hub_mgr = get_hub_manager().ok_or_else(manager_not_init_err)?; + let hubs = hub_mgr.read().await.get_hub_list().await; + let known_uuids: Vec = hubs.into_iter().map(|h| h.uuid).collect(); + let mgr = app_mgr.read().await; + let invalid_ids = mgr.check_invalid_applications(&known_uuids).await; + // Notify Kotlin UI about each invalid app so it can update status. 
+ for record_id in &invalid_ids { + if let Some(app) = mgr.get_app(record_id).await { + notification::notify_if_registered(notification::ManagerEvent::AppStatusChanged { + record_id: record_id.clone(), + app_id: app.app_id.clone(), + old_status: crate::manager::app_status::AppStatus::AppLatest, + new_status: crate::manager::app_status::AppStatus::AppInactive, + }) + .await; + } + } + Ok::, ErrorObjectOwned>(invalid_ids) + })?; + + // ======================================================================== + // Hub Manager RPC Methods + // ======================================================================== + + // manager_get_hubs: Get all hubs + module.register_async_method("manager_get_hubs", |_, _, _| async move { + let mgr = get_hub_manager().ok_or_else(manager_not_init_err)?; + let hubs = mgr.read().await.get_hub_list().await; + Ok::, ErrorObjectOwned>(hubs) + })?; + + // manager_save_hub: Insert or update a hub + module.register_async_method("manager_save_hub", |params, _, _| async move { + let request = params.parse::()?; + let mgr = get_hub_manager().ok_or_else(manager_not_init_err)?; + mgr.write() + .await + .upsert_hub(request.record) + .await + .map_err(map_manager_err)?; + Ok::(true) + })?; + + // manager_update_hub_auth: Replace the auth map for a hub and persist. 
+ module.register_async_method("manager_update_hub_auth", |params, _, _| async move { + let request = params.parse::()?; + let mgr = get_hub_manager().ok_or_else(manager_not_init_err)?; + let updated = mgr + .read() + .await + .update_auth(&request.hub_uuid, request.auth) + .await + .map_err(map_manager_err)?; + Ok::(updated) + })?; + + // manager_delete_hub: Delete a hub by UUID + module.register_async_method("manager_delete_hub", |params, _, _| async move { + let request = params.parse::()?; + let mgr = get_hub_manager().ok_or_else(manager_not_init_err)?; + let deleted = mgr + .write() + .await + .remove_hub(&request.hub_uuid) + .await + .map_err(map_manager_err)?; + Ok::(deleted) + })?; + + // manager_hub_ignore_app: Add or remove an app from a hub's ignore list + module.register_async_method("manager_hub_ignore_app", |params, _, _| async move { + let request = params.parse::()?; + let mgr = get_hub_manager().ok_or_else(manager_not_init_err)?; + let mut guard = mgr.write().await; + let mut hub = guard.get_hub(&request.hub_uuid).await.ok_or_else(|| { + ErrorObjectOwned::owned( + ErrorCode::InvalidParams.code(), + "Hub not found", + None::, + ) + })?; + if request.ignore { + if !hub.user_ignore_app_id_list.contains(&request.app_id) { + hub.user_ignore_app_id_list.push(request.app_id); + } + } else { + hub.user_ignore_app_id_list + .retain(|id| id != &request.app_id); + } + guard.upsert_hub(hub).await.map_err(map_manager_err)?; + Ok::(true) + })?; + + // manager_set_applications_mode: Enable/disable auto app discovery for a hub + module.register_async_method("manager_set_applications_mode", |params, _, _| async move { + let request = params.parse::()?; + let mgr = get_hub_manager().ok_or_else(manager_not_init_err)?; + let mut guard = mgr.write().await; + let mut hub = guard.get_hub(&request.hub_uuid).await.ok_or_else(|| { + ErrorObjectOwned::owned( + ErrorCode::InvalidParams.code(), + "Hub not found", + None::, + ) + })?; + hub.applications_mode = if request.enable 
{ 1 } else { 0 }; + guard.upsert_hub(hub).await.map_err(map_manager_err)?; + Ok::(true) + })?; + + // ======================================================================== + // ExtraHub RPC Methods + // ======================================================================== + + // manager_get_extra_hubs: Get all extra hub configs + module.register_async_method("manager_get_extra_hubs", |_, _, _| async move { + let extra_hubs = crate::database::get_db() + .load_extra_hubs() + .map_err(map_manager_err)?; + Ok::, ErrorObjectOwned>(extra_hubs) + })?; + + // manager_save_extra_hub: Insert or update an extra hub config + module.register_async_method("manager_save_extra_hub", |params, _, _| async move { + let request = params.parse::()?; + crate::database::get_db() + .upsert_extra_hub(&request.record) + .map_err(map_manager_err)?; + Ok::(true) + })?; + + // manager_delete_extra_hub: Delete an extra hub by id + module.register_async_method("manager_delete_extra_hub", |params, _, _| async move { + let request = params.parse::()?; + let deleted = crate::database::get_db() + .delete_extra_hub(&request.id) + .map_err(map_manager_err)?; + Ok::(deleted) + })?; + + // ======================================================================== + // Android API / Notification Registration RPC Methods + // ======================================================================== + + // register_android_api: Register Kotlin's Android API callback URL + module.register_async_method("register_android_api", |params, _, _| async move { + let request = params.parse::()?; + android_api::set_android_api(request.url); + Ok::(true) + })?; + + // register_notification: Register Kotlin's notification callback URL + module.register_async_method("register_notification", |params, _, _| async move { + let request = params.parse::()?; + notification::set_notification(request.url); + Ok::(true) + })?; + + // ======================================================================== + // ExtraApp RPC 
Methods + // ======================================================================== + + // manager_get_extra_app_by_app_id: Get ExtraApp record by app_id map + module.register_async_method( + "manager_get_extra_app_by_app_id", + |params, _, _| async move { + let request = params.parse::()?; + let record = crate::database::get_db() + .get_extra_app_by_app_id(&request.app_id) + .map_err(map_manager_err)?; + Ok::, ErrorObjectOwned>( + record, + ) + }, + )?; + + // manager_save_extra_app: Insert or update an ExtraApp record + module.register_async_method("manager_save_extra_app", |params, _, _| async move { + let request = params.parse::()?; + crate::database::get_db() + .upsert_extra_app(&request.record) + .map_err(map_manager_err)?; + Ok::(true) + })?; + + // manager_delete_extra_app: Delete an ExtraApp by database id + module.register_async_method("manager_delete_extra_app", |params, _, _| async move { + let request = params.parse::()?; + let deleted = crate::database::get_db() + .delete_extra_app(&request.id) + .map_err(map_manager_err)?; + Ok::(deleted) + })?; + + // ======================================================================== + // Cloud Config Manager RPC Methods + // ======================================================================== + + // cloud_config_init: Initialise or re-initialise the CloudConfigGetter with an API URL + module.register_async_method("cloud_config_init", |params, _, _| async move { + let request = params.parse::()?; + let getter = CloudConfigGetter::new(request.api_url); + let _ = CLOUD_CONFIG_GETTER.set(Arc::new(RwLock::new(getter))); + Ok::(true) + })?; + + // cloud_config_renew: Download and cache the latest cloud config + module.register_async_method("cloud_config_renew", |_, _, _| async move { + let getter = get_cloud_config_getter().ok_or_else(|| { + ErrorObjectOwned::owned( + ErrorCode::InternalError.code(), + "CloudConfigGetter not initialised. 
Call cloud_config_init first.", + None::, + ) + })?; + getter.read().await.renew().await.map_err(map_manager_err)?; + Ok::(true) + })?; + + // cloud_config_get_app_list: Return all available app configs from cache + module.register_async_method("cloud_config_get_app_list", |_, _, _| async move { + let getter = get_cloud_config_getter().ok_or_else(|| { + ErrorObjectOwned::owned( + ErrorCode::InternalError.code(), + "CloudConfigGetter not initialised.", + None::, + ) + })?; + let list = getter.read().await.app_config_list().await; + Ok::, ErrorObjectOwned>(list) + })?; + + // cloud_config_get_hub_list: Return all available hub configs from cache + module.register_async_method("cloud_config_get_hub_list", |_, _, _| async move { + let getter = get_cloud_config_getter().ok_or_else(|| { + ErrorObjectOwned::owned( + ErrorCode::InternalError.code(), + "CloudConfigGetter not initialised.", + None::, + ) + })?; + let list = getter.read().await.hub_config_list().await; + Ok::, ErrorObjectOwned>(list) + })?; + + // cloud_config_apply_app: Apply a cloud app config by UUID + module.register_async_method("cloud_config_apply_app", |params, _, _| async move { + let request = params.parse::()?; + let getter = get_cloud_config_getter().ok_or_else(|| { + ErrorObjectOwned::owned( + ErrorCode::InternalError.code(), + "CloudConfigGetter not initialised.", + None::, + ) + })?; + let app_mgr = get_app_manager().ok_or_else(manager_not_init_err)?; + let hub_mgr = get_hub_manager().ok_or_else(manager_not_init_err)?; + getter + .read() + .await + .apply_app_config( + &request.uuid, + &mut *app_mgr.write().await, + &mut *hub_mgr.write().await, + ) + .await + .map_err(map_manager_err)?; + Ok::(true) + })?; + + // cloud_config_apply_hub: Apply a cloud hub config by UUID + module.register_async_method("cloud_config_apply_hub", |params, _, _| async move { + let request = params.parse::()?; + let getter = get_cloud_config_getter().ok_or_else(|| { + ErrorObjectOwned::owned( + 
ErrorCode::InternalError.code(), + "CloudConfigGetter not initialised.", + None::, + ) + })?; + let hub_mgr = get_hub_manager().ok_or_else(manager_not_init_err)?; + getter + .read() + .await + .apply_hub_config(&request.uuid, &mut *hub_mgr.write().await) + .await + .map_err(map_manager_err)?; + Ok::(true) + })?; + + // cloud_config_renew_all: Bulk-update all installed apps/hubs from cloud + module.register_async_method("cloud_config_renew_all", |_, _, _| async move { + let getter = get_cloud_config_getter().ok_or_else(|| { + ErrorObjectOwned::owned( + ErrorCode::InternalError.code(), + "CloudConfigGetter not initialised.", + None::, + ) + })?; + let app_mgr = get_app_manager().ok_or_else(manager_not_init_err)?; + let hub_mgr = get_hub_manager().ok_or_else(manager_not_init_err)?; + getter + .read() + .await + .renew_all_from_cloud(&mut *app_mgr.write().await, &mut *hub_mgr.write().await) + .await + .map_err(map_manager_err)?; + Ok::(true) + })?; + let addr = server.local_addr()?; let handle = server.start(module); tokio::spawn(handle.clone().stopped()); @@ -387,4 +1118,138 @@ mod tests { assert!(!config.hub_config_list.is_empty()); handle.stop().unwrap(); } + + // ======================================================================== + // WebSocket and message size tests + // ======================================================================== + + use jsonrpsee::ws_client::WsClientBuilder; + use serial_test::serial; + + /// Generate a random ASCII string of given byte length (not compressible). + /// Uses printable ASCII range (0x21-0x7e) to avoid JSON escape overhead. 
+ fn generate_random_string(size: usize) -> String { + use rand::RngExt; + let mut rng = rand::rng(); + let mut buf = vec![0u8; size]; + rng.fill(&mut buf[..]); + // Map each byte to printable ASCII (0x21..=0x7e, 94 chars), avoid '"' and '\\' + for b in buf.iter_mut() { + *b = match (*b % 92) + 0x21 { + b'"' => b'a', + b'\\' => b'b', + v => v, + }; + } + // SAFETY: all bytes are valid ASCII + unsafe { String::from_utf8_unchecked(buf) } + } + + /// Helper: start a minimal RPC server with "echo_data" method for size testing. + async fn start_test_server_with_config(max_size: u32) -> (String, ServerHandle) { + let config = ServerConfig::builder() + .max_request_body_size(max_size) + .max_response_body_size(max_size) + .build(); + let server = jsonrpsee::server::Server::builder() + .set_config(config) + .build("127.0.0.1:0".parse::().unwrap()) + .await + .unwrap(); + let mut module = RpcModule::new(()); + module + .register_method("echo_data", |params, _, _| { + let data: String = params.one()?; + Ok::(data) + }) + .unwrap(); + module.register_method("ping", |_, _, _| "pong").unwrap(); + let addr = server.local_addr().unwrap(); + let handle = server.start(module); + (format!("ws://{}", addr), handle) + } + + #[tokio::test] + async fn test_ws_client_connection() { + let (url, handle) = run_server("", Arc::new(AtomicBool::new(true))) + .await + .unwrap(); + let ws_url = url.replace("http://", "ws://"); + let max_size = get_max_message_size(); + let client = WsClientBuilder::default() + .max_request_size(max_size) + .max_response_size(max_size) + .build(&ws_url) + .await + .unwrap(); + let response: String = client.request("ping", rpc_params![]).await.unwrap(); + assert_eq!(response, "pong"); + handle.stop().unwrap(); + } + + #[tokio::test] + async fn test_ws_large_message_50mb() { + const SIZE: usize = 50 * 1024 * 1024; // 50MB + let max_size = DEFAULT_MAX_SIZE; + let (ws_url, handle) = start_test_server_with_config(max_size).await; + let client = 
WsClientBuilder::default() + .max_request_size(max_size) + .max_response_size(max_size) + .build(&ws_url) + .await + .unwrap(); + let data = generate_random_string(SIZE); + let response: String = client + .request("echo_data", rpc_params![&data]) + .await + .unwrap(); + assert_eq!(response.len(), data.len()); + handle.stop().unwrap(); + } + + #[tokio::test] + #[serial] + async fn test_ws_env_var_limits_message_size() { + const ONE_MB: u32 = 1024 * 1024; + const TWO_MB: usize = 2 * 1024 * 1024; + + // Set env var to 1MB limit + // SAFETY: This test is marked #[serial] so no other tests run concurrently + unsafe { std::env::set_var("GETTER_WS_MAX_MESSAGE_SIZE", ONE_MB.to_string()) }; + assert_eq!(get_max_message_size(), ONE_MB); + + let (url, handle) = run_server("", Arc::new(AtomicBool::new(true))) + .await + .unwrap(); + let ws_url = url.replace("http://", "ws://"); + + // Client allows large messages, but server should reject + let client = WsClientBuilder::default() + .max_request_size(u32::MAX) + .max_response_size(u32::MAX) + .build(&ws_url) + .await + .unwrap(); + + // Verify ping still works (small message) + let response: String = client.request("ping", rpc_params![]).await.unwrap(); + assert_eq!(response, "pong"); + + // Send 2MB data via init request, should be rejected by 1MB server limit + let large_data = generate_random_string(TWO_MB); + let params = RpcInitRequest { + data_path: &large_data, + cache_path: "/tmp/cache", + global_expire_time: 3600, + }; + let response: Result = client.request("init", params).await; + assert!( + response.is_err(), + "2MB request should be rejected by 1MB server limit" + ); + + handle.stop().unwrap(); + // SAFETY: This test is marked #[serial] so no other tests run concurrently + unsafe { std::env::remove_var("GETTER_WS_MAX_MESSAGE_SIZE") }; + } } diff --git a/src/utils/http.rs b/src/utils/http.rs index 923e792..a1894fb 100644 --- a/src/utils/http.rs +++ b/src/utils/http.rs @@ -152,6 +152,7 @@ fn https_config() -> Result, 
HttpsCo .with_safe_default_protocol_versions() .map_err(|e| HttpsConfigError { error: Box::new(e) })? .with_platform_verifier() + .map_err(|e| HttpsConfigError { error: Box::new(e) })? .with_no_client_auth(); } #[cfg(all(feature = "webpki-roots", not(feature = "rustls-platform-verifier")))] @@ -240,7 +241,7 @@ mod tests { #[tokio::test] async fn test_https_get() { - let url = "https://example.com".parse().unwrap(); + let url = "https://github.com".parse().unwrap(); let result = https_get(url, &HashMap::new()).await; assert!(result.is_ok()); assert!(!result.unwrap().body.unwrap().is_empty()); @@ -255,14 +256,14 @@ mod tests { #[tokio::test] async fn test_https_get_status() { - let url = "https://httpstat.us/418".parse().unwrap(); + let url = "https://mockhttp.org/status/418".parse().unwrap(); let result = https_get(url, &HashMap::new()).await; assert_eq!(result.unwrap().status, 418); } #[tokio::test] async fn test_https_head() { - let url = "https://example.com".parse().unwrap(); + let url = "https://github.com".parse().unwrap(); let result = https_head(url, &HashMap::new()).await; assert!(result.is_ok()); assert!(result.unwrap().body.is_none()); diff --git a/src/utils/versioning.rs b/src/utils/versioning.rs index d88a80f..c25d819 100644 --- a/src/utils/versioning.rs +++ b/src/utils/versioning.rs @@ -1,6 +1,6 @@ use std::cmp::Ordering; -use version_compare; +use libversion_sys; use once_cell::sync::Lazy; use regex::Regex; @@ -26,29 +26,28 @@ impl Version { } pub fn get_valid_version(&self) -> Option { - let version_string = VERSION_NUMBER_STRICT_MATCH_REGEX + VERSION_NUMBER_STRICT_MATCH_REGEX .find(&self.string) .or_else(|| VERSION_NUMBER_MATCH_REGEX.find(&self.string)) - .map(|match_str| match_str.as_str()); - version_string.and_then(|version_string| { - version_compare::Version::from(version_string).map(|v| v.to_string()) - }) + .map(|match_str| match_str.as_str().to_string()) } } impl PartialEq for Version { fn eq(&self, other: &Self) -> bool { - let version = 
version_compare::Version::from(self.string.as_str()); - let other_version = version_compare::Version::from(other.string.as_str()); - version == other_version + match (self.get_valid_version(), other.get_valid_version()) { + (Some(v1), Some(v2)) => libversion_sys::compare(&v1, &v2) == Ordering::Equal, + _ => false, + } } } impl PartialOrd for Version { fn partial_cmp(&self, other: &Self) -> Option { - let version = version_compare::Version::from(self.string.as_str()); - let other_version = version_compare::Version::from(other.string.as_str()); - version.partial_cmp(&other_version) + match (self.get_valid_version(), other.get_valid_version()) { + (Some(v1), Some(v2)) => Some(libversion_sys::compare(&v1, &v2)), + _ => None, + } } } diff --git a/src/websdk/cloud_rules/cloud_rules_manager.rs b/src/websdk/cloud_rules/cloud_rules_manager.rs index ef1ef2d..17aaebf 100644 --- a/src/websdk/cloud_rules/cloud_rules_manager.rs +++ b/src/websdk/cloud_rules/cloud_rules_manager.rs @@ -30,7 +30,7 @@ impl CloudRules { } } - pub fn get_config_list(&self) -> ConfigListViewer { + pub fn get_config_list(&self) -> ConfigListViewer<'_> { self._config_list .as_ref() .map_or_else(ConfigListViewer::default, |config_list| { diff --git a/src/websdk/cloud_rules/data/app_item.rs b/src/websdk/cloud_rules/data/app_item.rs index 5eb1de8..dca31c9 100644 --- a/src/websdk/cloud_rules/data/app_item.rs +++ b/src/websdk/cloud_rules/data/app_item.rs @@ -20,7 +20,7 @@ use std::collections::HashMap; /// } /// ``` -#[derive(Serialize, Deserialize, Debug, Clone)] +#[derive(Serialize, Deserialize, Debug, Clone, PartialEq)] pub struct AppItem { #[serde(rename = "base_version")] pub base_version: i32, @@ -38,7 +38,7 @@ pub struct AppItem { pub info: AppInfo, } -#[derive(Serialize, Deserialize, Debug, Clone)] +#[derive(Serialize, Deserialize, Debug, Clone, PartialEq)] pub struct AppInfo { #[serde(rename = "name")] pub name: String, diff --git a/src/websdk/cloud_rules/data/config_list.rs 
b/src/websdk/cloud_rules/data/config_list.rs index 52708ba..886fcfe 100644 --- a/src/websdk/cloud_rules/data/config_list.rs +++ b/src/websdk/cloud_rules/data/config_list.rs @@ -32,7 +32,7 @@ pub struct ConfigList { } impl ConfigList { - pub fn viewer(&self) -> ConfigListViewer { + pub fn viewer(&self) -> ConfigListViewer<'_> { ConfigListViewer { app_config_list: self.app_config_list.iter().collect(), hub_config_list: self.hub_config_list.iter().collect(), diff --git a/src/websdk/cloud_rules/data/hub_item.rs b/src/websdk/cloud_rules/data/hub_item.rs index 88f86de..c8c43b7 100644 --- a/src/websdk/cloud_rules/data/hub_item.rs +++ b/src/websdk/cloud_rules/data/hub_item.rs @@ -14,11 +14,12 @@ use serde::{Deserialize, Serialize}; /// } /// target_check_api: "" /// api_keywords: [] +/// auth_keywords: [] /// app_url_templates": [] /// } /// ``` -#[derive(Serialize, Deserialize, Debug, Clone)] +#[derive(Serialize, Deserialize, Debug, Clone, PartialEq)] pub struct HubItem { #[serde(rename = "base_version", default)] pub base_version: i32, @@ -35,6 +36,11 @@ pub struct HubItem { #[serde(rename = "api_keywords", default)] pub api_keywords: Vec, + /// Auth parameter keys required by this hub (e.g. ["token"] for GitHub). + /// Used by the UI to provide autocomplete suggestions when editing hub auth. + #[serde(rename = "auth_keywords", default)] + pub auth_keywords: Vec, + #[serde(rename = "app_url_templates", default)] pub app_url_templates: Vec, @@ -42,7 +48,7 @@ pub struct HubItem { pub target_check_api: Option, } -#[derive(Serialize, Deserialize, Debug, Clone)] +#[derive(Serialize, Deserialize, Debug, Clone, PartialEq)] pub struct Info { #[serde(rename = "hub_name", default)] pub hub_name: String, @@ -85,9 +91,33 @@ mod tests { assert_eq!(hub_item.info.hub_icon_url, Some("".to_string())); assert_eq!(hub_item.target_check_api, Some("".to_string())); assert_eq!(hub_item.api_keywords, ["owner", "repo"]); + // Old config without auth_keywords deserializes to empty vec. 
+ assert_eq!(hub_item.auth_keywords, Vec::::new()); assert_eq!( hub_item.app_url_templates[0], "https://github.com/%owner/%repo/" ); } + + #[test] + fn test_hub_item_auth_keywords() { + let json = r#" +{ + "base_version": 5, + "config_version": 3, + "uuid": "fd9b2602-62c5-4d55-bd1e-0d6537714ca0", + "info": { "hub_name": "GitHub" }, + "api_keywords": ["owner", "repo"], + "auth_keywords": ["token"], + "app_url_templates": ["https://github.com/%owner/%repo/"] +} + "#; + let hub_item: HubItem = serde_json::from_str(json).unwrap(); + assert_eq!(hub_item.auth_keywords, ["token"]); + + // Round-trip serialization preserves auth_keywords. + let serialized = serde_json::to_string(&hub_item).unwrap(); + let hub_item2: HubItem = serde_json::from_str(&serialized).unwrap(); + assert_eq!(hub_item2.auth_keywords, ["token"]); + } } diff --git a/src/websdk/repo/api.rs b/src/websdk/repo/api.rs index 27a0c39..044176a 100644 --- a/src/websdk/repo/api.rs +++ b/src/websdk/repo/api.rs @@ -8,6 +8,7 @@ use super::provider::outside_rpc::OutsideProvider; use super::provider::{self, add_provider}; use crate::cache::get_cache_manager; use crate::cache::manager::GroupType; +use crate::rpc::data::DownloadItemData; use crate::utils::json::{bytes_to_json, json_to_bytes}; use std::collections::HashMap; @@ -141,6 +142,21 @@ pub async fn get_releases<'a>( .unwrap_or(None) } +pub async fn get_download<'a>( + uuid: &str, + app_data: &AppDataMap<'a>, + hub_data: &HubDataMap<'a>, + asset_index: &[i32], +) -> Option> { + let data_map = DataMap { app_data, hub_data }; + let fin = FIn::new(data_map, None); + if let Some(fout) = provider::get_download(uuid, &fin, asset_index).await { + fout.result.ok() + } else { + None + } +} + pub fn add_outside_provider(uuid: &str, url: &str) { let provider = OutsideProvider { uuid: uuid.to_string(), diff --git a/src/websdk/repo/provider.rs b/src/websdk/repo/provider.rs index 80bc16d..220367b 100644 --- a/src/websdk/repo/provider.rs +++ b/src/websdk/repo/provider.rs @@ 
-15,6 +15,7 @@ use self::github::GitHubProvider; use self::gitlab::GitLabProvider; use self::lsposed_repo::LsposedRepoProvider; use super::data::release::ReleaseData; +use crate::rpc::data::DownloadItemData; type ProviderMap = HashMap<&'static str, Arc>; @@ -84,3 +85,15 @@ pub async fn get_releases<'a>(uuid: &str, fin: &FIn<'a>) -> Option( + uuid: &str, + fin: &FIn<'a>, + asset_index: &[i32], +) -> Option>> { + if let Some(provider) = get_provider(uuid) { + Some(provider.get_download(fin, asset_index).await) + } else { + None + } +} diff --git a/src/websdk/repo/provider/base_provider.rs b/src/websdk/repo/provider/base_provider.rs index eccbc98..2dcec20 100644 --- a/src/websdk/repo/provider/base_provider.rs +++ b/src/websdk/repo/provider/base_provider.rs @@ -10,6 +10,7 @@ use std::{ }; use super::super::data::release::*; +use crate::rpc::data::DownloadItemData; pub type HubDataMap<'a> = BTreeMap<&'a str, &'a str>; pub type AppDataMap<'a> = BTreeMap<&'a str, &'a str>; @@ -44,6 +45,7 @@ pub enum FunctionType { CheckAppAvailable, GetLatestRelease, GetReleases, + GetDownload, } pub struct FIn<'a> { @@ -95,10 +97,7 @@ impl FOut { pub fn new_empty() -> Self { FOut { - result: Err(Box::new(std::io::Error::new( - std::io::ErrorKind::Other, - "no data", - ))), + result: Err(Box::new(std::io::Error::other("no data"))), cached_map: None, } } @@ -160,6 +159,12 @@ pub trait BaseProvider { } async fn get_releases(&self, fin: &FIn) -> FOut>; + + /// Get download info for an app's asset. Default returns empty (not supported). + /// Only OutsideProvider (Kotlin hubs) needs to implement this. 
+ async fn get_download(&self, _fin: &FIn, _asset_index: &[i32]) -> FOut> { + FOut::new_empty() + } } pub trait BaseProviderExt: BaseProvider { @@ -231,6 +236,7 @@ mod tests { FunctionType::CheckAppAvailable => "check_app_available", FunctionType::GetLatestRelease => "get_latest_release", FunctionType::GetReleases => "get_releases", + FunctionType::GetDownload => "get_download", }; let id_map = data_map.app_data; vec![format!( diff --git a/src/websdk/repo/provider/fdroid.rs b/src/websdk/repo/provider/fdroid.rs index 63bf850..9e2f276 100644 --- a/src/websdk/repo/provider/fdroid.rs +++ b/src/websdk/repo/provider/fdroid.rs @@ -51,6 +51,7 @@ impl BaseProvider for FDroidProvider { url, id_map[ANDROID_APP_TYPE] )], FunctionType::GetLatestRelease | FunctionType::GetReleases => vec![api_url.to_string()], + FunctionType::GetDownload => vec![], } } @@ -142,15 +143,14 @@ impl FDroidProvider { } } Ok(Event::Text(e)) => { - if let Ok(e) = e.unescape() { - let text = e.into_owned(); + if let Ok(text) = e.decode() { match current_tag.as_str() { - "version" => version_number += &text, - "changelog" => changelog += &text, + "version" => version_number += text.as_ref(), + "changelog" => changelog += text.as_ref(), "versionCode" | "nativecode" => { extra.insert(current_tag.clone(), text.to_string()); } - "apkname" => file_name += &text, + "apkname" => file_name += text.as_ref(), _ => (), } } @@ -159,7 +159,7 @@ impl FDroidProvider { }; } let download_url = format!("{}/{}", url, file_name); - let file_type = file_name.split('.').last().unwrap_or("").to_string(); + let file_type = file_name.split('.').next_back().unwrap_or("").to_string(); let extra = if extra.is_empty() { None } else { Some(extra) }; Ok(ReleaseData { @@ -199,10 +199,9 @@ impl FDroidProvider { } } Ok(Event::Text(e)) => { - if let Ok(e) = e.unescape() { - let text = e.into_owned(); + if let Ok(text) = e.decode() { if current_tag.as_str() == "changelog" { - changelog += &text + changelog += text.as_ref() } } } diff --git 
a/src/websdk/repo/provider/github.rs b/src/websdk/repo/provider/github.rs index 5393cde..f7e8bc8 100644 --- a/src/websdk/repo/provider/github.rs +++ b/src/websdk/repo/provider/github.rs @@ -36,7 +36,7 @@ impl GitHubProvider { if let Some(token) = token { map.insert("Authorization".to_string(), format!("Bearer {}", token)); } - return map; + map } } @@ -59,6 +59,7 @@ impl BaseProvider for GitHubProvider { "{}/repos/{}/{}/releases", GITHUB_API_URL, id_map["owner"], id_map["repo"] )], + FunctionType::GetDownload => vec![], } } diff --git a/src/websdk/repo/provider/gitlab.rs b/src/websdk/repo/provider/gitlab.rs index 48f22cf..d3a67ad 100644 --- a/src/websdk/repo/provider/gitlab.rs +++ b/src/websdk/repo/provider/gitlab.rs @@ -104,6 +104,7 @@ impl BaseProvider for GitLabProvider { "{}/{}/{}/releases", GITLAB_API_URL, id_map["owner"], id_map["repo"] )], + FunctionType::GetDownload => vec![], } } diff --git a/src/websdk/repo/provider/outside_rpc.rs b/src/websdk/repo/provider/outside_rpc.rs index 6855e19..ddfcc79 100644 --- a/src/websdk/repo/provider/outside_rpc.rs +++ b/src/websdk/repo/provider/outside_rpc.rs @@ -1,6 +1,7 @@ use super::super::data::release::*; use super::base_provider::*; use crate::rpc::client::*; +use crate::rpc::data::DownloadItemData; use async_trait::async_trait; pub struct OutsideProvider { @@ -86,6 +87,28 @@ impl BaseProvider for OutsideProvider { cached_map: None, } } + + async fn get_download(&self, fin: &FIn, asset_index: &[i32]) -> FOut> { + FOut { + result: match Client::new(&self.url).map(|client| async move { + client + .get_download( + &self.uuid, + fin.data_map.app_data.to_owned(), + fin.data_map.hub_data.to_owned(), + asset_index, + ) + .await + }) { + Ok(result) => match result.await { + Ok(result) => Ok(result), + Err(e) => Err(Box::new(e)), + }, + Err(e) => Err(Box::new(e)), + }, + cached_map: None, + } + } } #[cfg(test)] diff --git a/tests/script/cargo_test.sh b/tests/script/cargo_test.sh index 3665170..086b9ed 100755 --- 
a/tests/script/cargo_test.sh +++ b/tests/script/cargo_test.sh @@ -3,32 +3,23 @@ # function for running a command and printing it # if command failed, print notification and exit function run { - echo "Running: $@" - "$@" - local status=$? - if [ $status -ne 0 ]; then - echo "Command failed: $@" - exit $status - fi + echo "Running: $@" + "$@" + local status=$? + if [ $status -ne 0 ]; then + echo "Command failed: $@" + exit $status + fi } -echo "Building with default features" -run cargo build --verbose echo "Testing with default features" -run cargo test --verbose +run cargo test -echo "Building with each feature individually" -for feature in "rustls-platform-verifier" "webpki-roots" "native-tokio"; do - echo "Building with feature: $feature" - run cargo build --verbose --no-default-features --features "$feature" -done echo "Testing with each feature individually" for feature in "rustls-platform-verifier" "webpki-roots" "native-tokio"; do - echo "Testing with feature: $feature" - run cargo test --verbose --no-default-features --features "$feature" + echo "Testing with feature: $feature" + run cargo test --no-default-features --features "$feature" done -echo "Building with all features" -run cargo build --verbose --all-features echo "Testing with all features" -run cargo test --verbose --all-features +run cargo test --all-features