diff --git a/Cargo.lock b/Cargo.lock index 7df875a9..7e0d5ec1 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2,32 +2,12 @@ # It is not intended for manual editing. version = 3 -[[package]] -name = "addr2line" -version = "0.24.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dfbe277e56a376000877090da837660b4427aad530e3028d44e0bffe4f89a1c1" -dependencies = [ - "gimli", -] - [[package]] name = "adler2" version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "512761e0bb2578dd7380c6baaa0f4ce03e84f95e960231d1dec8bf4d7d6e2627" -[[package]] -name = "aes" -version = "0.8.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b169f7a6d4742236a0a00c541b845991d0ac43e546831af1249753ab4c3aa3a0" -dependencies = [ - "cfg-if 1.0.0", - "cipher", - "cpufeatures", -] - [[package]] name = "aho-corasick" version = "1.1.3" @@ -261,27 +241,6 @@ version = "1.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ace50bade8e6234aa140d9a2f552bbee1db4d353f69b8217bc503490fc1a9f26" -[[package]] -name = "backtrace" -version = "0.3.75" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6806a6321ec58106fea15becdad98371e28d92ccbc7c8f1b3b6dd724fe8f1002" -dependencies = [ - "addr2line", - "cfg-if 1.0.0", - "libc", - "miniz_oxide", - "object", - "rustc-demangle", - "windows-targets 0.52.6", -] - -[[package]] -name = "base64" -version = "0.21.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9d297deb1925b89f2ccc13d7635fa0714f12c87adce1c75356b39ca9b7178567" - [[package]] name = "base64" version = "0.22.1" @@ -334,12 +293,6 @@ version = "1.23.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9134a6ef01ce4b366b50689c94f82c14bc72bc5d0386829828a2e2752ef7958c" -[[package]] -name = "byteorder" -version = "1.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" - [[package]] name = "byteorder-lite" version = "0.1.0" @@ -352,25 +305,6 @@ version = "1.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d71b6127be86fdcfddb610f7182ac57211d4b18a3e9c82eb2d17662f2227ad6a" -[[package]] -name = "bzip2" -version = "0.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "49ecfb22d906f800d4fe833b6282cf4dc1c298f5057ca0b5445e5c209735ca47" -dependencies = [ - "bzip2-sys", -] - -[[package]] -name = "bzip2-sys" -version = "0.1.13+1.0.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "225bff33b2141874fe80d71e07d6eec4f85c5c216453dd96388240f96e1acc14" -dependencies = [ - "cc", - "pkg-config", -] - [[package]] name = "cbindgen" version = "0.28.0" @@ -413,6 +347,12 @@ version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" +[[package]] +name = "cfg_aliases" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "613afe47fcd5fac7ccf1db93babcb082c5994d996f20b8b159f2ad1658eb5724" + [[package]] name = "chrono" version = "0.4.41" @@ -427,16 +367,6 @@ dependencies = [ "windows-link", ] -[[package]] -name = "cipher" -version = "0.4.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "773f3b9af64447d2ce9850330c473515014aa235e6a783b02db81ff39e4a3dad" -dependencies = [ - "crypto-common", - "inout", 
-] - [[package]] name = "clap" version = "4.5.38" @@ -447,16 +377,6 @@ dependencies = [ "clap_derive", ] -[[package]] -name = "clap-verbosity-flag" -version = "2.2.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "34c77f67047557f62582784fd7482884697731b2932c7d37ced54bce2312e1e2" -dependencies = [ - "clap", - "log", -] - [[package]] name = "clap_builder" version = "4.5.38" @@ -508,35 +428,6 @@ dependencies = [ "crossbeam-utils", ] -[[package]] -name = "console" -version = "0.15.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "054ccb5b10f9f2cbf51eb355ca1d05c2d279ce1804688d0db74b4733a5aeafd8" -dependencies = [ - "encode_unicode", - "libc", - "once_cell", - "unicode-width", - "windows-sys 0.59.0", -] - -[[package]] -name = "constant_time_eq" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7c74b8349d32d297c9134b8c88677813a227df8f779daa29bfc29c183fe3dca6" - -[[package]] -name = "core-foundation" -version = "0.9.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "91e195e091a93c46f7102ec7818a2aa394e1e1771c3ab4825963fa03e45afb8f" -dependencies = [ - "core-foundation-sys", - "libc", -] - [[package]] name = "core-foundation" version = "0.10.0" @@ -562,21 +453,6 @@ dependencies = [ "libc", ] -[[package]] -name = "crc" -version = "3.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9710d3b3739c2e349eb44fe848ad0b7c8cb1e42bd87ee49371df2f7acaf3e675" -dependencies = [ - "crc-catalog", -] - -[[package]] -name = "crc-catalog" -version = "2.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "19d374276b40fb8bbdee95aef7c7fa6b5316ec764510eb64b8dd0e2ed0d7e7f5" - [[package]] name = "crc32fast" version = "1.4.2" @@ -665,12 +541,6 @@ dependencies = [ "syn 1.0.109", ] -[[package]] -name = "deflate64" -version = "0.1.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "da692b8d1080ea3045efaab14434d40468c3d8657e42abddfffca87b428f4c1b" - [[package]] name = "deranged" version = "0.4.0" @@ -702,27 +572,6 @@ dependencies = [ "syn 2.0.101", ] -[[package]] -name = "derive_more" -version = "1.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4a9b99b9cbbe49445b21764dc0625032a89b145a2642e67603e1c936f5458d05" -dependencies = [ - "derive_more-impl", -] - -[[package]] -name = "derive_more-impl" -version = "1.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cb7330aeadfbe296029522e6c40f315320aba36fc43a5b3632f3795348f3bd22" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.101", - "unicode-xid", -] - [[package]] name = "dialog" version = "0.3.0" @@ -747,7 +596,6 @@ checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292" dependencies = [ "block-buffer", "crypto-common", - "subtle", ] [[package]] @@ -788,21 +636,6 @@ version = "1.15.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "48c757948c5ede0e46177b7add2e67155f70e33c07fea8284df6576da70b3719" -[[package]] -name = "encode_unicode" -version = "1.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "34aa73646ffb006b8f5147f3dc182bd4bcb190227ce861fc4a4844bf8e3cb2c0" - -[[package]] -name = "encoding_rs" -version = "0.8.35" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "75030f3c4f45dafd7586dd6780965a8c7e8e285a5ecb86713e63a79c5b2766f3" -dependencies = [ - "cfg-if 
1.0.0", -] - [[package]] name = "enum-flags" version = "0.4.0" @@ -814,19 +647,6 @@ dependencies = [ "syn 1.0.109", ] -[[package]] -name = "env_logger" -version = "0.10.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4cd405aab171cb85d6735e5c8d9db038c17d3ca007a4d2c25f337935c3d90580" -dependencies = [ - "humantime", - "is-terminal", - "log", - "regex", - "termcolor", -] - [[package]] name = "equivalent" version = "1.0.2" @@ -912,21 +732,6 @@ version = "1.0.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" -[[package]] -name = "foreign-types" -version = "0.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f6f339eb8adc052cd2ca78910fda869aefa38d22d5cb648e6485e4d3fc06f3b1" -dependencies = [ - "foreign-types-shared", -] - -[[package]] -name = "foreign-types-shared" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "00b0228411908ca8685dba7fc2cdd70ec9990a6e753e89b6ac91a84c40fbaf4b" - [[package]] name = "form_urlencoded" version = "1.2.1" @@ -938,16 +743,16 @@ dependencies = [ [[package]] name = "fs_at" -version = "0.1.10" +version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "982f82cc75107eef84f417ad6c53ae89bf65b561937ca4a3b3b0fd04d0aa2425" +checksum = "14af6c9694ea25db25baa2a1788703b9e7c6648dcaeeebeb98f7561b5384c036" dependencies = [ "aligned", "cfg-if 1.0.0", "cvt", "libc", "nix", - "windows-sys 0.48.0", + "windows-sys 0.52.0", ] [[package]] @@ -990,33 +795,6 @@ dependencies = [ "pin-project-lite", ] -[[package]] -name = "futures-sink" -version = "0.3.31" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e575fab7d1e0dcb8d0c7bcf9a63ee213816ab51902e6d244a95819acacf1d4f7" - -[[package]] -name = "futures-task" -version = "0.3.31" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f90f7dce0722e95104fcb095585910c0977252f286e354b5e3bd38902cd99988" - -[[package]] -name = "futures-util" -version = "0.3.31" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9fa08315bb612088cc391249efdc3bc77536f16c91f6cf495e6fbe85b20a4a81" -dependencies = [ - "futures-core", - "futures-io", - "futures-task", - "memchr", - "pin-project-lite", - "pin-utils", - "slab", -] - [[package]] name = "generic-array" version = "0.14.7" @@ -1045,11 +823,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "26145e563e54f2cadc477553f1ec5ee650b00862f0a58bcd12cbdc5f0ea2d2f4" dependencies = [ "cfg-if 1.0.0", - "js-sys", "libc", "r-efi", "wasi 0.14.2+wasi-0.2.4", - "wasm-bindgen", ] [[package]] @@ -1062,12 +838,6 @@ dependencies = [ "weezl", ] -[[package]] -name = "gimli" -version = "0.31.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "07e28edb80900c19c28f1072f2e8aeca7fa06b23cd4169cefe1af5aa3260783f" - [[package]] name = "glob" version = "0.3.2" @@ -1086,25 +856,6 @@ dependencies = [ "wasm-bindgen", ] -[[package]] -name = "h2" -version = "0.3.26" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "81fe527a889e1532da5c525686d96d4c2e74cdd345badf8dfef9f6b39dd5f5e8" -dependencies = [ - "bytes", - "fnv", - "futures-core", - "futures-sink", - "futures-util", - "http 0.2.12", - "indexmap", - "slab", - "tokio", - "tokio-util", - "tracing", -] - [[package]] name = "hashbrown" version = "0.15.3" @@ -1129,32 +880,6 @@ version = 
"0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fbf6a919d6cf397374f7dfeeea91d974c7c0a7221d0d0f4f20d859d329e53fcc" -[[package]] -name = "hermit-abi" -version = "0.5.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f154ce46856750ed433c8649605bf7ed2de3bc35fd9d2a9f30cddd873c80cb08" - -[[package]] -name = "hmac" -version = "0.12.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6c49c37c09c17a53d937dfbb742eb3a961d65a994e6bcdcf37e7399d0cc8ab5e" -dependencies = [ - "digest", -] - -[[package]] -name = "http" -version = "0.2.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "601cbb57e577e2f5ef5be8e7b83f0f63994f25aa94d673e54a92d5c516d101f1" -dependencies = [ - "bytes", - "fnv", - "itoa", -] - [[package]] name = "http" version = "1.3.1" @@ -1166,72 +891,12 @@ dependencies = [ "itoa", ] -[[package]] -name = "http-body" -version = "0.4.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7ceab25649e9960c0311ea418d17bee82c0dcec1bd053b5f9a66e265a693bed2" -dependencies = [ - "bytes", - "http 0.2.12", - "pin-project-lite", -] - [[package]] name = "httparse" version = "1.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6dbf3de79e51f3d586ab4cb9d5c3e2c14aa28ed23d180cf89b4df0454a69cc87" -[[package]] -name = "httpdate" -version = "1.0.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "df3b46402a9d5adb4c86a0cf463f42e19994e3ee891101b1841f30a545cb49a9" - -[[package]] -name = "humantime" -version = "2.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9b112acc8b3adf4b107a8ec20977da0273a8c386765a3ec0229bd500a1443f9f" - -[[package]] -name = "hyper" -version = "0.14.32" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "41dfc780fdec9373c01bae43289ea34c972e40ee3c9f6b3c8801a35f35586ce7" -dependencies = [ - "bytes", - "futures-channel", - "futures-core", - "futures-util", - "h2", - "http 0.2.12", - "http-body", - "httparse", - "httpdate", - "itoa", - "pin-project-lite", - "socket2", - "tokio", - "tower-service", - "tracing", - "want", -] - -[[package]] -name = "hyper-tls" -version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d6183ddfa99b85da61a140bea0efc93fdf56ceaa041b37d553518030827f9905" -dependencies = [ - "bytes", - "hyper", - "native-tls", - "tokio", - "tokio-native-tls", -] - [[package]] name = "iana-time-zone" version = "0.1.63" @@ -1395,60 +1060,12 @@ dependencies = [ "hashbrown", ] -[[package]] -name = "indicatif" -version = "0.17.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "183b3088984b400f4cfac3620d5e076c84da5364016b4f49473de574b2586235" -dependencies = [ - "console", - "number_prefix", - "portable-atomic", - "unicode-width", - "web-time", -] - -[[package]] -name = "inout" -version = "0.1.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "879f10e63c20629ecabbb64a8010319738c66a5cd0c29b02d63d272b03751d01" -dependencies = [ - "generic-array", -] - -[[package]] -name = "ipnet" -version = "2.11.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "469fb0b9cefa57e3ef31275ee7cacb78f2fdca44e4765491884a2b119d4eb130" - -[[package]] -name = "is-terminal" -version = "0.4.16" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"e04d7f318608d35d4b61ddd75cbdaee86b023ebe2bd5a66ee0915f0bf93095a9" -dependencies = [ - "hermit-abi 0.5.1", - "libc", - "windows-sys 0.59.0", -] - [[package]] name = "is_terminal_polyfill" version = "1.70.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7943c866cc5cd64cbc25b2e01621d07fa8eb2a1a23160ee81ce38704e97b8ecf" -[[package]] -name = "itertools" -version = "0.10.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b0fd2260e829bddf4cb6ea802289de2f86d6a7a690192fbe91b3f46e0f2c8473" -dependencies = [ - "either", -] - [[package]] name = "itoa" version = "1.0.15" @@ -1583,27 +1200,6 @@ dependencies = [ "log", ] -[[package]] -name = "lzma-rs" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "297e814c836ae64db86b36cf2a557ba54368d03f6afcd7d947c266692f71115e" -dependencies = [ - "byteorder", - "crc", -] - -[[package]] -name = "lzma-sys" -version = "0.1.20" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5fda04ab3764e6cde78b9974eec4f779acaba7c4e84b36eca3cf77c581b85d27" -dependencies = [ - "cc", - "libc", - "pkg-config", -] - [[package]] name = "memchr" version = "2.7.4" @@ -1619,12 +1215,6 @@ dependencies = [ "libc", ] -[[package]] -name = "mime" -version = "0.3.17" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a" - [[package]] name = "miniz_oxide" version = "0.8.8" @@ -1635,45 +1225,6 @@ dependencies = [ "simd-adler32", ] -[[package]] -name = "mio" -version = "1.0.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2886843bf800fba2e3377cff24abf6379b4c4d5c6681eaf9ea5b0d15090450bd" -dependencies = [ - "libc", - "wasi 0.11.0+wasi-snapshot-preview1", - "windows-sys 0.52.0", -] - -[[package]] -name = "mtzip" -version = "4.0.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f6c6cb282455a4f5495359ca3f6b6c3c95b3f5bbb598fce39fbd7ed7877eb9aa" -dependencies = [ - "cfg-if 1.0.0", - "derive_more", - "flate2", -] - -[[package]] -name = "native-tls" -version = "0.2.14" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "87de3442987e9dbec73158d5c715e7ad9072fda936bb03d19d7fa10e00520f0e" -dependencies = [ - "libc", - "log", - "openssl", - "openssl-probe", - "openssl-sys", - "schannel", - "security-framework", - "security-framework-sys", - "tempfile", -] - [[package]] name = "neon" version = "1.0.0" @@ -1702,12 +1253,13 @@ dependencies = [ [[package]] name = "nix" -version = "0.26.4" +version = "0.29.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "598beaf3cc6fdd9a5dfb1630c2800c7acd31df7aaf0f565796fba2b53ca1af1b" +checksum = "71e2746dc3a24dd78b3cfcb7be93368c6de9963d30f43a6a73998a9cf4b17b46" dependencies = [ - "bitflags 1.3.2", + "bitflags 2.9.1", "cfg-if 1.0.0", + "cfg_aliases", "libc", ] @@ -1777,71 +1329,12 @@ dependencies = [ "libc", ] -[[package]] -name = "number_prefix" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "830b246a0e5f20af87141b25c173cd1b609bd7779a4617d6ec582abaf90870f3" - -[[package]] -name = "object" -version = "0.36.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "62948e14d923ea95ea2c7c86c71013138b66525b86bdc08d2dcc262bdb497b87" -dependencies = [ - "memchr", -] - [[package]] name = "once_cell" version = "1.21.3" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "42f5e15c9953c5e4ccceeb2e7382a716482c34515315f7b03532b8b4e8393d2d" -[[package]] -name = "openssl" -version = "0.10.72" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fedfea7d58a1f73118430a55da6a286e7b044961736ce96a16a17068ea25e5da" -dependencies = [ - "bitflags 2.9.1", - "cfg-if 1.0.0", - "foreign-types", - "libc", - "once_cell", - "openssl-macros", - "openssl-sys", -] - -[[package]] -name = "openssl-macros" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.101", -] - -[[package]] -name = "openssl-probe" -version = "0.1.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d05e27ee213611ffe7d6348b942e8f942b37114c00cc03cec254295a4a17852e" - -[[package]] -name = "openssl-sys" -version = "0.9.108" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e145e1651e858e820e4860f7b9c5e169bc1d8ce1c86043be79fa7b7634821847" -dependencies = [ - "cc", - "libc", - "pkg-config", - "vcpkg", -] - [[package]] name = "os_info" version = "3.11.0" @@ -1859,16 +1352,6 @@ version = "2.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f38d5652c16fde515bb1ecef450ab0f6a219d619a7274976324d5e377f7dceba" -[[package]] -name = "pbkdf2" -version = "0.12.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f8ed6a7761f76e3b9f92dfb0a60a6a6477c61024b775147ff0973a02653abaf2" -dependencies = [ - "digest", - "hmac", -] - [[package]] name = "percent-encoding" version = "2.3.1" @@ -1925,19 +1408,13 @@ checksum = "a604568c3202727d1507653cb121dbd627a58684eb09a820fd746bee38b4442f" dependencies = [ "cfg-if 1.0.0", "concurrent-queue", - "hermit-abi 0.4.0", + "hermit-abi", "pin-project-lite", "rustix 0.38.44", "tracing", "windows-sys 0.59.0", ] -[[package]] -name = "portable-atomic" -version = "1.11.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "350e9b48cbc6b0e028b0473b114454c6316e57336ee184ceab6e53f72c178b3e" - [[package]] name = "potential_utf" version = "0.1.2" @@ -2049,12 +1526,6 @@ dependencies = [ "getrandom 0.3.3", ] -[[package]] -name = "ranges" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "350932f7e8b2d0333d2537f42671a837473576c00e128fbee84c819ea08783b5" - [[package]] name = "rayon" version = "1.10.0" @@ -2117,57 +1588,16 @@ checksum = "2b15c43186be67a4fd63bee50d0303afffcef381492ebe2c5d87f324e1b8815c" [[package]] name = "remove_dir_all" -version = "0.8.2" -source = "git+https://github.com/caesay/remove_dir_all.git#c98142b9150c53e6c5f56e752d2bf93433f2e207" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "808cc0b475acf76adf36f08ca49429b12aad9f678cb56143d5b3cb49b9a1dd08" dependencies = [ "cfg-if 1.0.0", "cvt", "fs_at", "libc", - "log", "normpath", - "same-file", - "windows-sys 0.52.0", -] - -[[package]] -name = "reqwest" -version = "0.11.27" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dd67538700a17451e7cba03ac727fb961abb7607553461627b97de0b89cf4a62" -dependencies = [ - "base64 0.21.7", - "bytes", - "encoding_rs", - "futures-core", - "futures-util", - "h2", - "http 0.2.12", - "http-body", - "hyper", - "hyper-tls", - "ipnet", - "js-sys", - "log", - "mime", - "native-tls", - "once_cell", - 
"percent-encoding", - "pin-project-lite", - "rustls-pemfile 1.0.4", - "serde", - "serde_json", - "serde_urlencoded", - "sync_wrapper", - "system-configuration", - "tokio", - "tokio-native-tls", - "tower-service", - "url", - "wasm-bindgen", - "wasm-bindgen-futures", - "web-sys", - "winreg", + "windows-sys 0.59.0", ] [[package]] @@ -2184,30 +1614,6 @@ dependencies = [ "windows-sys 0.52.0", ] -[[package]] -name = "ripunzip" -version = "2.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "82ed68561d7508a84261a74b852832ea0cf156f7a0202a6797a08c7296a88da0" -dependencies = [ - "anyhow", - "clap", - "clap-verbosity-flag", - "env_logger", - "indicatif", - "itertools", - "log", - "progress-streams", - "ranges", - "rayon", - "regex", - "reqwest", - "tempfile", - "thiserror 1.0.69", - "wildmatch", - "zip 2.4.2", -] - [[package]] name = "rpassword" version = "2.1.0" @@ -2219,12 +1625,6 @@ dependencies = [ "winapi 0.2.8", ] -[[package]] -name = "rustc-demangle" -version = "0.1.24" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "719b953e2095829ee67db738b3bfa9fa368c94900df327b3f07fe6e794d2fe1f" - [[package]] name = "rustix" version = "0.38.44" @@ -2266,15 +1666,6 @@ dependencies = [ "zeroize", ] -[[package]] -name = "rustls-pemfile" -version = "1.0.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1c74cae0a4cf6ccbbf5f359f08efdf8ee7e1dc532573bf0db71968cb56b1448c" -dependencies = [ - "base64 0.21.7", -] - [[package]] name = "rustls-pemfile" version = "2.2.0" @@ -2325,38 +1716,6 @@ dependencies = [ "winapi-util", ] -[[package]] -name = "schannel" -version = "0.1.27" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1f29ebaa345f945cec9fbbc532eb307f0fdad8161f281b6369539c8d84876b3d" -dependencies = [ - "windows-sys 0.59.0", -] - -[[package]] -name = "security-framework" -version = "2.11.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "897b2245f0b511c87893af39b033e5ca9cce68824c4d7e7630b5a1d339658d02" -dependencies = [ - "bitflags 2.9.1", - "core-foundation 0.9.4", - "core-foundation-sys", - "libc", - "security-framework-sys", -] - -[[package]] -name = "security-framework-sys" -version = "2.14.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "49db231d56a190491cb4aeda9527f1ad45345af50b0851622a7adb8c03b01c32" -dependencies = [ - "core-foundation-sys", - "libc", -] - [[package]] name = "semver" version = "1.0.26" @@ -2410,18 +1769,6 @@ dependencies = [ "serde", ] -[[package]] -name = "serde_urlencoded" -version = "0.7.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d3491c14715ca2294c4d6a88f15e84739788c1d030eed8c110436aafdaa2f3fd" -dependencies = [ - "form_urlencoded", - "itoa", - "ryu", - "serde", -] - [[package]] name = "sha1" version = "0.10.6" @@ -2497,16 +1844,6 @@ version = "1.15.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8917285742e9f3e1683f0a9c4e6b57960b7314d0b08d30d1ecd426713ee2eee9" -[[package]] -name = "socket2" -version = "0.5.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4f5fd57c80058a56cf5c777ab8a126398ece8e442983605d280a44ce79d0edef" -dependencies = [ - "libc", - "windows-sys 0.52.0", -] - [[package]] name = "stable_deref_trait" version = "1.2.0" @@ -2586,12 +1923,6 @@ dependencies = [ "syn 2.0.101", ] -[[package]] -name = "sync_wrapper" -version = "0.1.2" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "2047c6ded9c721764247e62cd3b03c09ffc529b2ba5b10ec482ae507a4a70160" - [[package]] name = "synstructure" version = "0.13.2" @@ -2603,27 +1934,6 @@ dependencies = [ "syn 2.0.101", ] -[[package]] -name = "system-configuration" -version = "0.5.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ba3a3adc5c275d719af8cb4272ea1c4a6d668a777f37e115f6d11ddbc1c8e0e7" -dependencies = [ - "bitflags 1.3.2", - "core-foundation 0.9.4", - "system-configuration-sys", -] - -[[package]] -name = "system-configuration-sys" -version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a75fb188eb626b924683e3b95e3a48e63551fcfb51949de2f06a9d91dbee93c9" -dependencies = [ - "core-foundation-sys", - "libc", -] - [[package]] name = "tempfile" version = "3.20.0" @@ -2740,44 +2050,6 @@ dependencies = [ "zerovec", ] -[[package]] -name = "tokio" -version = "1.45.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2513ca694ef9ede0fb23fe71a4ee4107cb102b9dc1930f6d0fd77aae068ae165" -dependencies = [ - "backtrace", - "bytes", - "libc", - "mio", - "pin-project-lite", - "socket2", - "windows-sys 0.52.0", -] - -[[package]] -name = "tokio-native-tls" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bbae76ab933c85776efabc971569dd6119c580d8f5d448769dec1764bf796ef2" -dependencies = [ - "native-tls", - "tokio", -] - -[[package]] -name = "tokio-util" -version = "0.7.15" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "66a539a9ad6d5d281510d5bd368c973d636c02dbf8a67300bfb6b950696ad7df" -dependencies = [ - "bytes", - "futures-core", - "futures-sink", - "pin-project-lite", - "tokio", -] - [[package]] name = "toml" version = "0.5.11" @@ -2828,12 +2100,6 @@ version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bfb942dfe1d8e29a7ee7fcbde5bd2b9a25fb89aa70caea2eba3bee836ff41076" -[[package]] -name = "tower-service" -version = "0.3.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8df9b6e13f2d32c91b9bd719c00d1958837bc7dec474d94952798cc8e69eeec3" - [[package]] name = "tracing" version = "0.1.41" @@ -2849,15 +2115,6 @@ name = "tracing-core" version = "0.1.33" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e672c95779cf947c5311f83787af4fa8fffd12fb27e4993211a84bdfd9610f9c" -dependencies = [ - "once_cell", -] - -[[package]] -name = "try-lock" -version = "0.2.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b" [[package]] name = "ts-rs" @@ -2894,18 +2151,6 @@ version = "1.0.18" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5a5f39404a5da50712a4c1eecf25e90dd62b613502b7e925fd4e4d19b5c96512" -[[package]] -name = "unicode-width" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1fc81956842c57dac11422a97c3b8195a1ff727f06e85c84ed2e8aa277c9a0fd" - -[[package]] -name = "unicode-xid" -version = "0.2.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ebc1c04c71510c7f702b52b7c350734c9ff1295c464a03335b00bb84fc54f853" - [[package]] name = "untrusted" version = "0.9.0" @@ -2918,12 +2163,12 @@ version = "3.0.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"b7a3e9af6113ecd57b8c63d3cd76a385b2e3881365f1f489e54f49801d0c83ea" dependencies = [ - "base64 0.22.1", + "base64", "flate2", "log", "percent-encoding", "rustls", - "rustls-pemfile 2.2.0", + "rustls-pemfile", "rustls-pki-types", "ureq-proto", "utf-8", @@ -2936,8 +2181,8 @@ version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fadf18427d33828c311234884b7ba2afb57143e6e7e69fda7ee883b624661e36" dependencies = [ - "base64 0.22.1", - "http 1.3.1", + "base64", + "http", "httparse", "log", ] @@ -2999,12 +2244,6 @@ version = "1.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "943ce29a8a743eb10d6082545d861b24f9d1b160b7d741e0f2cdf726bec909c5" -[[package]] -name = "vcpkg" -version = "0.2.15" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426" - [[package]] name = "velopack" version = "0.0.0-local" @@ -3036,7 +2275,7 @@ dependencies = [ "uuid", "windows", "xml", - "zip 3.0.0", + "zip", ] [[package]] @@ -3047,12 +2286,13 @@ dependencies = [ "bitflags 2.9.1", "chrono", "clap", - "core-foundation 0.10.0", + "core-foundation", "core-foundation-sys", "derivative", "dialog", "enum-flags", "file-rotate", + "flate2", "fs_extra", "glob", "image", @@ -3062,16 +2302,16 @@ dependencies = [ "log", "log-panics", "memmap2", - "mtzip", "normpath", "ntest", "os_info", "pretty-bytes-rust", "pretty_assertions", + "progress-streams", "rand", + "rayon", "regex", "remove_dir_all", - "ripunzip", "same-file", "semver", "sha1_smol", @@ -3089,8 +2329,7 @@ dependencies = [ "windows", "winres", "winsafe", - "zerofrom", - "zip 3.0.0", + "zip", "zstd", ] @@ -3158,15 +2397,6 @@ dependencies = [ "winapi-util", ] -[[package]] -name = "want" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bfa7760aed19e106de2c7c0b581b509f2f25d3dacaf737cb82ac61bc6d760b0e" -dependencies = [ - "try-lock", -] - [[package]] name = "wasi" version = "0.10.0+wasi-snapshot-preview1" @@ -3269,16 +2499,6 @@ dependencies = [ "wasm-bindgen", ] -[[package]] -name = "web-time" -version = "1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5a6580f308b1fad9207618087a65c04e7a10bc77e02c8e84e9b00dd4b12fa0bb" -dependencies = [ - "js-sys", - "wasm-bindgen", -] - [[package]] name = "webpki-roots" version = "0.26.11" @@ -3314,12 +2534,6 @@ version = "0.1.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "53a85b86a771b1c87058196170769dd264f66c0782acf1ae6cc51bfd64b39082" -[[package]] -name = "wildmatch" -version = "2.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "68ce1ab1f8c62655ebe1350f589c61e505cf94d385bc6a12899442d9081e71fd" - [[package]] name = "winapi" version = "0.2.8" @@ -3465,15 +2679,6 @@ dependencies = [ "windows-link", ] -[[package]] -name = "windows-sys" -version = "0.48.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9" -dependencies = [ - "windows-targets 0.48.5", -] - [[package]] name = "windows-sys" version = "0.52.0" @@ -3492,21 +2697,6 @@ dependencies = [ "windows-targets 0.52.6", ] -[[package]] -name = "windows-targets" -version = "0.48.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9a2fa6e2155d7247be68c096456083145c183cbbbc2764150dda45a87197940c" -dependencies = [ - "windows_aarch64_gnullvm 0.48.5", - 
"windows_aarch64_msvc 0.48.5", - "windows_i686_gnu 0.48.5", - "windows_i686_msvc 0.48.5", - "windows_x86_64_gnu 0.48.5", - "windows_x86_64_gnullvm 0.48.5", - "windows_x86_64_msvc 0.48.5", -] - [[package]] name = "windows-targets" version = "0.52.6" @@ -3548,12 +2738,6 @@ dependencies = [ "windows-link", ] -[[package]] -name = "windows_aarch64_gnullvm" -version = "0.48.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8" - [[package]] name = "windows_aarch64_gnullvm" version = "0.52.6" @@ -3566,12 +2750,6 @@ version = "0.53.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "86b8d5f90ddd19cb4a147a5fa63ca848db3df085e25fee3cc10b39b6eebae764" -[[package]] -name = "windows_aarch64_msvc" -version = "0.48.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc" - [[package]] name = "windows_aarch64_msvc" version = "0.52.6" @@ -3584,12 +2762,6 @@ version = "0.53.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c7651a1f62a11b8cbd5e0d42526e55f2c99886c77e007179efff86c2b137e66c" -[[package]] -name = "windows_i686_gnu" -version = "0.48.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e" - [[package]] name = "windows_i686_gnu" version = "0.52.6" @@ -3614,12 +2786,6 @@ version = "0.53.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9ce6ccbdedbf6d6354471319e781c0dfef054c81fbc7cf83f338a4296c0cae11" -[[package]] -name = "windows_i686_msvc" -version = "0.48.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406" - [[package]] name = "windows_i686_msvc" version = "0.52.6" @@ -3632,12 +2798,6 @@ version = "0.53.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "581fee95406bb13382d2f65cd4a908ca7b1e4c2f1917f143ba16efe98a589b5d" -[[package]] -name = "windows_x86_64_gnu" -version = "0.48.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e" - [[package]] name = "windows_x86_64_gnu" version = "0.52.6" @@ -3650,12 +2810,6 @@ version = "0.53.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2e55b5ac9ea33f2fc1716d1742db15574fd6fc8dadc51caab1c16a3d3b4190ba" -[[package]] -name = "windows_x86_64_gnullvm" -version = "0.48.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc" - [[package]] name = "windows_x86_64_gnullvm" version = "0.52.6" @@ -3668,12 +2822,6 @@ version = "0.53.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0a6e035dd0599267ce1ee132e51c27dd29437f63325753051e71dd9e42406c57" -[[package]] -name = "windows_x86_64_msvc" -version = "0.48.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538" - [[package]] name = "windows_x86_64_msvc" version = "0.52.6" @@ -3695,16 +2843,6 @@ dependencies = [ "memchr", ] -[[package]] -name = "winreg" -version = "0.50.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"524e57b2c537c0f9b1e69f1965311ec12182b4122e45035b1508cd24d2adadb1" -dependencies = [ - "cfg-if 1.0.0", - "windows-sys 0.48.0", -] - [[package]] name = "winres" version = "0.1.12" @@ -3750,15 +2888,6 @@ version = "0.8.26" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a62ce76d9b56901b19a74f19431b0d8b3bc7ca4ad685a746dfd78ca8f4fc6bda" -[[package]] -name = "xz2" -version = "0.1.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "388c44dc09d76f1536602ead6d325eb532f5c122f17782bd57fb47baeeb767e2" -dependencies = [ - "lzma-sys", -] - [[package]] name = "yansi" version = "1.0.1" @@ -3811,9 +2940,9 @@ dependencies = [ [[package]] name = "zerofrom" -version = "0.1.5" +version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cff3ee08c995dee1859d998dea82f7374f2826091dd9cd47def953cae446cd2e" +checksum = "50cc42e0333e05660c3587f3bf9d0478688e15d870fab3346451ce7f8c9fbea5" dependencies = [ "zerofrom-derive", ] @@ -3835,20 +2964,6 @@ name = "zeroize" version = "1.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ced3678a2879b30306d323f4542626697a464a97c0a07c9aebf7ebca65cd4dde" -dependencies = [ - "zeroize_derive", -] - -[[package]] -name = "zeroize_derive" -version = "1.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ce36e65b0d2999d2aafac989fb249189a141aee1f53c612c1f37d72631959f69" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.101", -] [[package]] name = "zerotrie" @@ -3883,36 +2998,6 @@ dependencies = [ "syn 2.0.101", ] -[[package]] -name = "zip" -version = "2.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fabe6324e908f85a1c52063ce7aa26b68dcb7eb6dbc83a2d148403c9bc3eba50" -dependencies = [ - "aes", - "arbitrary", - "bzip2", - "constant_time_eq", - "crc32fast", - "crossbeam-utils", - "deflate64", - "displaydoc", - "flate2", - "getrandom 0.3.3", - "hmac", - "indexmap", - "lzma-rs", - "memchr", - "pbkdf2", - "sha1", - "thiserror 2.0.12", - "time 0.3.41", - "xz2", - "zeroize", - "zopfli", - "zstd", -] - [[package]] name = "zip" version = "3.0.0" diff --git a/Cargo.toml b/Cargo.toml index 57e82db1..3f2bcfaf 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -54,7 +54,7 @@ strum = { version = "0.27", features = ["derive"] } file-rotate = "0.8" simple-stopwatch = "0.1" enum-flags = "0.4" -remove_dir_all = { git = "https://github.com/caesay/remove_dir_all.git", features = ["log"] } +remove_dir_all = "1.0" sha1 = "0.10" sha2 = "0.10" sha1_smol = "1.0" @@ -84,9 +84,12 @@ core-foundation = "0.10" core-foundation-sys = "0.8" uuid = { version = "1.13.1", features = ["v4", "fast-rng", "macro-diagnostics"] } walkdir = "2.5" -mtzip = "=4.0.2" -ripunzip = "=2.0.1" -zerofrom = "=0.1.5" +rayon = "1.6" +progress-streams = "1.1" +flate2 = { version = "1.0", default-features = false } +# mtzip = "=4.0.2" +# ripunzip = "=2.0.1" +# zerofrom = "=0.1.5" # default to small, optimized workspace release binaries [profile.release] diff --git a/src/bins/Cargo.toml b/src/bins/Cargo.toml index 8284b10b..6bdbd77d 100644 --- a/src/bins/Cargo.toml +++ b/src/bins/Cargo.toml @@ -67,9 +67,9 @@ zstd.workspace = true zip.workspace = true walkdir.workspace = true sha1_smol.workspace = true -mtzip.workspace = true -ripunzip.workspace = true -zerofrom.workspace = true +rayon.workspace = true +progress-streams.workspace = true +flate2.workspace = true [target.'cfg(target_os="linux")'.dependencies] waitpid-any.workspace = true diff --git 
a/src/bins/src/commands/patch.rs b/src/bins/src/commands/patch.rs index c8cf4d14..92a54eb5 100644 --- a/src/bins/src/commands/patch.rs +++ b/src/bins/src/commands/patch.rs @@ -1,12 +1,10 @@ +use crate::shared::fastzip; use anyhow::{anyhow, bail, Result}; -use mtzip::level::CompressionLevel; -use ripunzip::{NullProgressReporter, UnzipEngine, UnzipOptions}; use std::{ collections::HashSet, fs, io, path::{Path, PathBuf}, }; -use walkdir::WalkDir; pub fn zstd_patch_single, P2: AsRef, P3: AsRef>(old_file: P1, patch_file: P2, output_file: P3) -> Result<()> { let old_file = old_file.as_ref(); @@ -53,22 +51,6 @@ fn fio_highbit64(v: u64) -> u32 { return count; } -fn zip_extract, P2: AsRef>(archive_file: P1, target_dir: P2) -> Result<()> { - let target_dir = target_dir.as_ref().to_path_buf(); - let file = fs::File::open(archive_file)?; - let engine = UnzipEngine::for_file(file)?; - let null_progress = Box::new(NullProgressReporter {}); - let options = UnzipOptions { - filename_filter: None, - progress_reporter: null_progress, - output_directory: Some(target_dir), - password: None, - single_threaded: false, - }; - engine.unzip(options)?; - Ok(()) -} - pub fn delta, P2: AsRef, P3: AsRef>( old_file: P1, delta_files: Vec<&PathBuf>, @@ -98,7 +80,7 @@ pub fn delta, P2: AsRef, P3: AsRef>( info!("Extracting base package for delta patching: {}", temp_dir.to_string_lossy()); let work_dir = temp_dir.join("_work"); fs::create_dir_all(&work_dir)?; - zip_extract(&old_file, &work_dir)?; + fastzip::extract_to_directory(&old_file, &work_dir, None)?; info!("Base package extracted. {} delta packages to apply.", delta_files.len()); @@ -106,9 +88,9 @@ pub fn delta, P2: AsRef, P3: AsRef>( info!("{}: extracting apply delta patch: {}", i, delta_file.to_string_lossy()); let delta_dir = temp_dir.join(format!("delta_{}", i)); fs::create_dir_all(&delta_dir)?; - zip_extract(delta_file, &delta_dir)?; + fastzip::extract_to_directory(&delta_file, &delta_dir, None)?; - let delta_relative_paths = enumerate_files_relative(&delta_dir); + let delta_relative_paths = fastzip::enumerate_files_relative(&delta_dir); let mut visited_paths = HashSet::new(); // apply all the zsdiff patches for files which exist in both the delta and the base package @@ -160,7 +142,7 @@ pub fn delta, P2: AsRef, P3: AsRef>( } // anything in the work dir which was not visited is an old / deleted file and should be removed - let workdir_relative_paths = enumerate_files_relative(&work_dir); + let workdir_relative_paths = fastzip::enumerate_files_relative(&work_dir); for relative_path in &workdir_relative_paths { if !visited_paths.contains(relative_path) { let file_to_delete = work_dir.join(relative_path); @@ -172,32 +154,12 @@ pub fn delta, P2: AsRef, P3: AsRef>( info!("All delta patches applied. 
Asembling output package at: {}", output_file.to_string_lossy()); - let mut zipper = mtzip::ZipArchive::new(); - let workdir_relative_paths = enumerate_files_relative(&work_dir); - for relative_path in &workdir_relative_paths { - zipper - .add_file_from_fs(work_dir.join(&relative_path), relative_path.to_string_lossy().to_string()) - .compression_level(CompressionLevel::fast()) - .done(); - } - let mut file = fs::File::create(&output_file)?; - zipper.write(&mut file)?; + fastzip::compress_directory(&work_dir, &output_file, fastzip::CompressionLevel::fast())?; info!("Successfully applied {} delta patches in {}s.", delta_files.len(), time.s()); Ok(()) } -fn enumerate_files_relative>(dir: P) -> Vec { - WalkDir::new(&dir) - .follow_links(false) - .into_iter() - .filter_map(|entry| entry.ok()) - .filter(|entry| entry.file_type().is_file()) - .map(|entry| entry.path().strip_prefix(&dir).map(|p| p.to_path_buf())) - .filter_map(|entry| entry.ok()) - .collect() -} - // NOTE: this is some code to do checksum verification, but it is not being used // by the current implementation because zstd patching already has checksum verification // diff --git a/src/bins/src/commands/uninstall.rs b/src/bins/src/commands/uninstall.rs index 3c0e0127..aa462583 100644 --- a/src/bins/src/commands/uninstall.rs +++ b/src/bins/src/commands/uninstall.rs @@ -7,49 +7,31 @@ use std::fs::File; pub fn uninstall(locator: &VelopackLocator, delete_self: bool) -> Result<()> { info!("Command: Uninstall"); - + let root_path = locator.get_root_dir(); - fn _uninstall_impl(locator: &VelopackLocator) -> bool { - let root_path = locator.get_root_dir(); - - // the real app could be running at the moment - let _ = shared::force_stop_package(&root_path); + // the real app could be running at the moment + let _ = shared::force_stop_package(&root_path); - let mut finished_with_errors = false; + // run uninstall hook + windows::run_hook(&locator, constants::HOOK_CLI_UNINSTALL, 60); - // run uninstall hook - windows::run_hook(&locator, constants::HOOK_CLI_UNINSTALL, 60); + // remove all shortcuts pointing to the app + windows::remove_all_shortcuts_for_root_dir(&root_path); - // remove all shortcuts pointing to the app - windows::remove_all_shortcuts_for_root_dir(&root_path); + info!("Removing directory '{}'", root_path.to_string_lossy()); + let _ = remove_dir_all::remove_dir_all(&root_path); - info!("Removing directory '{}'", root_path.to_string_lossy()); - if let Err(e) = shared::retry_io(|| remove_dir_all::remove_dir_but_not_self(&root_path)) { - error!("Unable to remove directory, some files may be in use ({}).", e); - finished_with_errors = true; - } - - if let Err(e) = windows::registry::remove_uninstall_entry(&locator) { - error!("Unable to remove uninstall registry entry ({}).", e); - // finished_with_errors = true; - } - - !finished_with_errors + if let Err(e) = windows::registry::remove_uninstall_entry(&locator) { + error!("Unable to remove uninstall registry entry ({}).", e); } // if it returns true, it was a success. // if it returns false, it was completed with errors which the user should be notified of. 
- let result = _uninstall_impl(&locator); let app_title = locator.get_manifest_title(); - if result { - info!("Finished successfully."); - shared::dialogs::show_info(format!("{} Uninstall", app_title).as_str(), None, "The application was successfully uninstalled."); - } else { - error!("Finished with errors."); - shared::dialogs::show_uninstall_complete_with_errors_dialog(&app_title, None); - } + info!("Finished successfully."); + shared::dialogs::show_info(format!("{} Uninstall", app_title).as_str(), None, "The application was successfully uninstalled."); let dead_path = root_path.join(".dead"); let _ = File::create(dead_path); diff --git a/src/bins/src/shared/fastzip/cloneable_seekable_reader.rs b/src/bins/src/shared/fastzip/cloneable_seekable_reader.rs new file mode 100644 index 00000000..611f676f --- /dev/null +++ b/src/bins/src/shared/fastzip/cloneable_seekable_reader.rs @@ -0,0 +1,168 @@ +// Copyright 2022 Google LLC + +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +use std::{ + io::{Read, Seek, SeekFrom}, + sync::{Arc, Mutex}, +}; + +use super::ripunzip::determine_stream_len; + +struct Inner { + /// The underlying Read implementation. + r: R, + /// The position of r. + pos: u64, + /// The length of r, lazily loaded. + len: Option, +} + +impl Inner { + fn new(r: R) -> Self { + Self { r, pos: 0, len: None } + } + + /// Get the length of the data stream. This is assumed to be constant. + fn len(&mut self) -> std::io::Result { + // Return cached size + if let Some(len) = self.len { + return Ok(len); + } + + let len = determine_stream_len(&mut self.r)?; + self.len = Some(len); + Ok(len) + } + + /// Read into the given buffer, starting at the given offset in the data stream. + fn read_at(&mut self, offset: u64, buf: &mut [u8]) -> std::io::Result { + if offset != self.pos { + self.r.seek(SeekFrom::Start(offset))?; + } + let read_result = self.r.read(buf); + if let Ok(bytes_read) = read_result { + // TODO, once stabilised, use checked_add_signed + self.pos += bytes_read as u64; + } + read_result + } +} + +/// A [`Read`] which refers to its underlying stream by reference count, +/// and thus can be cloned cheaply. It supports seeking; each cloned instance +/// maintains its own pointer into the file, and the underlying instance +/// is seeked prior to each read. +pub(crate) struct CloneableSeekableReader { + /// The wrapper around the Read implementation, shared between threads. + inner: Arc>>, + /// The position of _this_ reader. + pos: u64, +} + +impl Clone for CloneableSeekableReader { + fn clone(&self) -> Self { + Self { inner: self.inner.clone(), pos: self.pos } + } +} + +impl CloneableSeekableReader { + /// Constructor. Takes ownership of the underlying `Read`. + /// You should pass in only streams whose total length you expect + /// to be fixed and unchanging. Odd behavior may occur if the length + /// of the stream changes; any subsequent seeks will not take account + /// of the changed stream length. 
+ pub(crate) fn new(r: R) -> Self { + Self { inner: Arc::new(Mutex::new(Inner::new(r))), pos: 0u64 } + } +} + +impl Read for CloneableSeekableReader { + fn read(&mut self, buf: &mut [u8]) -> std::io::Result { + let mut inner = self.inner.lock().unwrap(); + let read_result = inner.read_at(self.pos, buf); + if let Ok(bytes_read) = read_result { + self.pos = self + .pos + .checked_add(bytes_read as u64) + .ok_or(std::io::Error::new(std::io::ErrorKind::InvalidInput, "Read too far forward"))?; + } + read_result + } +} + +impl Seek for CloneableSeekableReader { + fn seek(&mut self, pos: SeekFrom) -> std::io::Result { + let new_pos = match pos { + SeekFrom::Start(pos) => pos, + SeekFrom::End(offset_from_end) => { + let file_len = self.inner.lock().unwrap().len()?; + if -offset_from_end as u64 > file_len { + return Err(std::io::Error::new(std::io::ErrorKind::InvalidInput, "Seek too far backwards")); + } + file_len + .checked_add_signed(offset_from_end) + .ok_or(std::io::Error::new(std::io::ErrorKind::InvalidInput, "Seek too far backward from end"))? + } + SeekFrom::Current(offset_from_pos) => self + .pos + .checked_add_signed(offset_from_pos) + .ok_or(std::io::Error::new(std::io::ErrorKind::InvalidInput, "Seek too far forward from current pos"))?, + }; + self.pos = new_pos; + Ok(new_pos) + } +} + +#[cfg(test)] +mod test { + use super::CloneableSeekableReader; + use std::io::{Cursor, Read, Seek, SeekFrom}; + // use test_log::test; + + #[test] + fn test_cloneable_seekable_reader() -> std::io::Result<()> { + let buf: Vec = vec![0, 1, 2, 3, 4, 5, 6, 7, 8, 9]; + let buf = Cursor::new(buf); + let mut reader = CloneableSeekableReader::new(buf); + let mut out = vec![0; 2]; + reader.read_exact(&mut out)?; + assert_eq!(&out, &[0, 1]); + reader.rewind()?; + reader.read_exact(&mut out)?; + assert_eq!(&out, &[0, 1]); + reader.stream_position()?; + reader.read_exact(&mut out)?; + assert_eq!(&out, &[2, 3]); + reader.seek(SeekFrom::End(-2))?; + reader.read_exact(&mut out)?; + assert_eq!(&out, &[8, 9]); + assert!(reader.read_exact(&mut out).is_err()); + Ok(()) + } + + #[test] + fn test_cloned_independent_positions() -> std::io::Result<()> { + let buf: Vec = vec![0, 1, 2, 3, 4, 5, 6, 7, 8, 9]; + let buf = Cursor::new(buf); + let mut r1 = CloneableSeekableReader::new(buf); + let mut r2 = r1.clone(); + let mut out = vec![0; 2]; + r1.read_exact(&mut out)?; + assert_eq!(&out, &[0, 1]); + r2.read_exact(&mut out)?; + assert_eq!(&out, &[0, 1]); + r1.read_exact(&mut out)?; + assert_eq!(&out, &[2, 3]); + r2.seek(SeekFrom::End(-2))?; + r2.read_exact(&mut out)?; + assert_eq!(&out, &[8, 9]); + r1.read_exact(&mut out)?; + assert_eq!(&out, &[4, 5]); + Ok(()) + } +} diff --git a/src/bins/src/shared/fastzip/mod.rs b/src/bins/src/shared/fastzip/mod.rs new file mode 100644 index 00000000..e990d899 --- /dev/null +++ b/src/bins/src/shared/fastzip/mod.rs @@ -0,0 +1,86 @@ +#![allow(dead_code)] + +mod cloneable_seekable_reader; +mod mtzip; +mod progress_updater; +mod ripunzip; + +use anyhow::Result; +pub use mtzip::level::CompressionLevel; +use ripunzip::{UnzipEngine, UnzipOptions}; +use std::{ + fs::File, + path::{Path, PathBuf}, +}; +use walkdir::WalkDir; + +/// A trait of types which wish to hear progress updates on the unzip. +pub trait UnzipProgressReporter: Sync { + /// Extraction has begun on a file. + fn extraction_starting(&self, _display_name: &str) {} + /// Extraction has finished on a file. + fn extraction_finished(&self, _display_name: &str) {} + /// The total number of compressed bytes we expect to extract. 
+ fn total_bytes_expected(&self, _expected: u64) {} + /// Some bytes of a file have been decompressed. This is probably + /// the best way to display an overall progress bar. This should eventually + /// add up to the number you're given using `total_bytes_expected`. + /// The 'count' parameter is _not_ a running total - you must add up + /// each call to this function into the running total. + /// It's a bit unfortunate that we give compressed bytes rather than + /// uncompressed bytes, but currently we can't calculate uncompressed + /// bytes without downloading the whole zip file first, which rather + /// defeats the point. + fn bytes_extracted(&self, _count: u64) {} +} + +/// A progress reporter which does nothing. +struct NullProgressReporter; + +impl UnzipProgressReporter for NullProgressReporter {} + +pub fn extract_to_directory<'b, P1: AsRef, P2: AsRef>( + archive_file: P1, + target_dir: P2, + progress_reporter: Option>, +) -> Result<()> { + let target_dir = target_dir.as_ref().to_path_buf(); + let file = File::open(archive_file)?; + let engine = UnzipEngine::for_file(file)?; + let null_progress = Box::new(NullProgressReporter {}); + let options = UnzipOptions { + filename_filter: None, + progress_reporter: progress_reporter.unwrap_or(null_progress), + output_directory: Some(target_dir), + password: None, + single_threaded: false, + }; + engine.unzip(options)?; + Ok(()) +} + +pub fn compress_directory<'b, P1: AsRef, P2: AsRef>(target_dir: P1, output_file: P2, level: CompressionLevel) -> Result<()> { + let target_dir = target_dir.as_ref().to_path_buf(); + let mut zipper = mtzip::ZipArchive::new(); + let workdir_relative_paths = enumerate_files_relative(&target_dir); + for relative_path in &workdir_relative_paths { + zipper + .add_file_from_fs(target_dir.join(&relative_path), relative_path.to_string_lossy().to_string()) + .compression_level(level) + .done(); + } + let mut file = File::create(&output_file)?; + zipper.write_with_rayon(&mut file)?; + Ok(()) +} + +pub fn enumerate_files_relative>(dir: P) -> Vec { + WalkDir::new(&dir) + .follow_links(false) + .into_iter() + .filter_map(|entry| entry.ok()) + .filter(|entry| entry.file_type().is_file()) + .map(|entry| entry.path().strip_prefix(&dir).map(|p| p.to_path_buf())) + .filter_map(|entry| entry.ok()) + .collect() +} diff --git a/src/bins/src/shared/fastzip/mtzip/level.rs b/src/bins/src/shared/fastzip/mtzip/level.rs new file mode 100644 index 00000000..f831fce7 --- /dev/null +++ b/src/bins/src/shared/fastzip/mtzip/level.rs @@ -0,0 +1,126 @@ +//! Compression level + +use core::fmt::Display; +use std::error::Error; + +use flate2::Compression; + +/// Compression level that should be used when compressing a file or data. +/// +/// Current compression providers support only levels from 0 to 9, so these are the only ones being +/// supported. +#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)] +pub struct CompressionLevel(u8); + +impl CompressionLevel { + /// Construct a new value of a compression level setting. + /// + /// The integer value must be less than or equal to 9, otherwise `None` is returned + #[inline] + pub const fn new(level: u8) -> Option { + if level <= 9 { Some(Self(level)) } else { None } + } + + /// Construct a new value of a compression level setting without checking the value. 
+ /// + /// # Safety + /// + /// The value must be a valid supported compression level + #[inline] + pub const unsafe fn new_unchecked(level: u8) -> Self { + Self(level) + } + + /// No compression + #[inline] + pub const fn none() -> Self { + Self(0) + } + + /// Fastest compression + #[inline] + pub const fn fast() -> Self { + Self(1) + } + + /// Balanced level with moderate compression and speed. The raw value is 6. + #[inline] + pub const fn balanced() -> Self { + Self(6) + } + + /// Best compression ratio, comes at a worse performance + #[inline] + pub const fn best() -> Self { + Self(9) + } + + /// Get the compression level as an integer + #[inline] + pub const fn get(self) -> u8 { + self.0 + } +} + +impl Default for CompressionLevel { + /// Equivalent to [`Self::balanced`] + fn default() -> Self { + Self::balanced() + } +} + +/// The number for compression level was invalid +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub struct InvalidCompressionLevel(u32); + +impl InvalidCompressionLevel { + /// The value which was supplied + pub fn value(self) -> u32 { + self.0 + } +} + +impl Display for InvalidCompressionLevel { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "Invalid compression level number: {}", self.0) + } +} + +impl Error for InvalidCompressionLevel {} + +impl From for Compression { + #[inline] + fn from(value: CompressionLevel) -> Self { + Compression::new(value.0.into()) + } +} + +impl TryFrom for CompressionLevel { + type Error = InvalidCompressionLevel; + + fn try_from(value: Compression) -> Result { + let level = value.level(); + Self::new( + level + .try_into() + .map_err(|_| InvalidCompressionLevel(level))?, + ) + .ok_or(InvalidCompressionLevel(level)) + } +} + +impl From for u8 { + #[inline] + fn from(value: CompressionLevel) -> Self { + value.0 + } +} + +impl TryFrom for CompressionLevel { + type Error = InvalidCompressionLevel; + + #[inline] + fn try_from(value: u8) -> Result { + Self::new(value).ok_or(InvalidCompressionLevel(value.into())) + } +} diff --git a/src/bins/src/shared/fastzip/mtzip/mod.rs b/src/bins/src/shared/fastzip/mtzip/mod.rs new file mode 100644 index 00000000..a8ee9b52 --- /dev/null +++ b/src/bins/src/shared/fastzip/mtzip/mod.rs @@ -0,0 +1,433 @@ +//! # mtzip +//! +//! MTZIP (Stands for Multi-Threaded ZIP) is a library for making zip archives while utilising all +//! available performance available with multithreading. The amount of threads can be limited by +//! the user or detected automatically. +//! +//! Example usage: +//! +//! ```ignore +//! # use std::path::Path; +//! # use std::fs::File; +//! use mtzip::ZipArchive; +//! +//! // Creating the zipper that holds data and handles compression +//! let mut zipper = ZipArchive::new(); +//! +//! // Adding a file from filesystem +//! zipper.add_file_from_fs( +//! Path::new("input/test_text_file.txt"), +//! "test_text_file.txt".to_owned(), +//! ); +//! +//! // Adding a file with data from a memory location +//! zipper.add_file_from_memory(b"Hello, world!", "hello_world.txt".to_owned()); +//! +//! // Adding a directory and a file to it +//! zipper.add_directory("test_dir".to_owned()); +//! zipper.add_file_from_fs( +//! Path::new("input/file_that_goes_to_a_dir.txt"), +//! "test_dir/file_that_goes_to_a_dir.txt".to_owned(), +//! ); +//! +//! // Writing to a file +//! // First, open the file +//! let mut file = File::create("output.zip").unwrap(); +//! // Then, write to it +//! zipper.write(&mut file); // Amount of threads is chosen automatically +//! 
``` + +use std::{ + borrow::Cow, + io::{Read, Seek, Write}, + num::NonZeroUsize, + panic::{RefUnwindSafe, UnwindSafe}, + path::Path, + sync::{mpsc, Mutex}, +}; + +use level::CompressionLevel; +use rayon::prelude::*; +use zip_archive_parts::{ + data::ZipData, + extra_field::{ExtraField, ExtraFields}, + file::ZipFile, + job::{ZipJob, ZipJobOrigin}, +}; + +pub mod level; +mod platform; +mod zip_archive_parts; + +// TODO: tests, maybe examples + +/// Compression type for the file. Directories always use [`Stored`](CompressionType::Stored). +/// Default is [`Deflate`](CompressionType::Deflate). +#[repr(u16)] +#[derive(Debug, Default, Clone, Copy, PartialEq, Eq)] +pub enum CompressionType { + /// No compression at all, the data is stored as-is. + /// + /// This is used for directories because they have no data (no payload) + Stored = 0, + #[default] + /// Deflate compression, the most common in ZIP files. + Deflate = 8, +} + +/// Builder used to optionally add additional attributes to a file or directory. +/// The default compression type is [`CompressionType::Deflate`] and default compression level is +/// [`CompressionLevel::best`] +#[must_use] +#[derive(Debug)] +pub struct ZipFileBuilder<'a, 'b> { + archive_handle: &'a mut ZipArchive<'b>, + job: ZipJob<'b>, +} + +impl<'a, 'b> ZipFileBuilder<'a, 'b> { + /// Call this when you're done configuring the file entry and it will be added to the job list, + /// or directly into the resulting dataset if it's a directory. Always needs to be called. + pub fn done(self) { + let Self { archive_handle, job } = self; + match &job.data_origin { + ZipJobOrigin::Directory => { + let file = job.into_file().expect("No failing code path"); + archive_handle.push_file(file); + } + _ => archive_handle.push_job(job), + } + } + + /// Read filesystem metadata from filesystem and add the properties to this file. It sets + /// external attributes (as with [`Self::external_attributes`]) and adds extra fields generated + /// with [`ExtraFields::new_from_fs`] + pub fn metadata_from_fs(self, fs_path: &Path) -> std::io::Result { + let metadata = std::fs::metadata(fs_path)?; + let external_attributes = platform::attributes_from_fs(&metadata); + let extra_fields = ExtraFields::new_from_fs(&metadata); + Ok(self.external_attributes(external_attributes).extra_fields(extra_fields)) + } + + /// Add a file comment. + pub fn file_comment(mut self, comment: String) -> Self { + self.job.file_comment = Some(comment); + self + } + + /// Add additional [`ExtraField`]. + pub fn extra_field(mut self, extra_field: ExtraField) -> Self { + self.job.extra_fields.values.push(extra_field); + self + } + + /// Add additional [`ExtraField`]s. + pub fn extra_fields(mut self, extra_fields: impl IntoIterator) -> Self { + self.job.extra_fields.extend(extra_fields); + self + } + + /// Set compression type. Ignored for directories, as they use no compression. + /// + /// Default is [`CompressionType::Deflate`]. + pub fn compression_type(mut self, compression_type: CompressionType) -> Self { + self.job.compression_type = compression_type; + self + } + + /// Set compression level. Ignored for directories, as they use no compression. + /// + /// Default is [`CompressionLevel::best`] + pub fn compression_level(mut self, compression_level: CompressionLevel) -> Self { + self.job.compression_level = compression_level; + self + } + + /// Set external attributes. The format depends on a filesystem and is mostly a legacy + /// mechanism, usually a default value is used if this is not a filesystem source. 
When a file + /// is added from the filesystem, these attributes will be read and used and the ones set wit + /// hthis method are ignored. + pub fn external_attributes(mut self, external_attributes: u16) -> Self { + self.job.external_attributes = external_attributes; + self + } + + /// Set external file attributes from a filesystem item. Use of this method is discouraged in + /// favor of [`Self::metadata_from_fs`], which also sets extra fields which contain modern + /// filesystem attributes instead of using old 16-bit system-dependent format. + pub fn external_attributes_from_fs(mut self, fs_path: &Path) -> std::io::Result { + let metadata = std::fs::metadata(fs_path)?; + self.job.external_attributes = platform::attributes_from_fs(&metadata); + Ok(self) + } + + #[inline] + fn new(archive: &'a mut ZipArchive<'b>, filename: String, origin: ZipJobOrigin<'b>) -> Self { + Self { + archive_handle: archive, + job: ZipJob { + data_origin: origin, + archive_path: filename, + extra_fields: ExtraFields::default(), + file_comment: None, + external_attributes: platform::default_file_attrs(), + compression_type: CompressionType::Deflate, + compression_level: CompressionLevel::best(), + }, + } + } + + #[inline] + fn new_dir(archive: &'a mut ZipArchive<'b>, filename: String) -> Self { + Self { + archive_handle: archive, + job: ZipJob { + data_origin: ZipJobOrigin::Directory, + archive_path: filename, + extra_fields: ExtraFields::default(), + file_comment: None, + external_attributes: platform::default_dir_attrs(), + compression_type: CompressionType::Deflate, + compression_level: CompressionLevel::best(), + }, + } + } +} + +/// Structure that holds the current state of ZIP archive creation. +/// +/// # Lifetimes +/// +/// Because some of the methods allow supplying borrowed data, the lifetimes are used to indicate +/// that [`Self`](ZipArchive) borrows them. If you only provide owned data, such as +/// [`Vec`](Vec) or [`PathBuf`](std::path::PathBuf), you won't have to worry about lifetimes +/// and can simply use `'static`, if you ever need to specify them in your code. +/// +/// The lifetime `'a` is for the borrowed data passed in +/// [`add_file_from_memory`](Self::add_file_from_memory), +/// [`add_file_from_fs`](Self::add_file_from_fs) and +/// [`add_file_from_reader`](Self::add_file_from_reader) +#[derive(Debug, Default)] +pub struct ZipArchive<'a> { + jobs_queue: Vec>, + data: ZipData, +} + +impl<'a> ZipArchive<'a> { + fn push_job(&mut self, job: ZipJob<'a>) { + self.jobs_queue.push(job); + } + + fn push_file(&mut self, file: ZipFile) { + self.data.files.push(file); + } + + /// Create an empty [`ZipArchive`] + #[inline] + pub fn new() -> Self { + Self::default() + } + + /// Add file from filesystem. + /// + /// Opens the file and reads data from it when [`compress`](Self::compress) is called. + /// + /// ``` + /// # use mtzip::ZipArchive; + /// # use std::path::Path; + /// let mut zipper = ZipArchive::new(); + /// zipper + /// .add_file_from_fs(Path::new("input.txt"), "input.txt".to_owned()) + /// .done(); + /// ``` + #[inline] + pub fn add_file_from_fs(&mut self, fs_path: impl Into>, archived_path: String) -> ZipFileBuilder<'_, 'a> { + ZipFileBuilder::new(self, archived_path, ZipJobOrigin::Filesystem { path: fs_path.into() }) + } + + /// Add file with data from memory. + /// + /// The data can be either borrowed or owned by the [`ZipArchive`] struct to avoid lifetime + /// hell. 
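For reference, a sketch of how the builder methods above chain together in practice (the source path, archive name and comment are invented): metadata is read from the filesystem, the compression level is overridden, and nothing is queued until `done()` is called.

```
// Sketch only: assumes `ZipArchive` and `CompressionLevel` are in scope.
fn add_one(zipper: &mut ZipArchive<'_>) -> std::io::Result<()> {
    let src = std::path::Path::new("assets/readme.txt");
    zipper
        .add_file_from_fs(src, "readme.txt".to_owned())
        .metadata_from_fs(src)?                          // external attrs + extra fields from the FS
        .compression_level(CompressionLevel::balanced())
        .file_comment("shipped with the installer".to_owned())
        .done();                                         // entry is only queued once done() runs
    Ok(())
}
```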
+ /// + /// ``` + /// # use mtzip::ZipArchive; + /// # use std::path::Path; + /// let mut zipper = ZipArchive::new(); + /// let data: &[u8] = "Hello, world!".as_ref(); + /// zipper + /// .add_file_from_memory(data, "hello_world.txt".to_owned()) + /// .done(); + /// ``` + #[inline] + pub fn add_file_from_memory(&mut self, data: impl Into>, archived_path: String) -> ZipFileBuilder<'_, 'a> { + ZipFileBuilder::new(self, archived_path, ZipJobOrigin::RawData(data.into())) + } + + /// Add a file with data from a reader. + /// + /// This method takes any type implementing [`Read`] and allows it to have borrowed data (`'r`) + /// + /// ``` + /// # use mtzip::ZipArchive; + /// # use std::path::Path; + /// let mut zipper = ZipArchive::new(); + /// let data_input = std::io::stdin(); + /// zipper + /// .add_file_from_reader(data_input, "stdin_file.txt".to_owned()) + /// .done(); + /// ``` + #[inline] + pub fn add_file_from_reader( + &mut self, + reader: R, + archived_path: String, + ) -> ZipFileBuilder<'_, 'a> { + ZipFileBuilder::new(self, archived_path, ZipJobOrigin::Reader(Box::new(reader))) + } + + /// Add a directory entry. + /// + /// All directories in the tree should be added. This method does not asssociate any filesystem + /// properties to the entry. + /// + /// ``` + /// # use mtzip::ZipArchive; + /// # use std::path::Path; + /// let mut zipper = ZipArchive::new(); + /// zipper.add_directory("test_dir/".to_owned()).done(); + /// ``` + #[inline] + pub fn add_directory(&mut self, archived_path: String) -> ZipFileBuilder<'_, 'a> { + ZipFileBuilder::new_dir(self, archived_path) + } + + /// Compress contents. Will be done automatically on [`write`](Self::write) call if files were + /// added between last compression and [`write`](Self::write) call. Automatically chooses + /// amount of threads to use based on how much are available. + #[inline] + pub fn compress(&mut self) { + self.compress_with_threads(Self::get_threads()); + } + + /// Compress contents. Will be done automatically on + /// [`write_with_threads`](Self::write_with_threads) call if files were added between last + /// compression and [`write`](Self::write). Allows specifying amount of threads that will be + /// used. + /// + /// Example of getting amount of threads that this library uses in + /// [`compress`](Self::compress): + /// + /// ``` + /// # use std::num::NonZeroUsize; + /// # use mtzip::ZipArchive; + /// # let mut zipper = ZipArchive::new(); + /// let threads = std::thread::available_parallelism() + /// .map(NonZeroUsize::get) + /// .unwrap_or(1); + /// + /// zipper.compress_with_threads(threads); + /// ``` + #[inline] + pub fn compress_with_threads(&mut self, threads: usize) { + if !self.jobs_queue.is_empty() { + self.compress_with_consumer(threads, |zip_data, rx| zip_data.files.extend(rx)) + } + } + + /// Write compressed data to a writer (usually a file). Executes [`compress`](Self::compress) + /// if files were added between last [`compress`](Self::compress) call and this call. + /// Automatically chooses the amount of threads cpu has. + #[inline] + pub fn write(&mut self, writer: &mut W) -> std::io::Result<()> { + self.write_with_threads(writer, Self::get_threads()) + } + + /// Write compressed data to a writer (usually a file). Executes + /// [`compress_with_threads`](Self::compress_with_threads) if files were added between last + /// [`compress`](Self::compress) call and this call. Allows specifying amount of threads that + /// will be used. 
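The threading model described here, and implemented by `compress_with_consumer` further down, can be illustrated in isolation. The following is a standalone sketch of the same idea (not the actual implementation): a `Mutex`-guarded job list drained by scoped worker threads, which stream their results through an `mpsc` channel to a single consumer.

```
use std::sync::{mpsc, Mutex};

// Standalone sketch of the fan-out scheme: workers pop jobs from a shared
// queue and send results over a channel while the caller collects them.
fn fan_out<T: Send, R: Send>(jobs: Vec<T>, threads: usize, work: impl Fn(T) -> R + Sync) -> Vec<R> {
    let queue = Mutex::new(jobs);
    std::thread::scope(|s| {
        let (tx, rx) = mpsc::channel();
        for _ in 0..threads {
            let tx = tx.clone();
            let queue = &queue;
            let work = &work;
            s.spawn(move || loop {
                // Hold the lock only long enough to take the next job.
                let next_job = queue.lock().unwrap().pop();
                match next_job {
                    Some(job) => {
                        if tx.send(work(job)).is_err() {
                            break;
                        }
                    }
                    None => break,
                }
            });
        }
        drop(tx); // close the channel once all workers finish
        rx.into_iter().collect()
    })
}
```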
+ /// + /// Example of getting amount of threads that this library uses in [`write`](Self::write): + /// + /// ``` + /// # use std::num::NonZeroUsize; + /// # use mtzip::ZipArchive; + /// # let mut zipper = ZipArchive::new(); + /// let threads = std::thread::available_parallelism() + /// .map(NonZeroUsize::get) + /// .unwrap_or(1); + /// + /// zipper.compress_with_threads(threads); + /// ``` + #[inline] + pub fn write_with_threads(&mut self, writer: &mut W, threads: usize) -> std::io::Result<()> { + if !self.jobs_queue.is_empty() { + self.compress_with_consumer(threads, |zip_data, rx| zip_data.write(writer, rx)) + } else { + self.data.write(writer, std::iter::empty()) + } + } + + /// Starts the compression jobs and passes teh mpsc receiver to teh consumer function, which + /// might either store the data in [`ZipData`] - [`Self::compress_with_threads`]; or write the + /// zip data as soon as it's available - [`Self::write_with_threads`] + fn compress_with_consumer(&mut self, threads: usize, consumer: F) -> T + where + F: FnOnce(&mut ZipData, mpsc::Receiver) -> T, + { + let jobs_drain = Mutex::new(self.jobs_queue.drain(..)); + let jobs_drain_ref = &jobs_drain; + std::thread::scope(|s| { + let rx = { + let (tx, rx) = mpsc::channel(); + for _ in 0..threads { + let thread_tx = tx.clone(); + s.spawn(move || loop { + let next_job = jobs_drain_ref.lock().unwrap().next_back(); + if let Some(job) = next_job { + thread_tx.send(job.into_file().unwrap()).unwrap(); + } else { + break; + } + }); + } + rx + }; + consumer(&mut self.data, rx) + }) + } + + fn get_threads() -> usize { + std::thread::available_parallelism().map(NonZeroUsize::get).unwrap_or(1) + } +} + +impl ZipArchive<'_> { + /// Compress contents and use rayon for parallelism. + /// + /// Uses whatever thread pool this function is executed in. + /// + /// If you want to limit the amount of threads to be used, use + /// [`rayon::ThreadPoolBuilder::num_threads`] and either set it as a global pool, or + /// [`rayon::ThreadPool::install`] the call to this method in it. + pub fn compress_with_rayon(&mut self) { + if !self.jobs_queue.is_empty() { + let files_par_iter = self.jobs_queue.par_drain(..).map(|job| job.into_file().unwrap()); + self.data.files.par_extend(files_par_iter) + } + } + + /// Write the contents to a writer. + /// + /// This method uses teh same thread logic as [`Self::compress_with_rayon`], refer to its + /// documentation for details on how to control the parallelism and thread allocation. + pub fn write_with_rayon(&mut self, writer: &mut W) -> std::io::Result<()> { + if !self.jobs_queue.is_empty() { + let files_par_iter = self.jobs_queue.par_drain(..).map(|job| job.into_file().unwrap()); + self.data.write_rayon(writer, files_par_iter) + } else { + self.data.write_rayon(writer, rayon::iter::empty()) + } + } +} diff --git a/src/bins/src/shared/fastzip/mtzip/platform/mod.rs b/src/bins/src/shared/fastzip/mtzip/platform/mod.rs new file mode 100644 index 00000000..618d2317 --- /dev/null +++ b/src/bins/src/shared/fastzip/mtzip/platform/mod.rs @@ -0,0 +1,96 @@ +//! 
Platform-specific stuff + +use std::fs::Metadata; + +#[cfg(target_os = "windows")] +/// OS - Windows, id 11 per Info-Zip spec +/// Specification version 6.2 +pub(crate) const VERSION_MADE_BY: u16 = (11 << 8) + 62; + +#[cfg(target_os = "macos")] +/// OS - MacOS darwin, id 19 +/// Specification version 6.2 +pub(crate) const VERSION_MADE_BY: u16 = (19 << 8) + 62; + +#[cfg(not(any(target_os = "windows", target_os = "macos")))] +// Fallback +/// OS - Unix assumed, id 3 +/// Specification version 6.2 +pub(crate) const VERSION_MADE_BY: u16 = (3 << 8) + 62; + +#[allow(dead_code)] +pub(crate) const DEFAULT_UNIX_FILE_ATTRS: u16 = 0o100644; +#[allow(dead_code)] +pub(crate) const DEFAULT_UNIX_DIR_ATTRS: u16 = 0o040755; + +#[cfg(target_os = "windows")] +pub(crate) const DEFAULT_WINDOWS_FILE_ATTRS: u16 = 128; +#[cfg(target_os = "windows")] +pub(crate) const DEFAULT_WINDOWS_DIR_ATTRS: u16 = 16; + +#[inline] +#[allow(dead_code)] +const fn convert_attrs(attrs: u32) -> u16 { + attrs as u16 +} + +pub(crate) fn attributes_from_fs(metadata: &Metadata) -> u16 { + #[cfg(target_os = "windows")] + { + use std::os::windows::fs::MetadataExt; + return convert_attrs(metadata.file_attributes()); + } + + #[cfg(target_os = "linux")] + { + use std::os::linux::fs::MetadataExt; + return convert_attrs(metadata.st_mode()); + } + + #[cfg(target_os = "macos")] + { + use std::os::darwin::fs::MetadataExt; + return convert_attrs(metadata.st_mode()); + } + + #[cfg(all(unix, not(target_os = "linux"), not(target_os = "macos")))] + { + use std::os::unix::fs::PermissionsExt; + return convert_attrs(metadata.permissions().mode()); + } + + #[cfg(not(any(target_os = "windows", target_os = "linux", target_os = "macos", unix)))] + { + if metadata.is_dir() { + return DEFAULT_UNIX_DIR_ATTRS; + } else { + return DEFAULT_UNIX_FILE_ATTRS; + } + } +} + +#[cfg(target_os = "windows")] +pub(crate) const fn default_file_attrs() -> u16 { + DEFAULT_WINDOWS_FILE_ATTRS +} + +#[cfg(not(windows))] +pub(crate) const fn default_file_attrs() -> u16 { + DEFAULT_UNIX_FILE_ATTRS +} + +#[cfg(target_os = "windows")] +pub(crate) const fn default_dir_attrs() -> u16 { + DEFAULT_WINDOWS_DIR_ATTRS +} + +#[cfg(any(target_os = "linux", unix))] +#[cfg(not(target_os = "windows"))] +pub(crate) const fn default_dir_attrs() -> u16 { + DEFAULT_UNIX_DIR_ATTRS +} + +#[cfg(not(any(target_os = "windows", target_os = "linux", unix)))] +pub(crate) const fn default_dir_attrs() -> u16 { + 0 +} diff --git a/src/bins/src/shared/fastzip/mtzip/zip_archive_parts/data.rs b/src/bins/src/shared/fastzip/mtzip/zip_archive_parts/data.rs new file mode 100644 index 00000000..877e75d4 --- /dev/null +++ b/src/bins/src/shared/fastzip/mtzip/zip_archive_parts/data.rs @@ -0,0 +1,156 @@ +use std::io::{Seek, Write}; +use std::sync::Mutex; + +use rayon::prelude::*; + +use super::file::{ZipFile, ZipFileNoData}; + +const END_OF_CENTRAL_DIR_SIGNATURE: u32 = 0x06054B50; + +#[derive(Debug, Default)] +pub struct ZipData { + pub files: Vec, +} + +impl ZipData { + pub fn write>( + &mut self, + buf: &mut W, + zip_file_iter: I, + ) -> std::io::Result<()> { + let zip_files = self.write_files_contained_and_iter(buf, zip_file_iter)?; + + let files_amount = super::files_amount_u16(&zip_files); + + let central_dir_offset = super::stream_position_u32(buf)?; + + self.write_central_dir(zip_files, buf)?; + + let central_dir_start = super::stream_position_u32(buf)?; + + self.write_end_of_central_directory( + buf, + central_dir_offset, + central_dir_start, + files_amount, + ) + } + + pub fn write_rayon>( + &mut self, + buf: 
&mut W, + zip_file_iter: I, + ) -> std::io::Result<()> { + let zip_files = self.write_files_contained_and_par_iter(buf, zip_file_iter)?; + + let files_amount = super::files_amount_u16(&zip_files); + + let central_dir_offset = super::stream_position_u32(buf)?; + + self.write_central_dir(zip_files, buf)?; + + let central_dir_start = super::stream_position_u32(buf)?; + + self.write_end_of_central_directory( + buf, + central_dir_offset, + central_dir_start, + files_amount, + ) + } + + #[inline] + fn write_files_contained_and_iter>( + &mut self, + buf: &mut W, + zip_files_iter: I, + ) -> std::io::Result> { + let zip_files = std::mem::take(&mut self.files); + self.write_files_iter(buf, zip_files.into_iter().chain(zip_files_iter)) + } + + #[inline] + pub fn write_files_contained_and_par_iter< + W: Write + Seek + Send, + I: ParallelIterator, + >( + &mut self, + buf: &mut W, + zip_files_iter: I, + ) -> std::io::Result> { + let zip_files = std::mem::take(&mut self.files); + self.write_files_par_iter(buf, zip_files.into_par_iter().chain(zip_files_iter)) + } + + pub fn write_files_iter>( + &mut self, + buf: &mut W, + zip_files: I, + ) -> std::io::Result> { + zip_files + .into_iter() + .map(|zipfile| zipfile.write_local_file_header_with_data_consuming(buf)) + .collect::>>() + } + + pub fn write_files_par_iter>( + &mut self, + buf: &mut W, + zip_files: I, + ) -> std::io::Result> { + let buf = Mutex::new(buf); + zip_files + .map(|zipfile| { + let mut buf_lock = buf.lock().unwrap(); + zipfile.write_local_file_header_with_data_consuming(*buf_lock) + }) + .collect::>>() + } + + fn write_central_dir>( + &self, + zip_files: I, + buf: &mut W, + ) -> std::io::Result<()> { + zip_files + .into_iter() + .try_for_each(|zip_file| zip_file.write_central_directory_entry(buf)) + } + + const FOOTER_LENGTH: usize = 22; + + fn write_end_of_central_directory( + &self, + buf: &mut W, + central_dir_offset: u32, + central_dir_start: u32, + files_amount: u16, + ) -> std::io::Result<()> { + // Temporary in-memory statically sized array + let mut central_dir = [0; Self::FOOTER_LENGTH]; + { + let mut central_dir_buf: &mut [u8] = &mut central_dir; + + // Signature + central_dir_buf.write_all(&END_OF_CENTRAL_DIR_SIGNATURE.to_le_bytes())?; + // number of this disk + central_dir_buf.write_all(&0_u16.to_le_bytes())?; + // number of the disk with start + central_dir_buf.write_all(&0_u16.to_le_bytes())?; + // Number of entries on this disk + central_dir_buf.write_all(&files_amount.to_le_bytes())?; + // Number of entries + central_dir_buf.write_all(&files_amount.to_le_bytes())?; + // Central dir size + central_dir_buf.write_all(&(central_dir_start - central_dir_offset).to_le_bytes())?; + // Central dir offset + central_dir_buf.write_all(¢ral_dir_offset.to_le_bytes())?; + // Comment length + central_dir_buf.write_all(&0_u16.to_le_bytes())?; + } + + buf.write_all(¢ral_dir)?; + + Ok(()) + } +} diff --git a/src/bins/src/shared/fastzip/mtzip/zip_archive_parts/extra_field.rs b/src/bins/src/shared/fastzip/mtzip/zip_archive_parts/extra_field.rs new file mode 100644 index 00000000..4849f28b --- /dev/null +++ b/src/bins/src/shared/fastzip/mtzip/zip_archive_parts/extra_field.rs @@ -0,0 +1,273 @@ +//! ZIP file extra field + +use std::{fs::Metadata, io::Write}; + +/// This is a structure containing [`ExtraField`]s associated with a file or directory in a zip +/// file, mostly used for filesystem properties, and this is the only functionality implemented +/// here. 
+/// +/// The [`new_from_fs`](Self::new_from_fs) method will use the metadata the filesystem provides to +/// construct the collection. +#[derive(Debug, Clone, Default, PartialEq, Eq)] +pub struct ExtraFields { + pub(crate) values: Vec, +} + +impl Extend for ExtraFields { + fn extend>(&mut self, iter: T) { + self.values.extend(iter) + } +} + +impl IntoIterator for ExtraFields { + type Item = as IntoIterator>::Item; + type IntoIter = as IntoIterator>::IntoIter; + + fn into_iter(self) -> Self::IntoIter { + self.values.into_iter() + } +} + +impl ExtraFields { + /// Create a new set of [`ExtraField`]s. [`Self::new_from_fs`] should be preferred. + /// + /// # Safety + /// + /// All fields must have valid values depending on the field type. + pub unsafe fn new(fields: I) -> Self + where + I: IntoIterator, + { + Self { values: fields.into_iter().collect() } + } + + /// This method will use the filesystem metadata to get the properties that can be stored in + /// ZIP [`ExtraFields`]. + /// + /// The behavior is dependent on the target platform. Will return an empty set if the target os + /// is not Windows or Linux and not of UNIX family. + pub fn new_from_fs(metadata: &Metadata) -> Self { + #[cfg(target_os = "windows")] + { + return Self::new_windows(metadata); + } + + #[cfg(target_os = "linux")] + { + return Self::new_linux(metadata); + } + + #[cfg(all(unix, not(target_os = "linux")))] + { + return Self::new_unix(metadata); + } + } + + #[cfg(target_os = "linux")] + fn new_linux(metadata: &Metadata) -> Self { + use std::os::linux::fs::MetadataExt; + + let mod_time = Some(metadata.st_mtime() as i32); + let ac_time = Some(metadata.st_atime() as i32); + let cr_time = Some(metadata.st_ctime() as i32); + + let uid = metadata.st_uid(); + let gid = metadata.st_gid(); + + Self { values: vec![ExtraField::UnixExtendedTimestamp { mod_time, ac_time, cr_time }, ExtraField::UnixAttrs { uid, gid }] } + } + + #[cfg(all(unix, not(target_os = "linux")))] + #[allow(dead_code)] + fn new_unix(metadata: &Metadata) -> Self { + use std::os::unix::fs::MetadataExt; + + let mod_time = Some(metadata.mtime() as i32); + let ac_time = Some(metadata.atime() as i32); + let cr_time = Some(metadata.ctime() as i32); + + let uid = metadata.uid(); + let gid = metadata.gid(); + + Self { values: vec![ExtraField::UnixExtendedTimestamp { mod_time, ac_time, cr_time }, ExtraField::UnixAttrs { uid, gid }] } + } + + #[cfg(target_os = "windows")] + fn new_windows(metadata: &Metadata) -> Self { + use std::os::windows::fs::MetadataExt; + + let mtime = metadata.last_write_time(); + let atime = metadata.last_access_time(); + let ctime = metadata.creation_time(); + + Self { values: vec![ExtraField::Ntfs { mtime, atime, ctime }] } + } + + pub(crate) fn data_length(&self) -> u16 { + self.values.iter().map(|f| 4 + f.field_size::()).sum() + } + + pub(crate) fn write(&self, writer: &mut W) -> std::io::Result<()> { + for field in &self.values { + field.write::<_, CENTRAL_HEADER>(writer)?; + } + Ok(()) + } +} + +/// Extra data that can be associated with a file or directory. +/// +/// This library only implements the filesystem properties in NTFS and UNIX format. +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub enum ExtraField { + /// NTFS file properties. + Ntfs { + /// Last modification timestamp + mtime: u64, + /// Last access timestamp + atime: u64, + /// File/directory creation timestamp + ctime: u64, + }, + /// Info-Zip extended unix timestamp. Each part is optional by definition, but will be + /// populated by [`ExtraFields::new_from_fs`]. 
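To make the on-disk effect of the optional parts concrete, a hypothetical in-module check (the serialisation helpers are private, so this is written as if it sat next to the module's own tests; the timestamp value is made up): an extended-timestamp field carrying all three stamps occupies 13 bytes in the local header but only 5 in the central directory, where access and creation times are omitted, as the serialisation code later in this file shows.

```
// Hypothetical in-module assertion; sizes follow from field_size below.
let field = ExtraField::UnixExtendedTimestamp {
    mod_time: Some(1_700_000_000),
    ac_time: Some(1_700_000_000),
    cr_time: Some(1_700_000_000),
};
assert_eq!(field.field_size::<false>(), 13); // local header: flags + 3 × 4-byte times
assert_eq!(field.field_size::<true>(), 5);   // central directory: flags + mod_time only
```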
+ UnixExtendedTimestamp { + /// Last modification timestamp + mod_time: Option, + /// Last access timestamp + ac_time: Option, + /// Creation timestamp + cr_time: Option, + }, + /// UNIX file/directory attributes defined by Info-Zip. + UnixAttrs { + /// UID of the owner + uid: u32, + /// GID of the group + gid: u32, + }, +} + +const MOD_TIME_PRESENT: u8 = 1; +const AC_TIME_PRESENT: u8 = 1 << 1; +const CR_TIME_PRESENT: u8 = 1 << 2; + +impl ExtraField { + #[inline] + fn header_id(&self) -> u16 { + match self { + Self::Ntfs { mtime: _, atime: _, ctime: _ } => 0x000a, + Self::UnixExtendedTimestamp { mod_time: _, ac_time: _, cr_time: _ } => 0x5455, + Self::UnixAttrs { uid: _, gid: _ } => 0x7875, + } + } + + #[inline] + const fn optional_field_size(field: &Option) -> u16 { + match field { + Some(_) => std::mem::size_of::() as u16, + None => 0, + } + } + + #[inline] + const fn field_size(&self) -> u16 { + match self { + Self::Ntfs { mtime: _, atime: _, ctime: _ } => 32, + Self::UnixExtendedTimestamp { mod_time, ac_time, cr_time } => { + 1 + Self::optional_field_size(mod_time) + { + if !CENTRAL_HEADER { + Self::optional_field_size(ac_time) + Self::optional_field_size(cr_time) + } else { + 0 + } + } + } + Self::UnixAttrs { uid: _, gid: _ } => 11, + } + } + + #[inline] + const fn if_present(val: Option, if_present: u8) -> u8 { + match val { + Some(_) => if_present, + None => 0, + } + } + + const NTFS_FIELD_LEN: usize = 32; + const UNIX_ATTRS_LEN: usize = 11; + + pub(crate) fn write(self, writer: &mut W) -> std::io::Result<()> { + // Header ID + writer.write_all(&self.header_id().to_le_bytes())?; + // Field data size + writer.write_all(&self.field_size::().to_le_bytes())?; + + match self { + Self::Ntfs { mtime, atime, ctime } => { + // Writing to a temporary in-memory array + let mut field = [0; Self::NTFS_FIELD_LEN]; + { + let mut field_buf: &mut [u8] = &mut field; + + // Reserved field + field_buf.write_all(&0_u32.to_le_bytes())?; + + // Tag1 number + field_buf.write_all(&1_u16.to_le_bytes())?; + // Tag1 size + field_buf.write_all(&24_u16.to_le_bytes())?; + + // Mtime + field_buf.write_all(&mtime.to_le_bytes())?; + // Atime + field_buf.write_all(&atime.to_le_bytes())?; + // Ctime + field_buf.write_all(&ctime.to_le_bytes())?; + } + + writer.write_all(&field)?; + } + Self::UnixExtendedTimestamp { mod_time, ac_time, cr_time } => { + let flags = Self::if_present(mod_time, MOD_TIME_PRESENT) + | Self::if_present(ac_time, AC_TIME_PRESENT) + | Self::if_present(cr_time, CR_TIME_PRESENT); + writer.write_all(&[flags])?; + if let Some(mod_time) = mod_time { + writer.write_all(&mod_time.to_le_bytes())?; + } + if !CENTRAL_HEADER { + if let Some(ac_time) = ac_time { + writer.write_all(&ac_time.to_le_bytes())?; + } + if let Some(cr_time) = cr_time { + writer.write_all(&cr_time.to_le_bytes())?; + } + } + } + Self::UnixAttrs { uid, gid } => { + // Writing to a temporary in-memory array + let mut field = [0; Self::UNIX_ATTRS_LEN]; + { + let mut field_buf: &mut [u8] = &mut field; + + // Version of the field + field_buf.write_all(&[1])?; + // UID size + field_buf.write_all(&[4])?; + // UID + field_buf.write_all(&uid.to_le_bytes())?; + // GID size + field_buf.write_all(&[4])?; + // GID + field_buf.write_all(&gid.to_le_bytes())?; + } + + writer.write_all(&field)?; + } + } + + Ok(()) + } +} diff --git a/src/bins/src/shared/fastzip/mtzip/zip_archive_parts/file.rs b/src/bins/src/shared/fastzip/mtzip/zip_archive_parts/file.rs new file mode 100644 index 00000000..14300e9c --- /dev/null +++ 
b/src/bins/src/shared/fastzip/mtzip/zip_archive_parts/file.rs @@ -0,0 +1,201 @@ +use std::io::{Seek, Write}; + +use super::extra_field::ExtraFields; +use super::super::{CompressionType, platform::VERSION_MADE_BY}; + +const LOCAL_FILE_HEADER_SIGNATURE: u32 = 0x04034B50; +const CENTRAL_FILE_HEADER_SIGNATURE: u32 = 0x02014B50; + +const VERSION_NEEDED_TO_EXTRACT: u16 = 20; + +/// Set bit 11 to indicate that the file names are in UTF-8, because all strings in rust are valid +/// UTF-8 +const GENERAL_PURPOSE_BIT_FLAG: u16 = 1 << 11; + +#[derive(Debug)] +pub struct ZipFile { + pub header: ZipFileHeader, + pub data: Vec, +} + +#[derive(Debug)] +pub struct ZipFileHeader { + pub compression_type: CompressionType, + pub crc: u32, + pub uncompressed_size: u32, + pub filename: String, + pub file_comment: Option, + pub external_file_attributes: u32, + pub extra_fields: ExtraFields, +} + +#[derive(Debug)] +pub struct ZipFileNoData { + pub header: ZipFileHeader, + pub local_header_offset: u32, + pub compressed_size: u32, +} + +impl ZipFile { + pub fn write_local_file_header_with_data_consuming( + self, + buf: &mut W, + ) -> std::io::Result { + let local_header_offset = super::stream_position_u32(buf)?; + self.write_local_file_header_and_data(buf)?; + let Self { header, data } = self; + Ok(ZipFileNoData { + header, + local_header_offset, + compressed_size: data.len() as u32, + }) + } + + const LOCAL_FILE_HEADER_LEN: usize = 30; + + pub fn write_local_file_header_and_data(&self, buf: &mut W) -> std::io::Result<()> { + // Writing to a temporary in-memory statically sized array first + let mut header = [0; Self::LOCAL_FILE_HEADER_LEN]; + { + let mut header_buf: &mut [u8] = &mut header; + + // signature + header_buf.write_all(&LOCAL_FILE_HEADER_SIGNATURE.to_le_bytes())?; + // version needed to extract + header_buf.write_all(&VERSION_NEEDED_TO_EXTRACT.to_le_bytes())?; + // general purpose bit flag + header_buf.write_all(&GENERAL_PURPOSE_BIT_FLAG.to_le_bytes())?; + // compression type + header_buf.write_all(&(self.header.compression_type as u16).to_le_bytes())?; + // Last modification time // moved to extra fields + header_buf.write_all(&0_u16.to_le_bytes())?; + // Last modification date // moved to extra fields + header_buf.write_all(&0_u16.to_le_bytes())?; + // crc + header_buf.write_all(&self.header.crc.to_le_bytes())?; + // Compressed size + debug_assert!(self.data.len() <= u32::MAX as usize); + header_buf.write_all(&(self.data.len() as u32).to_le_bytes())?; + // Uncompressed size + header_buf.write_all(&self.header.uncompressed_size.to_le_bytes())?; + // Filename size + debug_assert!(self.header.filename.len() <= u16::MAX as usize); + header_buf.write_all(&(self.header.filename.len() as u16).to_le_bytes())?; + // extra field size + header_buf.write_all( + &self + .header + .extra_fields + .data_length::() + .to_le_bytes(), + )?; + } + + buf.write_all(&header)?; + + // Filename + buf.write_all(self.header.filename.as_bytes())?; + // Extra field + self.header.extra_fields.write::<_, false>(buf)?; + + // Data + buf.write_all(&self.data)?; + + Ok(()) + } + + #[inline] + pub fn directory( + mut name: String, + extra_fields: ExtraFields, + external_attributes: u16, + file_comment: Option, + ) -> Self { + if !(name.ends_with('/') || name.ends_with('\\')) { + name += "/" + }; + Self { + header: ZipFileHeader { + compression_type: CompressionType::Stored, + crc: 0, + uncompressed_size: 0, + filename: name, + external_file_attributes: (external_attributes as u32) << 16, + extra_fields, + file_comment, + }, + data: 
vec![], + } + } +} + +impl ZipFileNoData { + const CENTRAL_DIR_ENTRY_LEN: usize = 46; + + pub fn write_central_directory_entry(&self, buf: &mut W) -> std::io::Result<()> { + // Writing to a temporary in-memory statically sized array first + let mut central_dir_entry_header = [0; Self::CENTRAL_DIR_ENTRY_LEN]; + { + let mut central_dir_entry_buf: &mut [u8] = &mut central_dir_entry_header; + + // signature + central_dir_entry_buf.write_all(&CENTRAL_FILE_HEADER_SIGNATURE.to_le_bytes())?; + // version made by + central_dir_entry_buf.write_all(&VERSION_MADE_BY.to_le_bytes())?; + // version needed to extract + central_dir_entry_buf.write_all(&VERSION_NEEDED_TO_EXTRACT.to_le_bytes())?; + // general purpose bit flag + central_dir_entry_buf.write_all(&GENERAL_PURPOSE_BIT_FLAG.to_le_bytes())?; + // compression type + central_dir_entry_buf + .write_all(&(self.header.compression_type as u16).to_le_bytes())?; + // Last modification time // moved to extra fields + central_dir_entry_buf.write_all(&0_u16.to_le_bytes())?; + // Last modification date // moved to extra fields + central_dir_entry_buf.write_all(&0_u16.to_le_bytes())?; + // crc + central_dir_entry_buf.write_all(&self.header.crc.to_le_bytes())?; + // Compressed size + central_dir_entry_buf.write_all(&self.compressed_size.to_le_bytes())?; + // Uncompressed size + central_dir_entry_buf.write_all(&self.header.uncompressed_size.to_le_bytes())?; + // Filename size + debug_assert!(self.header.filename.len() <= u16::MAX as usize); + central_dir_entry_buf.write_all(&(self.header.filename.len() as u16).to_le_bytes())?; + // extra field size + central_dir_entry_buf + .write_all(&self.header.extra_fields.data_length::().to_le_bytes())?; + // comment size + central_dir_entry_buf.write_all( + &(self + .header + .file_comment + .as_ref() + .map(|fc| fc.len()) + .unwrap_or(0) as u16) + .to_le_bytes(), + )?; + // disk number start + central_dir_entry_buf.write_all(&0_u16.to_le_bytes())?; + // internal file attributes + central_dir_entry_buf.write_all(&0_u16.to_le_bytes())?; + // external file attributes + central_dir_entry_buf.write_all(&self.header.external_file_attributes.to_le_bytes())?; + // relative offset of local header + central_dir_entry_buf.write_all(&self.local_header_offset.to_le_bytes())?; + } + + buf.write_all(¢ral_dir_entry_header)?; + + // Filename + buf.write_all(self.header.filename.as_bytes())?; + // Extra field + self.header.extra_fields.write::<_, true>(buf)?; + // File comment + if let Some(file_comment) = &self.header.file_comment { + buf.write_all(file_comment.as_bytes())?; + } + + Ok(()) + } +} diff --git a/src/bins/src/shared/fastzip/mtzip/zip_archive_parts/job.rs b/src/bins/src/shared/fastzip/mtzip/zip_archive_parts/job.rs new file mode 100644 index 00000000..cf5a3653 --- /dev/null +++ b/src/bins/src/shared/fastzip/mtzip/zip_archive_parts/job.rs @@ -0,0 +1,179 @@ +use std::{ + borrow::Cow, + fs::File, + io::Read, + panic::{RefUnwindSafe, UnwindSafe}, + path::Path, +}; + +use flate2::{CrcReader, read::DeflateEncoder}; + +use super::{extra_field::ExtraFields, file::ZipFile}; +use super::super::{ + CompressionType, level::CompressionLevel, platform::attributes_from_fs, + zip_archive_parts::file::ZipFileHeader, +}; + +pub enum ZipJobOrigin<'a> { + Directory, + Filesystem { path: Cow<'a, Path> }, + RawData(Cow<'a, [u8]>), + Reader(Box), +} + +impl core::fmt::Debug for ZipJobOrigin<'_> { + #[inline] + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + match self { + Self::Directory => f.write_str("Directory"), + 
Self::Filesystem { path } => f.debug_struct("Filesystem").field("path", &path).finish(), + Self::RawData(raw_data) => f.debug_tuple("RawData").field(&raw_data).finish(), + Self::Reader(_reader) => f.debug_tuple("Reader").finish_non_exhaustive(), + } + } +} + +#[derive(Debug)] +struct FileDigest { + data: Vec, + uncompressed_size: u32, + crc: u32, +} + +#[derive(Debug)] +pub struct ZipJob<'a> { + pub data_origin: ZipJobOrigin<'a>, + pub extra_fields: ExtraFields, + pub archive_path: String, + pub file_comment: Option, + pub external_attributes: u16, + /// Ignored when [`data_origin`](Self::data_origin) is a [`ZipJobOrigin::Directory`] + pub compression_level: CompressionLevel, + /// Ignored when [`data_origin`](Self::data_origin) is a [`ZipJobOrigin::Directory`] + pub compression_type: CompressionType, +} + +impl ZipJob<'_> { + fn compress_file( + source: R, + uncompressed_size_approx: Option, + compression_type: CompressionType, + compression_level: CompressionLevel, + ) -> std::io::Result { + let mut crc_reader = CrcReader::new(source); + let mut data = Vec::with_capacity(uncompressed_size_approx.unwrap_or(0) as usize); + let uncompressed_size = match compression_type { + CompressionType::Deflate => { + let mut encoder = DeflateEncoder::new(&mut crc_reader, compression_level.into()); + encoder.read_to_end(&mut data)?; + encoder.total_in() as usize + } + CompressionType::Stored => crc_reader.read_to_end(&mut data)?, + }; + debug_assert!(uncompressed_size <= u32::MAX as usize); + let uncompressed_size = uncompressed_size as u32; + data.shrink_to_fit(); + let crc = crc_reader.crc().sum(); + Ok(FileDigest { + data, + uncompressed_size, + crc, + }) + } + + pub fn into_file(self) -> std::io::Result { + match self.data_origin { + ZipJobOrigin::Directory => Ok(ZipFile::directory( + self.archive_path, + self.extra_fields, + self.external_attributes, + self.file_comment, + )), + ZipJobOrigin::Filesystem { path } => { + let file = File::open(path).unwrap(); + let file_metadata = file.metadata().unwrap(); + let uncompressed_size_approx = file_metadata.len(); + debug_assert!(uncompressed_size_approx <= u32::MAX.into()); + let uncompressed_size_approx = uncompressed_size_approx as u32; + let external_file_attributes = attributes_from_fs(&file_metadata); + let mut extra_fields = ExtraFields::new_from_fs(&file_metadata); + extra_fields.extend(self.extra_fields); + + let FileDigest { + data, + uncompressed_size, + crc, + } = Self::compress_file( + file, + Some(uncompressed_size_approx), + self.compression_type, + self.compression_level, + )?; + Ok(ZipFile { + header: ZipFileHeader { + compression_type: CompressionType::Deflate, + crc, + uncompressed_size, + filename: self.archive_path, + external_file_attributes: (external_file_attributes as u32) << 16, + extra_fields, + file_comment: self.file_comment, + }, + data, + }) + } + ZipJobOrigin::RawData(data) => { + let uncompressed_size_approx = data.len(); + debug_assert!(uncompressed_size_approx <= u32::MAX as usize); + let uncompressed_size_approx = uncompressed_size_approx as u32; + + let FileDigest { + data, + uncompressed_size, + crc, + } = Self::compress_file( + data.as_ref(), + Some(uncompressed_size_approx), + self.compression_type, + self.compression_level, + )?; + Ok(ZipFile { + header: ZipFileHeader { + compression_type: CompressionType::Deflate, + crc, + uncompressed_size, + filename: self.archive_path, + external_file_attributes: (self.external_attributes as u32) << 16, + extra_fields: self.extra_fields, + file_comment: self.file_comment, + }, + 
data, + }) + } + ZipJobOrigin::Reader(reader) => { + let FileDigest { + data, + uncompressed_size, + crc, + } = Self::compress_file( + reader, + None, + self.compression_type, + self.compression_level, + )?; + Ok(ZipFile { + header: ZipFileHeader { + compression_type: CompressionType::Deflate, + crc, + uncompressed_size, + filename: self.archive_path, + external_file_attributes: (self.external_attributes as u32) << 16, + extra_fields: self.extra_fields, + file_comment: self.file_comment, + }, + data, + }) + } + } + } +} diff --git a/src/bins/src/shared/fastzip/mtzip/zip_archive_parts/mod.rs b/src/bins/src/shared/fastzip/mtzip/zip_archive_parts/mod.rs new file mode 100644 index 00000000..e2f6b8e4 --- /dev/null +++ b/src/bins/src/shared/fastzip/mtzip/zip_archive_parts/mod.rs @@ -0,0 +1,17 @@ +pub mod data; +pub mod extra_field; +pub mod file; +pub mod job; +use std::io::Seek; +#[inline] +pub fn stream_position_u32(buf: &mut W) -> std::io::Result { + let offset = buf.stream_position()?; + debug_assert!(offset <= u32::MAX.into()); + Ok(offset as u32) +} +#[inline] +pub fn files_amount_u16(files: &[T]) -> u16 { + let amount = files.len(); + debug_assert!(amount <= u16::MAX as usize); + amount as u16 +} diff --git a/src/bins/src/shared/fastzip/progress_updater.rs b/src/bins/src/shared/fastzip/progress_updater.rs new file mode 100644 index 00000000..c7385bf4 --- /dev/null +++ b/src/bins/src/shared/fastzip/progress_updater.rs @@ -0,0 +1,142 @@ +// Copyright 2023 Google LLC + +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +use std::cmp::min; + +/// A struct which can issue periodic updates indicating progress towards +/// an external total, based on updates towards an internal goal. +pub struct ProgressUpdater { + callback: F, + internal_progress: u64, + per_update_internal: u64, + update_external_amount: u64, + external_updates_sent: u64, + remainder_external: u64, + internal_total: u64, +} + +impl ProgressUpdater { + /// Create a new progress updater, with a callback to be called periodically. + pub fn new(callback: F, external_total: u64, internal_total: u64, per_update_internal: u64) -> Self { + let per_update_internal = min(internal_total, per_update_internal); + let total_updates_expected = if per_update_internal == 0 { 0 } else { internal_total / per_update_internal }; + let (update_external_amount, remainder_external) = if total_updates_expected == 0 { + (0, external_total) + } else { + (external_total / total_updates_expected, external_total % total_updates_expected) + }; + Self { + callback, + internal_progress: 0u64, + per_update_internal, + update_external_amount, + external_updates_sent: 0u64, + remainder_external, + internal_total, + } + } + + /// Indicate some progress towards the internal goal. May call back the + /// external callback function to show some progress towards the external + /// goal. + pub fn progress(&mut self, amount_internal: u64) { + self.internal_progress += amount_internal; + self.send_due_updates(); + } + + fn send_due_updates(&mut self) { + let updates_due = if self.per_update_internal == 0 { 0 } else { self.internal_progress / self.per_update_internal }; + while updates_due > self.external_updates_sent { + (self.callback)(self.update_external_amount); + self.external_updates_sent += 1; + } + } + + /// Indicate completion of the task. Fully update the callback towards the + /// external state. 
+ pub fn finish(&mut self) { + self.internal_progress = self.internal_total; + self.send_due_updates(); + if self.remainder_external > 0 { + (self.callback)(self.remainder_external); + } + } +} + +#[test] +fn test_progress_updater() { + let amount_received = std::rc::Rc::new(std::cell::RefCell::new(0u64)); + let mut progresser = ProgressUpdater::new( + |progress| { + *(amount_received.borrow_mut()) += progress; + }, + 100, + 1000, + 100, + ); + assert_eq!(*amount_received.borrow(), 0); + progresser.progress(1); + assert_eq!(*amount_received.borrow(), 0); + progresser.progress(100); + assert_eq!(*amount_received.borrow(), 10); + progresser.progress(800); + assert_eq!(*amount_received.borrow(), 90); + progresser.finish(); + assert_eq!(*amount_received.borrow(), 100); +} + +#[test] +fn test_progress_updater_zero_external() { + let amount_received = std::rc::Rc::new(std::cell::RefCell::new(0u64)); + let mut progresser = ProgressUpdater::new( + |progress| { + *(amount_received.borrow_mut()) += progress; + }, + 0, + 1000, + 100, + ); + assert_eq!(*amount_received.borrow(), 0); + progresser.progress(1); + progresser.progress(800); + progresser.finish(); + assert_eq!(*amount_received.borrow(), 0); +} + +#[test] +fn test_progress_updater_small_internal() { + let amount_received = std::rc::Rc::new(std::cell::RefCell::new(0u64)); + let mut progresser = ProgressUpdater::new( + |progress| { + *(amount_received.borrow_mut()) += progress; + }, + 100, + 5, + 100, + ); + assert_eq!(*amount_received.borrow(), 0); + progresser.progress(1); + progresser.finish(); + assert_eq!(*amount_received.borrow(), 100); +} + +#[test] +fn test_progress_updater_zero_internal() { + let amount_received = std::rc::Rc::new(std::cell::RefCell::new(0u64)); + let mut progresser = ProgressUpdater::new( + |progress| { + *(amount_received.borrow_mut()) += progress; + }, + 100, + 0, + 100, + ); + assert_eq!(*amount_received.borrow(), 0); + progresser.finish(); + assert_eq!(*amount_received.borrow(), 100); +} diff --git a/src/bins/src/shared/fastzip/ripunzip.rs b/src/bins/src/shared/fastzip/ripunzip.rs new file mode 100644 index 00000000..7c5096a3 --- /dev/null +++ b/src/bins/src/shared/fastzip/ripunzip.rs @@ -0,0 +1,327 @@ +// Copyright 2022 Google LLC + +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +use std::{ + borrow::Cow, + fs::File, + io::{ErrorKind, Read, Seek, SeekFrom}, + path::{Path, PathBuf}, + sync::Mutex, +}; + +use anyhow::{Context, Result}; +use rayon::prelude::*; +use zip::{read::ZipFile, ZipArchive}; + +use super::{cloneable_seekable_reader::CloneableSeekableReader, progress_updater::ProgressUpdater, UnzipProgressReporter}; + +pub(crate) fn determine_stream_len(stream: &mut R) -> std::io::Result { + let old_pos = stream.stream_position()?; + let len = stream.seek(SeekFrom::End(0))?; + if old_pos != len { + stream.seek(SeekFrom::Start(old_pos))?; + } + Ok(len) +} + +/// Options for unzipping. +pub struct UnzipOptions<'a, 'b> { + /// The destination directory. + pub output_directory: Option, + /// Password if encrypted. + pub password: Option, + /// Whether to run in single-threaded mode. + pub single_threaded: bool, + /// A filename filter, optionally + pub filename_filter: Option>, + /// An object to receive notifications of unzip progress. 
+ pub progress_reporter: Box, +} + +/// An object which can unzip a zip file, in its entirety, from a local +/// file or from a network stream. It tries to do this in parallel wherever +/// possible. +pub struct UnzipEngine { + zipfile: Box, + compressed_length: u64, + directory_creator: DirectoryCreator, +} + +/// Code which can determine whether to unzip a given filename. +pub trait FilenameFilter { + /// Returns true if the given filename should be unzipped. + fn should_unzip(&self, filename: &str) -> bool; +} + +/// The underlying engine used by the unzipper. This is different +/// for files and URIs. +trait UnzipEngineImpl { + fn unzip(&mut self, options: UnzipOptions, directory_creator: &DirectoryCreator) -> Vec; + + // Due to lack of RPITIT we'll return a Vec here + fn list(&self) -> Result, anyhow::Error>; +} + +/// Engine which knows how to unzip a file. +#[derive(Clone)] +struct UnzipFileEngine(ZipArchive>); + +impl UnzipEngineImpl for UnzipFileEngine { + fn unzip(&mut self, options: UnzipOptions, directory_creator: &DirectoryCreator) -> Vec { + unzip_serial_or_parallel(self.0.len(), options, directory_creator, || self.0.clone(), || {}) + } + + fn list(&self) -> Result, anyhow::Error> { + list(&self.0) + } +} + +impl UnzipEngine { + /// Create an unzip engine which knows how to unzip a file. + pub fn for_file(mut zipfile: File) -> Result { + // The following line doesn't actually seem to make any significant + // performance difference. + // let zipfile = BufReader::new(zipfile); + let compressed_length = determine_stream_len(&mut zipfile)?; + let zipfile = CloneableSeekableReader::new(zipfile); + Ok(Self { + zipfile: Box::new(UnzipFileEngine(ZipArchive::new(zipfile)?)), + compressed_length, + directory_creator: DirectoryCreator::default(), + }) + } + + /// The total compressed length that we expect to retrieve over + /// the network or from the compressed file. + pub fn zip_length(&self) -> u64 { + self.compressed_length + } + + // Perform the unzip. + pub fn unzip(mut self, options: UnzipOptions) -> Result<()> { + log::debug!("Starting extract"); + options.progress_reporter.total_bytes_expected(self.compressed_length); + let errors = self.zipfile.unzip(options, &self.directory_creator); + // Return the first error code, if any. + errors.into_iter().next().map(Result::Err).unwrap_or(Ok(())) + } + + /// List the filenames in the archive + pub fn list(self) -> Result> { + // In future this might be a more dynamic iterator type. + self.zipfile.list().map(|mut v| { + // Names are returned in a HashMap iteration order so let's + // sort thme to be more reasonable + v.sort(); + v.into_iter() + }) + } +} + +/// Return a list of filenames from the zip. For now this is infallible +/// but provide the option of an error code in case we do something +/// smarter in future. 
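A sketch of how the engine above might be driven with a filename filter (the filter, paths and no-op reporter are invented for the example; trait-object bounds on the option fields are assumed to accept these boxes):

```
// Sketch only: assumes UnzipEngine, UnzipOptions, FilenameFilter and
// UnzipProgressReporter are in scope from this module.
struct DllOnly;

impl FilenameFilter for DllOnly {
    fn should_unzip(&self, filename: &str) -> bool {
        filename.ends_with(".dll")
    }
}

struct Quiet;
impl UnzipProgressReporter for Quiet {} // all methods have default no-op bodies

fn unzip_dlls(zip_path: &std::path::Path, dest: std::path::PathBuf) -> anyhow::Result<()> {
    let engine = UnzipEngine::for_file(std::fs::File::open(zip_path)?)?;
    let options = UnzipOptions {
        output_directory: Some(dest),
        password: None,
        single_threaded: true, // filtered extraction currently runs single-threaded anyway
        filename_filter: Some(Box::new(DllOnly)),
        progress_reporter: Box::new(Quiet),
    };
    engine.unzip(options)
}
```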
+fn list<'a, T: Read + Seek + 'a>(zip_archive: &ZipArchive) -> Result> { + Ok(zip_archive.file_names().map(|s| s.to_string()).collect()) +} + +fn unzip_serial_or_parallel<'a, T: Read + Seek + 'a>( + len: usize, + options: UnzipOptions, + directory_creator: &DirectoryCreator, + get_ziparchive_clone: impl Fn() -> ZipArchive + Sync, + // Call when a file is going to be skipped + file_skip_callback: impl Fn() + Sync + Send + Clone, +) -> Vec { + let progress_reporter: &dyn UnzipProgressReporter = options.progress_reporter.as_ref(); + match (options.filename_filter, options.single_threaded) { + (None, true) => (0..len) + .map(|i| { + extract_file_by_index( + &get_ziparchive_clone, + i, + &options.output_directory, + &options.password, + progress_reporter, + directory_creator, + ) + }) + .filter_map(Result::err) + .collect(), + (None, false) => { + // We use par_bridge here rather than into_par_iter because it turns + // out to better preserve ordering of the IDs in the input range, + // i.e. we're more likely to ask our initial threads to act upon + // file IDs 0, 1, 2, 3, 4, 5 rather than 0, 1000, 2000, 3000 etc. + // On a device which is CPU-bound or IO-bound (rather than network + // bound) that's beneficial because we can start to decompress + // and write data to disk as soon as it arrives from the network. + (0..len) + .par_bridge() + .map(|i| { + extract_file_by_index( + &get_ziparchive_clone, + i, + &options.output_directory, + &options.password, + progress_reporter, + directory_creator, + ) + }) + .filter_map(Result::err) + .collect() + } + (Some(filename_filter), single_threaded) => { + // If we have a filename filter, an easy thing would be to + // iterate through each file index as above, and check to see if its + // name matches. Unfortunately, that seeks all over the place + // to get the filename from the local header. + // Instead, let's get a list of the filenames we need + // and request them from the zip library directly. + // As we can't predict their order in the file, this may involve + // arbitrary rewinds, so let's do it single-threaded. + if !single_threaded { + log::warn!("Unzipping specific files - assuming --single-threaded since we currently cannot unzip specific files in a multi-threaded mode. If you need that, consider launching multiple copies of ripunzip in parallel."); + } + let mut filenames: Vec<_> = get_ziparchive_clone() + .file_names() + .filter(|name| filename_filter.as_ref().should_unzip(name)) + .map(|s| s.to_string()) + .collect(); + // The filenames returned by the file_names() method above are in + // HashMap iteration order (i.e. random). To avoid creating lots + // of HTTPS streams for files which are nearby each other in the + // zip, we'd ideally extract them in order of file position. + // We have no way of knowing file position (without iterating the + // whole file) so instead let's sort them and hope that files were + // zipped in alphabetical order, or close to it. If we're wrong, + // we'll just end up rewinding, that is, creating extra redundant + // HTTP(S) streams. 
+ filenames.sort(); + log::info!("Will unzip {} matching filenames", filenames.len()); + file_skip_callback(); + + // let progress_reporter: &dyn UnzipProgressReporter = options.progress_reporter.as_ref(); + filenames + .into_iter() + .map(|name| { + let myzip: &mut zip::ZipArchive = &mut get_ziparchive_clone(); + let file: ZipFile = match &options.password { + None => myzip.by_name(&name)?, + Some(string) => myzip.by_name_decrypt(&name, string.as_bytes())?, + }; + let r = extract_file(file, &options.output_directory, progress_reporter, directory_creator); + file_skip_callback(); + r + }) + .filter_map(Result::err) + .collect() + } + } +} + +fn extract_file_by_index<'a, T: Read + Seek + 'a>( + get_ziparchive_clone: impl Fn() -> ZipArchive + Sync, + i: usize, + output_directory: &Option, + password: &Option, + progress_reporter: &dyn UnzipProgressReporter, + directory_creator: &DirectoryCreator, +) -> Result<(), anyhow::Error> { + let myzip: &mut zip::ZipArchive = &mut get_ziparchive_clone(); + let file: ZipFile = match password { + None => myzip.by_index(i)?, + Some(string) => myzip.by_index_decrypt(i, string.as_bytes())?, + }; + extract_file(file, output_directory, progress_reporter, directory_creator) +} + +fn extract_file( + file: ZipFile, + output_directory: &Option, + progress_reporter: &dyn UnzipProgressReporter, + directory_creator: &DirectoryCreator, +) -> Result<(), anyhow::Error> { + let name = file.enclosed_name().as_deref().map(Path::to_string_lossy).unwrap_or_else(|| Cow::Borrowed("")).to_string(); + extract_file_inner(file, output_directory, progress_reporter, directory_creator).with_context(|| format!("Failed to extract {name}")) +} + +/// Extracts a file from a zip file. +fn extract_file_inner( + mut file: ZipFile, + output_directory: &Option, + progress_reporter: &dyn UnzipProgressReporter, + directory_creator: &DirectoryCreator, +) -> Result<()> { + let name = file.enclosed_name().ok_or_else(|| std::io::Error::new(ErrorKind::Unsupported, "path not safe to extract"))?; + let display_name = name.display().to_string(); + let out_path = match output_directory { + Some(output_directory) => output_directory.join(name), + None => name, + }; + progress_reporter.extraction_starting(&display_name); + log::debug!("Start extract of file at {:x}, length {:x}, name {}", file.data_start(), file.compressed_size(), display_name); + if file.name().ends_with('/') { + directory_creator.create_dir_all(&out_path)?; + } else { + if let Some(parent) = out_path.parent() { + directory_creator.create_dir_all(parent)?; + } + let out_file = File::create(&out_path).with_context(|| "Failed to create file")?; + // Progress bar strategy. The overall progress across the entire zip file must be + // denoted in terms of *compressed* bytes, since at the outset we don't know the uncompressed + // size of each file. Yet, within a given file, we update progress based on the bytes + // of uncompressed data written, once per 1MB, because that's the information that we happen + // to have available. So, calculate how many compressed bytes relate to 1MB of uncompressed + // data, and the remainder. 
+ let uncompressed_size = file.size(); + let compressed_size = file.compressed_size(); + let mut progress_updater = ProgressUpdater::new( + |external_progress| { + progress_reporter.bytes_extracted(external_progress); + }, + compressed_size, + uncompressed_size, + 1024 * 1024, + ); + let mut out_file = progress_streams::ProgressWriter::new(out_file, |bytes_written| progress_updater.progress(bytes_written as u64)); + // Using a BufWriter here doesn't improve performance even on a VM with + // spinny disks. + std::io::copy(&mut file, &mut out_file).with_context(|| "Failed to write directory")?; + progress_updater.finish(); + } + #[cfg(unix)] + { + use std::os::unix::fs::PermissionsExt; + if let Some(mode) = file.unix_mode() { + std::fs::set_permissions(&out_path, std::fs::Permissions::from_mode(mode)).with_context(|| "Failed to set permissions")?; + } + } + log::debug!("Finished extract of file at {:x}, length {:x}, name {}", file.data_start(), file.compressed_size(), display_name); + progress_reporter.extraction_finished(&display_name); + Ok(()) +} + +/// An engine used to ensure we don't conflict in creating directories +/// between threads +#[derive(Default)] +struct DirectoryCreator(Mutex<()>); + +impl DirectoryCreator { + fn create_dir_all(&self, path: &Path) -> Result<()> { + // Fast path - avoid locking if the directory exists + if path.exists() { + return Ok(()); + } + let _exclusivity = self.0.lock().unwrap(); + if path.exists() { + return Ok(()); + } + std::fs::create_dir_all(path).with_context(|| "Failed to create directory") + } +} diff --git a/src/bins/src/shared/mod.rs b/src/bins/src/shared/mod.rs index f1523676..9c089cec 100644 --- a/src/bins/src/shared/mod.rs +++ b/src/bins/src/shared/mod.rs @@ -1,5 +1,6 @@ pub mod runtime_arch; pub mod cli_host; +pub mod fastzip; mod dialogs_const; mod dialogs_common; diff --git a/src/bins/src/shared/runtime_arch.rs b/src/bins/src/shared/runtime_arch.rs index 639f287d..4845ea96 100644 --- a/src/bins/src/shared/runtime_arch.rs +++ b/src/bins/src/shared/runtime_arch.rs @@ -78,7 +78,7 @@ fn check_arch_windows() -> Option { #[cfg(target_os = "windows")] type IsWow64Process2Fn = unsafe extern "system" fn( - hProcess: windows::Win32::Foundation::HANDLE, + hprocess: windows::Win32::Foundation::HANDLE, pprocessmachine: *mut windows::Win32::System::SystemInformation::IMAGE_FILE_MACHINE, pnativemachine: *mut windows::Win32::System::SystemInformation::IMAGE_FILE_MACHINE, ) -> windows::core::BOOL; diff --git a/src/bins/src/windows/mitigate.rs b/src/bins/src/windows/mitigate.rs index 98ddd2bb..fb1d27e9 100644 --- a/src/bins/src/windows/mitigate.rs +++ b/src/bins/src/windows/mitigate.rs @@ -3,7 +3,7 @@ use windows::Win32::System::LibraryLoader::LOAD_LIBRARY_SEARCH_SYSTEM32; use windows::Win32::System::LibraryLoader::LOAD_LIBRARY_FLAGS; #[cfg(target_os = "windows")] -type SetDefaultDllDirectoriesFn = unsafe extern "system" fn(DirectoryFlags: u32) -> BOOL; +type SetDefaultDllDirectoriesFn = unsafe extern "system" fn(directory_flags: u32) -> BOOL; #[cfg(target_os = "windows")] unsafe fn set_default_dll_directories(flags: LOAD_LIBRARY_FLAGS) {
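Returning to the progress-bar strategy described in `extract_file_inner` above: a quick worked example with made-up sizes shows how the compressed/uncompressed mapping plays out. This is a hypothetical test written as if it sat next to the existing tests in `progress_updater.rs`.

```
#[test]
fn worked_example_compressed_vs_uncompressed() {
    // Made-up sizes: a 10 MiB file that deflates to 4 MiB.
    let uncompressed_size: u64 = 10 * 1024 * 1024;
    let compressed_size: u64 = 4 * 1024 * 1024;
    let reported = std::rc::Rc::new(std::cell::RefCell::new(0u64));
    let mut updater = ProgressUpdater::new(
        |delta| *reported.borrow_mut() += delta,
        compressed_size,   // external total: the overall bar counts compressed bytes
        uncompressed_size, // internal total: what we actually observe being written
        1024 * 1024,       // report roughly once per MiB of uncompressed output
    );
    // Ten updates are expected, each worth 4 MiB / 10 = 419_430 compressed bytes,
    // leaving a 4-byte remainder that finish() tops up.
    updater.progress(3 * 1024 * 1024); // 3 MiB written so far -> 3 updates
    assert_eq!(*reported.borrow(), 3 * 419_430);
    updater.finish();
    assert_eq!(*reported.borrow(), compressed_size);
}
```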