diff --git a/Cargo.lock b/Cargo.lock index d30226c6c..cf43aec4c 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -60,7 +60,7 @@ checksum = "5a824f2aa7e75a0c98c5a504fceb80649e9c35265d44525b5f94de4771a395cd" dependencies = [ "getrandom 0.2.11", "once_cell", - "version_check 0.9.4", + "version_check", ] [[package]] @@ -72,7 +72,7 @@ dependencies = [ "cfg-if", "getrandom 0.2.11", "once_cell", - "version_check 0.9.4", + "version_check", "zerocopy", ] @@ -316,7 +316,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "279cf904654eeebfa37ac9bb1598880884924aab82e290aa65c9e77a0e142e06" dependencies = [ "async-lock 2.8.0", - "autocfg 1.1.0", + "autocfg", "blocking", "futures-lite 1.13.0", ] @@ -343,11 +343,11 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0fc5b45d93ef0529756f812ca52e44c221b35341892d3dcc34132ac02f3dd2af" dependencies = [ "async-lock 2.8.0", - "autocfg 1.1.0", + "autocfg", "cfg-if", "concurrent-queue", "futures-lite 1.13.0", - "log 0.4.20", + "log", "parking", "polling 2.8.0", "rustix 0.37.27", @@ -460,7 +460,7 @@ dependencies = [ "futures-lite 1.13.0", "gloo-timers", "kv-log-macro", - "log 0.4.20", + "log", "memchr", "once_cell", "pin-project-lite", @@ -559,7 +559,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8d9a9bf8b79a749ee0b911b91b671cc2b6c670bdbc7e3dfd537576ddc94bb2a2" dependencies = [ "http", - "log 0.4.20", + "log", "url", ] @@ -574,15 +574,6 @@ dependencies = [ "winapi", ] -[[package]] -name = "autocfg" -version = "0.1.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0dde43e75fd43e8a1bf86103336bc699aa8d17ad1be60c76c0bdfd4828e19b78" -dependencies = [ - "autocfg 1.1.0", -] - [[package]] name = "autocfg" version = "1.1.0" @@ -606,7 +597,7 @@ dependencies = [ "itoa", "matchit", "memchr", - "mime 0.3.17", + "mime", "percent-encoding 2.3.0", "pin-project-lite", "rustversion", @@ -628,7 +619,7 @@ dependencies = [ "futures-util", "http", "http-body", - "mime 0.3.17", + "mime", "rustversion", "tower-layer", "tower-service", @@ -906,16 +897,6 @@ dependencies = [ "tinyvec", ] -[[package]] -name = "buf_redux" -version = "0.8.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b953a6887648bb07a535631f2bc00fbdb2a2216f135552cb3f534ed136b9c07f" -dependencies = [ - "memchr", - "safemem", -] - [[package]] name = "built" version = "0.6.1" @@ -1104,8 +1085,8 @@ dependencies = [ "hyper-openssl", "hyper-tls", "lazy_static", - "log 0.4.20", - "mime 0.3.17", + "log", + "mime", "native-tls", "openssl", "percent-encoding 2.3.0", @@ -1189,6 +1170,7 @@ dependencies = [ "futures-util", "go-parse-duration", "hex", + "iroh-bitswap", "iroh-car", "iroh-rpc-client", "iroh-rpc-types", @@ -1227,10 +1209,8 @@ dependencies = [ "hyper-openssl", "hyper-tls", "lazy_static", - "log 0.4.20", - "mime 0.2.6", - "mime 0.3.17", - "multipart", + "log", + "mime", "native-tls", "openssl", "percent-encoding 2.3.0", @@ -1286,6 +1266,7 @@ dependencies = [ "ceramic-kubo-rpc-server", "ceramic-metrics", "ceramic-p2p", + "ceramic-store", "chrono", "cid 0.10.1", "clap 4.4.8", @@ -1298,6 +1279,7 @@ dependencies = [ "glob", "home", "hyper", + "iroh-bitswap", "iroh-rpc-client", "iroh-rpc-types", "libipld 0.16.0", @@ -1334,6 +1316,7 @@ dependencies = [ "bytes 1.5.0", "ceramic-core", "ceramic-metrics", + "ceramic-store", "chrono", "cid 0.10.1", "clap 4.4.8", @@ -1371,6 +1354,34 @@ dependencies = [ "zeroize", ] +[[package]] +name = "ceramic-store" +version = "0.9.0" +dependencies = [ + 
"anyhow", + "async-trait", + "bytes 1.5.0", + "ceramic-core", + "ceramic-metrics", + "cid 0.10.1", + "expect-test", + "futures", + "hex", + "iroh-bitswap", + "iroh-car", + "itertools 0.12.0", + "libipld 0.16.0", + "libipld-cbor 0.16.0", + "multihash 0.18.1", + "rand 0.8.5", + "recon", + "sqlx", + "test-log", + "tokio", + "tracing", + "tracing-subscriber", +] + [[package]] name = "cfg-if" version = "1.0.0" @@ -1526,7 +1537,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e5fdbb015d790cfb378aca82caf9cc52a38be96a7eecdb92f31b4366a8afc019" dependencies = [ "clap 4.4.8", - "log 0.4.20", + "log", ] [[package]] @@ -1577,15 +1588,6 @@ dependencies = [ "cc", ] -[[package]] -name = "cloudabi" -version = "0.0.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ddfc5b9aa5d4507acaf872de71051dfd0e309860e88966e1051e462a077aac4f" -dependencies = [ - "bitflags 1.3.2", -] - [[package]] name = "codespan-reporting" version = "0.11.1" @@ -1810,7 +1812,7 @@ version = "0.9.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ae211234986c545741a7dc064309f67ee1e5ad243d0e48335adc0484d960bcc7" dependencies = [ - "autocfg 1.1.0", + "autocfg", "cfg-if", "crossbeam-utils", "memoffset 0.9.0", @@ -2503,7 +2505,7 @@ version = "0.14.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c533630cf40e9caa44bd91aadc88a75d75a4c3a12b4cfde353cbed41daa1e1f1" dependencies = [ - "log 0.4.20", + "log", ] [[package]] @@ -2535,7 +2537,7 @@ checksum = "44533bbbb3bb3c1fa17d9f2e4e38bbbaf8396ba82193c4cb1b6445d711445d36" dependencies = [ "atty", "humantime 1.3.0", - "log 0.4.20", + "log", "regex", "termcolor", ] @@ -2546,7 +2548,7 @@ version = "0.8.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a19187fea3ac7e84da7dacf48de0c45d63c6a76f9490dae389aead16c243fce3" dependencies = [ - "log 0.4.20", + "log", "regex", ] @@ -2859,12 +2861,6 @@ dependencies = [ "syn 0.15.44", ] -[[package]] -name = "fuchsia-cprng" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a06f77d526c1a601b7c4cdd98f54b5eaabffc14d5f2f0296febdc7f357c6d3ba" - [[package]] name = "funty" version = "1.1.0" @@ -3054,7 +3050,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "85649ca51fd72272d7821adaf274ad91c288277713d9c18820d8499a7ff69e9a" dependencies = [ "typenum", - "version_check 0.9.4", + "version_check", "zeroize", ] @@ -3127,7 +3123,7 @@ dependencies = [ "bitflags 1.3.2", "libc", "libgit2-sys", - "log 0.4.20", + "log", "url", ] @@ -3473,11 +3469,11 @@ dependencies = [ "bytes 0.4.12", "httparse", "language-tags", - "log 0.4.20", - "mime 0.3.17", + "log", + "mime", "percent-encoding 1.0.1", "time 0.1.45", - "unicase 2.7.0", + "unicase", ] [[package]] @@ -3598,7 +3594,7 @@ dependencies = [ "futures", "if-addrs", "ipnet", - "log 0.4.20", + "log", "rtnetlink", "smol", "system-configuration", @@ -3624,7 +3620,7 @@ dependencies = [ "futures", "http", "hyper", - "log 0.4.20", + "log", "rand 0.8.5", "tokio", "url", @@ -3637,7 +3633,7 @@ version = "1.9.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bd070e393353796e801d209ad339e89596eb4c8d430d18ede6a1cced8fafbd99" dependencies = [ - "autocfg 1.1.0", + "autocfg", "hashbrown 0.12.3", "serde", ] @@ -3888,6 +3884,15 @@ dependencies = [ "either", ] +[[package]] +name = "itertools" +version = "0.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"25db6b064527c5d482d0423354fcd07a89a2dfe07b67892e62411946db7f07b0" +dependencies = [ + "either", +] + [[package]] name = "itoa" version = "1.0.9" @@ -3986,8 +3991,8 @@ dependencies = [ "langtag", "locspan", "locspan-derive", - "log 0.4.20", - "mime 0.3.17", + "log", + "mime", "mown", "once_cell", "permutohedron", @@ -4133,7 +4138,7 @@ version = "1.0.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0de8b303297635ad57c9f5059fd9cee7a47f8e8daa09df0fcd07dd39fb22977f" dependencies = [ - "log 0.4.20", + "log", ] [[package]] @@ -4294,7 +4299,7 @@ dependencies = [ "libipld-core 0.14.0", "libipld-json 0.14.0", "libipld-macro 0.14.0", - "log 0.4.20", + "log", "multihash 0.16.3", "parking_lot", "thiserror", @@ -4313,7 +4318,7 @@ dependencies = [ "libipld-json 0.16.0", "libipld-macro 0.16.0", "libipld-pb", - "log 0.4.20", + "log", "multihash 0.18.1", "thiserror", ] @@ -5096,7 +5101,7 @@ version = "0.4.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3c168f8615b12bc01f9c17e2eb0cc07dcae1940121185446edc3744920e8ef45" dependencies = [ - "autocfg 1.1.0", + "autocfg", "scopeguard", ] @@ -5122,15 +5127,6 @@ dependencies = [ "syn 1.0.109", ] -[[package]] -name = "log" -version = "0.3.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e19e8d5c34a3e0e2223db8e060f9e8264aeeb5c5fc64a4ee9965c062211c024b" -dependencies = [ - "log 0.4.20", -] - [[package]] name = "log" version = "0.4.20" @@ -5219,7 +5215,7 @@ version = "0.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5de893c32cde5f383baa4c04c5d6dbdd735cfd4a794b0debdb2bb1b421da5ff4" dependencies = [ - "autocfg 1.1.0", + "autocfg", ] [[package]] @@ -5228,16 +5224,7 @@ version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5a634b1c61a95585bd15607c6ab0c4e5b226e695ff2800ba0cdccddf208c406c" dependencies = [ - "autocfg 1.1.0", -] - -[[package]] -name = "mime" -version = "0.2.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ba626b8a6de5da682e1caa06bdb42a335aee5a84db8e5046a3e8ab17ba0a3ae0" -dependencies = [ - "log 0.3.9", + "autocfg", ] [[package]] @@ -5246,18 +5233,6 @@ version = "0.3.17" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a" -[[package]] -name = "mime_guess" -version = "1.8.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "216929a5ee4dd316b1702eedf5e74548c123d370f47841ceaac38ca154690ca3" -dependencies = [ - "mime 0.2.6", - "phf", - "phf_codegen", - "unicase 1.4.2", -] - [[package]] name = "minicbor" version = "0.19.1" @@ -5448,24 +5423,6 @@ version = "0.8.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e5ce46fe64a9d73be07dcbe690a38ce1b293be448fd8ce1e6c1b8062c9f72c6a" -[[package]] -name = "multipart" -version = "0.16.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "136eed74cadb9edd2651ffba732b19a450316b680e4f48d6c79e905799e19d01" -dependencies = [ - "buf_redux", - "httparse", - "log 0.4.20", - "mime 0.2.6", - "mime_guess", - "quick-error", - "rand 0.6.5", - "safemem", - "tempfile", - "twoway", -] - [[package]] name = "multistream-select" version = "0.13.0" @@ -5474,7 +5431,7 @@ checksum = "ea0df8e5eec2298a62b326ee4f0d7fe1a6b90a09dfcf9df37b38f947a8c42f19" dependencies = [ "bytes 1.5.0", "futures", - "log 0.4.20", + "log", "pin-project", "smallvec", "unsigned-varint 0.7.2", @@ 
-5506,7 +5463,7 @@ checksum = "07226173c32f2926027b63cce4bcd8076c3552846cbe7925f3aaffeac0a3b92e" dependencies = [ "lazy_static", "libc", - "log 0.4.20", + "log", "openssl", "openssl-probe", "openssl-sys", @@ -5562,7 +5519,7 @@ checksum = "65b4b14489ab424703c092062176d52ba55485a89c076b4f9db05092b7223aa6" dependencies = [ "bytes 1.5.0", "futures", - "log 0.4.20", + "log", "netlink-packet-core", "netlink-sys", "thiserror", @@ -5579,7 +5536,7 @@ dependencies = [ "bytes 1.5.0", "futures", "libc", - "log 0.4.20", + "log", "tokio", ] @@ -5660,7 +5617,7 @@ version = "0.4.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "608e7659b5c3d7cba262d894801b9ec9d00de989e8a82bd4bef91d08da45cdc0" dependencies = [ - "autocfg 1.1.0", + "autocfg", "num-integer", "num-traits", ] @@ -5699,7 +5656,7 @@ version = "0.1.45" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "225d3389fb3509a24c93f5c29eb6bde2586b98d9f016636dff58d7c6f7569cd9" dependencies = [ - "autocfg 1.1.0", + "autocfg", "num-traits", ] @@ -5709,7 +5666,7 @@ version = "0.1.43" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7d03e6c028c5dc5cac6e2dec0efda81fc887605bb3d884578bb6d6bf7514e252" dependencies = [ - "autocfg 1.1.0", + "autocfg", "num-integer", "num-traits", ] @@ -5720,7 +5677,7 @@ version = "0.2.17" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "39e3200413f237f41ab11ad6d161bc7239c84dcb631773ccd7de3dfe4b5c267c" dependencies = [ - "autocfg 1.1.0", + "autocfg", "libm", ] @@ -6082,52 +6039,13 @@ dependencies = [ "indexmap 2.1.0", ] -[[package]] -name = "phf" -version = "0.7.24" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b3da44b85f8e8dfaec21adae67f95d93244b2ecf6ad2a692320598dcc8e6dd18" -dependencies = [ - "phf_shared 0.7.24", -] - -[[package]] -name = "phf_codegen" -version = "0.7.24" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b03e85129e324ad4166b06b2c7491ae27fe3ec353af72e72cd1654c7225d517e" -dependencies = [ - "phf_generator", - "phf_shared 0.7.24", -] - -[[package]] -name = "phf_generator" -version = "0.7.24" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "09364cc93c159b8b06b1f4dd8a4398984503483891b0c26b867cf431fb132662" -dependencies = [ - "phf_shared 0.7.24", - "rand 0.6.5", -] - -[[package]] -name = "phf_shared" -version = "0.7.24" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "234f71a15de2288bcb7e3b6515828d22af7ec8598ee6d24c3b526fa0a80b67a0" -dependencies = [ - "siphasher 0.2.3", - "unicase 1.4.2", -] - [[package]] name = "phf_shared" version = "0.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b6796ad771acdc0123d2a88dc428b5e38ef24456743ddb1744ed628f9815c096" dependencies = [ - "siphasher 0.3.11", + "siphasher", ] [[package]] @@ -6290,12 +6208,12 @@ version = "2.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4b2d323e8ca7996b3e23126511a523f7e62924d93ecd5ae73b333815b0eb3dce" dependencies = [ - "autocfg 1.1.0", + "autocfg", "bitflags 1.3.2", "cfg-if", "concurrent-queue", "libc", - "log 0.4.20", + "log", "pin-project-lite", "windows-sys", ] @@ -6454,7 +6372,7 @@ dependencies = [ "proc-macro2 1.0.76", "quote 1.0.35", "syn 1.0.109", - "version_check 0.9.4", + "version_check", ] [[package]] @@ -6465,7 +6383,7 @@ checksum = "a1be40180e52ecc98ad80b184934baf3d0d29f979574e439af5a55274b35f869" dependencies = [ "proc-macro2 1.0.76", "quote 
1.0.35", - "version_check 0.9.4", + "version_check", ] [[package]] @@ -6541,7 +6459,7 @@ dependencies = [ "heck", "itertools 0.10.5", "lazy_static", - "log 0.4.20", + "log", "multimap", "petgraph", "prettyplease", @@ -6640,7 +6558,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "588f6378e4dd99458b60ec275b4477add41ce4fa9f64dcba6f15adccb19b50d6" dependencies = [ "env_logger 0.8.4", - "log 0.4.20", + "log", "rand 0.8.5", ] @@ -6716,25 +6634,6 @@ version = "0.6.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "643f8f41a8ebc4c5dc4515c82bb8abd397b527fc20fd681b7c011c2aee5d44fb" -[[package]] -name = "rand" -version = "0.6.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6d71dacdc3c88c1fde3885a3be3fbab9f35724e6ce99467f7d9c5026132184ca" -dependencies = [ - "autocfg 0.1.8", - "libc", - "rand_chacha 0.1.1", - "rand_core 0.4.2", - "rand_hc 0.1.0", - "rand_isaac", - "rand_jitter", - "rand_os", - "rand_pcg", - "rand_xorshift", - "winapi", -] - [[package]] name = "rand" version = "0.7.3" @@ -6745,7 +6644,7 @@ dependencies = [ "libc", "rand_chacha 0.2.2", "rand_core 0.5.1", - "rand_hc 0.2.0", + "rand_hc", ] [[package]] @@ -6759,16 +6658,6 @@ dependencies = [ "rand_core 0.6.4", ] -[[package]] -name = "rand_chacha" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "556d3a1ca6600bfcbab7c7c91ccb085ac7fbbcd70e008a98742e7847f4f7bcef" -dependencies = [ - "autocfg 0.1.8", - "rand_core 0.3.1", -] - [[package]] name = "rand_chacha" version = "0.2.2" @@ -6789,21 +6678,6 @@ dependencies = [ "rand_core 0.6.4", ] -[[package]] -name = "rand_core" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7a6fdeb83b075e8266dcc8762c22776f6877a63111121f5f8c7411e5be7eed4b" -dependencies = [ - "rand_core 0.4.2", -] - -[[package]] -name = "rand_core" -version = "0.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9c33a3c44ca05fa6f1807d8e6743f3824e8509beca625669633be0acbdf509dc" - [[package]] name = "rand_core" version = "0.5.1" @@ -6822,15 +6696,6 @@ dependencies = [ "getrandom 0.2.11", ] -[[package]] -name = "rand_hc" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7b40677c7be09ae76218dc623efbf7b18e34bced3f38883af07bb75630a21bc4" -dependencies = [ - "rand_core 0.3.1", -] - [[package]] name = "rand_hc" version = "0.2.0" @@ -6840,59 +6705,6 @@ dependencies = [ "rand_core 0.5.1", ] -[[package]] -name = "rand_isaac" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ded997c9d5f13925be2a6fd7e66bf1872597f759fd9dd93513dd7e92e5a5ee08" -dependencies = [ - "rand_core 0.3.1", -] - -[[package]] -name = "rand_jitter" -version = "0.1.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1166d5c91dc97b88d1decc3285bb0a99ed84b05cfd0bc2341bdf2d43fc41e39b" -dependencies = [ - "libc", - "rand_core 0.4.2", - "winapi", -] - -[[package]] -name = "rand_os" -version = "0.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7b75f676a1e053fc562eafbb47838d67c84801e38fc1ba459e8f180deabd5071" -dependencies = [ - "cloudabi", - "fuchsia-cprng", - "libc", - "rand_core 0.4.2", - "rdrand", - "winapi", -] - -[[package]] -name = "rand_pcg" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"abf9b09b01790cfe0364f52bf32995ea3c39f4d2dd011eac241d2914146d0b44" -dependencies = [ - "autocfg 0.1.8", - "rand_core 0.4.2", -] - -[[package]] -name = "rand_xorshift" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cbf7e9e623549b0e21f6e97cf8ecf247c1a8fd2e8a992ae265314300b2455d5c" -dependencies = [ - "rand_core 0.3.1", -] - [[package]] name = "rayon" version = "1.8.0" @@ -6938,15 +6750,6 @@ dependencies = [ "locspan-derive", ] -[[package]] -name = "rdrand" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "678054eb77286b51581ba43620cc911abf02758c91f93f479767aed0f90458b2" -dependencies = [ - "rand_core 0.3.1", -] - [[package]] name = "recon" version = "0.9.0" @@ -7076,8 +6879,8 @@ dependencies = [ "hyper-rustls", "ipnet", "js-sys", - "log 0.4.20", - "mime 0.3.17", + "log", + "mime", "once_cell", "percent-encoding 2.3.0", "pin-project-lite", @@ -7238,7 +7041,7 @@ checksum = "322c53fd76a18698f1c27381d58091de3a043d356aa5bd0d510608b565f469a0" dependencies = [ "async-global-executor", "futures", - "log 0.4.20", + "log", "netlink-packet-route", "netlink-proto", "nix 0.24.3", @@ -7309,7 +7112,7 @@ version = "0.21.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "629648aced5775d558af50b2b4c7b02983a04b312126d45eeead26e7caa498b9" dependencies = [ - "log 0.4.20", + "log", "ring 0.17.5", "rustls-webpki", "sct", @@ -7794,12 +7597,6 @@ dependencies = [ "thiserror", ] -[[package]] -name = "siphasher" -version = "0.2.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0b8de496cf83d4ed58b6be86c3a275b8602f6ffe98d3024a869e124147a9a3ac" - [[package]] name = "siphasher" version = "0.3.11" @@ -7842,7 +7639,7 @@ version = "0.4.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8f92a496fb766b417c996b9c5e57daf2f7ad3b0bebe1ccfca4856390e3d3bb67" dependencies = [ - "autocfg 1.1.0", + "autocfg", ] [[package]] @@ -7930,7 +7727,7 @@ dependencies = [ "bytes 1.5.0", "futures", "httparse", - "log 0.4.20", + "log", "rand 0.8.5", "sha-1", ] @@ -8027,7 +7824,7 @@ dependencies = [ "hashlink", "hex", "indexmap 2.1.0", - "log 0.4.20", + "log", "memchr", "once_cell", "paste", @@ -8106,7 +7903,7 @@ dependencies = [ "hkdf", "hmac", "itoa", - "log 0.4.20", + "log", "md-5", "memchr", "once_cell", @@ -8146,7 +7943,7 @@ dependencies = [ "hmac", "home", "itoa", - "log 0.4.20", + "log", "md-5", "memchr", "once_cell", @@ -8177,7 +7974,7 @@ dependencies = [ "futures-intrusive", "futures-util", "libsqlite3-sys", - "log 0.4.20", + "log", "percent-encoding 2.3.0", "serde", "sqlx-core", @@ -8559,7 +8356,7 @@ dependencies = [ "new_debug_unreachable", "once_cell", "parking_lot", - "phf_shared 0.10.0", + "phf_shared", "precomputed-hash", ] @@ -8610,7 +8407,6 @@ dependencies = [ "hyper-old-types", "hyper-openssl", "hyper-tls", - "mime 0.3.17", "native-tls", "openssl", "serde", @@ -9181,7 +8977,7 @@ version = "0.1.40" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c3523ab5a71916ccf420eebdf5521fcef02141234bbc0b8a49f2fdc4544364ef" dependencies = [ - "log 0.4.20", + "log", "pin-project-lite", "tracing-attributes", "tracing-core", @@ -9224,7 +9020,7 @@ version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f751112709b4e791d8ce53e32c4ed2d353565a795ce84da2285393f41557bdf2" dependencies = [ - "log 0.4.20", + "log", "once_cell", "tracing-core", ] @@ -9235,7 +9031,7 @@ version = "0.2.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "ee855f1f400bd0e5c02d150ae5de3840039a3f54b025156404e34c23c03f47c3" dependencies = [ - "log 0.4.20", + "log", "once_cell", "tracing-core", ] @@ -9314,15 +9110,6 @@ version = "0.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3528ecfd12c466c6f163363caf2d02a71161dd5e1cc6ae7b34207ea2d42d81ed" -[[package]] -name = "twoway" -version = "0.1.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "59b11b2b5241ba34be09c3cc85a36e56e48f9888862e19cedf23336d35316ed1" -dependencies = [ - "memchr", -] - [[package]] name = "typed-arena" version = "2.0.2" @@ -9347,22 +9134,13 @@ dependencies = [ "static_assertions", ] -[[package]] -name = "unicase" -version = "1.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7f4765f83163b74f957c797ad9253caf97f103fb064d3999aea9568d09fc8a33" -dependencies = [ - "version_check 0.1.5", -] - [[package]] name = "unicase" version = "2.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f7d2d4dafb69621809a81864c9c1b864479e1235c0dd4e199924b9742439ed89" dependencies = [ - "version_check 0.9.4", + "version_check", ] [[package]] @@ -9557,12 +9335,6 @@ version = "0.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f1bddf1187be692e79c5ffeab891132dfb0f236ed36a43c7ed39f1165ee20191" -[[package]] -name = "version_check" -version = "0.1.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "914b1a6776c4c929a602fafd8bc742e06365d4bcbe48c30f9cca5824f70dc9dd" - [[package]] name = "version_check" version = "0.9.4" @@ -9635,7 +9407,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e397f4664c0e4e428e8313a469aaa58310d302159845980fd23b0f22a847f217" dependencies = [ "bumpalo", - "log 0.4.20", + "log", "once_cell", "proc-macro2 1.0.76", "quote 1.0.35", @@ -9920,7 +9692,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0329ef377816896f014435162bb3711ea7a07729c23d0960e6f8048b21b8fe91" dependencies = [ "futures", - "log 0.4.20", + "log", "nohash-hasher", "parking_lot", "pin-project", diff --git a/Cargo.toml b/Cargo.toml index 69897e609..6ae910461 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -6,12 +6,13 @@ members = [ "core", "event", "kubo-rpc", + "kubo-rpc-server", "metadata", "metrics", - "kubo-rpc-server", "one", "p2p", "recon", + "store", "beetle/iroh-bitswap", "beetle/iroh-car", "beetle/iroh-rpc-client", @@ -54,6 +55,7 @@ ceramic-metadata = { path = "./metadata" } ceramic-metrics = { path = "./metrics" } ceramic-one = { path = "./one" } ceramic-p2p = { path = "./p2p" } +ceramic-store = { path = "./store" } cid = { version = "0.10", features = ["serde-codec"] } clap = { version = "4", features = ["derive", "env"] } clap_mangen = "0.2.2" diff --git a/core/Cargo.toml b/core/Cargo.toml index 1949f78a6..b5deadde1 100644 --- a/core/Cargo.toml +++ b/core/Cargo.toml @@ -7,8 +7,6 @@ license.workspace = true repository.workspace = true publish = false -# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html - [dependencies] anyhow.workspace = true async-trait.workspace = true diff --git a/core/src/event_id.rs b/core/src/event_id.rs index c9b323679..6baaaed32 100644 --- a/core/src/event_id.rs +++ b/core/src/event_id.rs @@ -325,6 +325,7 @@ pub struct WithEvent { impl BuilderState for WithEvent {} impl Builder { + /// Specify the network of the event pub fn 
@@ -325,6 +325,7 @@ pub struct WithEvent {
 impl BuilderState for WithEvent {}
 
 impl Builder<Init> {
+    /// Specify the network of the event
     pub fn with_network(self, network: &Network) -> Builder<WithNetwork> {
         // Maximum EventId size is 72.
         //
@@ -350,8 +351,9 @@ impl Builder<Init> {
     }
 }
 impl Builder<WithNetwork> {
-    // TODO sort_value should be bytes not str
+    /// Specify the sort key and value of the event
     pub fn with_sort_value(mut self, sort_key: &str, sort_value: &str) -> Builder<WithSortValue> {
+        // TODO sort_value should be bytes not str
         self.state.bytes.extend(last8_bytes(&sha256_digest(&format!(
             "{}|{}",
             sort_key, sort_value,
@@ -364,6 +366,7 @@ impl Builder<WithNetwork> {
     }
 }
 impl Builder<WithSortValue> {
+    /// Specify that the minimum controller value should be used for the event
     pub fn with_min_controller(mut self) -> Builder<WithController> {
         self.state.bytes.extend(ZEROS_8);
         Builder {
@@ -372,6 +375,7 @@ impl Builder<WithSortValue> {
             },
         }
     }
+    /// Specify that the maximum controller value should be used for the event
     pub fn with_max_controller(mut self) -> Builder<WithController> {
         self.state.bytes.extend(FFS_8);
         Builder {
@@ -380,6 +384,7 @@ impl Builder<WithSortValue> {
             },
         }
     }
+    /// Specify the controller for the event
     pub fn with_controller(mut self, controller: &str) -> Builder<WithController> {
         self.state
             .bytes
@@ -392,6 +397,7 @@ impl Builder<WithSortValue> {
     }
 }
 impl Builder<WithController> {
+    /// Specify that the minimum init value should be used for the event
     pub fn with_min_init(mut self) -> Builder<WithInit> {
         self.state.bytes.extend(ZEROS_4);
         Builder {
@@ -400,6 +406,7 @@ impl Builder<WithController> {
             },
         }
     }
+    /// Specify that the maximum init value should be used for the event
     pub fn with_max_init(mut self) -> Builder<WithInit> {
         self.state.bytes.extend(FFS_4);
         Builder {
@@ -408,6 +415,7 @@ impl Builder<WithController> {
             },
         }
     }
+    /// Specify the init cid of the event
     pub fn with_init(mut self, init: &Cid) -> Builder<WithInit> {
         self.state
             .bytes
@@ -420,6 +428,7 @@ impl Builder<WithController> {
     }
 }
 impl Builder<WithInit> {
+    /// Specify that the minimum event height should be used for the event
     pub fn with_min_event_height(mut self) -> Builder<WithEventHeight> {
         // 0x00 is the cbor encoding of 0.
         self.state.bytes.push(0x00);
@@ -429,6 +438,7 @@ impl Builder<WithInit> {
             },
         }
     }
+    /// Specify that the maximum event height should be used for the event
     pub fn with_max_event_height(mut self) -> Builder<WithEventHeight> {
         // 0xFF is the break stop code in CBOR, and will sort higher than any cbor encoded unsigned
         // integer.
@@ -439,6 +449,7 @@ impl Builder<WithInit> {
             },
         }
     }
+    /// Specify the event height for the event
     pub fn with_event_height(mut self, event_height: u64) -> Builder<WithEventHeight> {
         let event_height_cbor = minicbor::to_vec(event_height).unwrap();
         // event_height cbor unsigned int
@@ -451,10 +462,13 @@ impl Builder<WithInit> {
     }
 }
 impl Builder<WithEventHeight> {
-    /// Builds the final EventId as a fencepost
+    /// Builds the final EventId as a fencepost.
+    /// A fencepost is a value that sorts before and after specific events but is itself not a
+    /// complete EventId.
    pub fn build_fencepost(self) -> EventId {
         EventId(self.state.bytes)
     }
+    /// Specify the event cid
     pub fn with_event(mut self, event: &Cid) -> Builder<WithEvent> {
         self.state.bytes.extend(event.to_bytes());
         Builder {
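The doc comments added above describe a typestate builder: each `with_*` call advances the builder to the next state, and `build_fencepost` stops early to produce a sortable range bound rather than a complete EventId. As a rough usage sketch (hedged: `Builder::default()`, the `Network::Mainnet` variant, and the sort value below are illustrative assumptions, not taken from this diff):

```rust
use ceramic_core::{event_id::Builder, Network};

// Sketch: build a fencepost that lower-bounds every event for one sort
// key/value pair. Only the with_* methods and build_fencepost appear in the
// diff; the constructor and the Network variant are assumed.
let fencepost = Builder::default()
    .with_network(&Network::Mainnet)
    .with_sort_value("model", "kh4q0ozorrgaq2mezktnrmdwleo1d")
    .with_min_controller()
    .with_min_init()
    .with_min_event_height()
    .build_fencepost();
```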
diff --git a/core/src/lib.rs b/core/src/lib.rs
index 1e5bd0ade..3dc15f62d 100644
--- a/core/src/lib.rs
+++ b/core/src/lib.rs
@@ -2,7 +2,7 @@
 //! Core functionality for ceramic, including the StreamId, Cid, and Jws types.
 #![warn(missing_docs)]
 mod bytes;
-mod event_id;
+pub mod event_id;
 mod interest;
 mod jwk;
 mod jws;
diff --git a/kubo-rpc-server/.openapi-generator/FILES b/kubo-rpc-server/.openapi-generator/FILES
index 6df69a957..54773028c 100644
--- a/kubo-rpc-server/.openapi-generator/FILES
+++ b/kubo-rpc-server/.openapi-generator/FILES
@@ -3,11 +3,8 @@
 Cargo.toml
 README.md
 api/openapi.yaml
-docs/BlockPutPost200Response.md
+docs/BlockStatPost200Response.md
 docs/Codecs.md
-docs/DagImportPost200Response.md
-docs/DagPutPost200Response.md
-docs/DagPutPost200ResponseCid.md
 docs/DagResolvePost200Response.md
 docs/DagResolvePost200ResponseCid.md
 docs/Error.md
diff --git a/kubo-rpc-server/Cargo.toml b/kubo-rpc-server/Cargo.toml
index 95bb155ac..f842e3dbc 100644
--- a/kubo-rpc-server/Cargo.toml
+++ b/kubo-rpc-server/Cargo.toml
@@ -9,13 +9,9 @@ edition = "2018"
 
 [features]
 default = ["client", "server"]
 client = [
-    "mime_0_2",
-    "multipart", "multipart/client", "swagger/multipart_form",
     "hyper", "hyper-openssl", "hyper-tls", "native-tls", "openssl", "url"
 ]
 server = [
-    "mime_0_2",
-    "multipart", "multipart/server",
     "serde_ignored", "hyper", "regex", "percent-encoding", "url", "lazy_static"
 ]
@@ -42,8 +38,6 @@ serde_json = "1.0"
 validator = { version = "0.16", features = ["derive"] }
 
 # Crates included if required by the API definition
-mime_0_2 = { package = "mime", version = "0.2.6", optional = true }
-multipart = { version = "0.16", default-features = false, optional = true }
 
 # Common between server and client features
 hyper = {version = "0.14", features = ["full"], optional = true}
diff --git a/kubo-rpc-server/README.md b/kubo-rpc-server/README.md
index ad9192977..e9e29e950 100644
--- a/kubo-rpc-server/README.md
+++ b/kubo-rpc-server/README.md
@@ -15,7 +15,7 @@ To see how to make this your own, look here:
 [README]((https://openapi-generator.tech))
 
 - API version: 0.9.0
-- Build date: 2024-01-24T14:39:45.959361295-07:00[America/Denver]
+- Build date: 2024-01-26T13:55:46.024253330-07:00[America/Denver]
 
 
 
@@ -63,11 +63,8 @@ To run a client, follow one of the following simple steps:
 
 ```
 cargo run --example client BlockGetPost
-cargo run --example client BlockPutPost
 cargo run --example client BlockStatPost
 cargo run --example client DagGetPost
-cargo run --example client DagImportPost
-cargo run --example client DagPutPost
 cargo run --example client DagResolvePost
 cargo run --example client IdPost
 cargo run --example client PinAddPost
@@ -109,11 +106,8 @@ All URIs are relative to */api/v0*
 Method | HTTP request | Description
 ------------- | ------------- | -------------
 [****](docs/default_api.md#) | **POST** /block/get | Get a single IPFS block
-[****](docs/default_api.md#) | **POST** /block/put | Put a single IPFS block
 [****](docs/default_api.md#) | **POST** /block/stat | Report statistics about a block
 [****](docs/default_api.md#) | **POST** /dag/get | Get an IPLD node from IPFS
-[****](docs/default_api.md#) | **POST** /dag/import | Import a CAR file of IPLD nodes into IPFS
-[****](docs/default_api.md#) | **POST** /dag/put | Put an IPLD node into IPFS
 [****](docs/default_api.md#) | **POST** /dag/resolve | Resolve an IPFS path to a DAG node
 [****](docs/default_api.md#) | **POST** /id | Report identifying information about a node
 [****](docs/default_api.md#) | **POST** /pin/add | Add a block to the pin store
@@ -125,11 +119,8 @@ Method | HTTP request | Description
 
 ## Documentation For Models
 
- - [BlockPutPost200Response](docs/BlockPutPost200Response.md)
+ - [BlockStatPost200Response](docs/BlockStatPost200Response.md)
  - [Codecs](docs/Codecs.md)
- - [DagImportPost200Response](docs/DagImportPost200Response.md)
- - [DagPutPost200Response](docs/DagPutPost200Response.md)
- - [DagPutPost200ResponseCid](docs/DagPutPost200ResponseCid.md)
  - [DagResolvePost200Response](docs/DagResolvePost200Response.md)
  - [DagResolvePost200ResponseCid](docs/DagResolvePost200ResponseCid.md)
  - [Error](docs/Error.md)
diff --git a/kubo-rpc-server/api/openapi.yaml b/kubo-rpc-server/api/openapi.yaml
index 8c5b5e861..640ed3ca7 100644
--- a/kubo-rpc-server/api/openapi.yaml
+++ b/kubo-rpc-server/api/openapi.yaml
@@ -46,65 +46,6 @@ paths:
                 $ref: '#/components/schemas/Error'
           description: bad request
       summary: Get an IPLD node from IPFS
-  /dag/put:
-    post:
-      parameters:
-      - description: IPFS path to DAG node
-        explode: true
-        in: query
-        name: store-codec
-        required: false
-        schema:
-          $ref: '#/components/schemas/Codecs'
-        style: form
-      - description: Output encoding of the data
-        explode: true
-        in: query
-        name: input-codec
-        required: false
-        schema:
-          $ref: '#/components/schemas/Codecs'
-        style: form
-      requestBody:
-        content:
-          multipart/form-data:
-            schema:
-              $ref: '#/components/schemas/_dag_put_post_request'
-      responses:
-        "200":
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/_dag_put_post_200_response'
-          description: success
-        "400":
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/Error'
-          description: bad request
-      summary: Put an IPLD node into IPFS
-  /dag/import:
-    post:
-      requestBody:
-        content:
-          multipart/form-data:
-            schema:
-              $ref: '#/components/schemas/_dag_put_post_request'
-      responses:
-        "200":
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/_dag_import_post_200_response'
-          description: success
-        "400":
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/Error'
-          description: bad request
-      summary: Import a CAR file of IPLD nodes into IPFS
   /dag/resolve:
     post:
       parameters:
@@ -179,52 +120,6 @@ paths:
                 $ref: '#/components/schemas/Error'
           description: internal error
       summary: Get a single IPFS block
-  /block/put:
-    post:
-      parameters:
-      - description: Codec of the block data
-        explode: true
-        in: query
-        name: cid-codec
-        required: false
-        schema:
-          $ref: '#/components/schemas/Codecs'
-        style: form
-      - description: Multihash type
-        explode: true
-        in: query
-        name: mhtype
-        required: false
-        schema:
-          $ref: '#/components/schemas/Multihash'
-        style: form
-      - description: Whether to recursively pin the block
-        explode: true
-        in: query
-        name: pin
-        required: false
-        schema:
-          type: bool
-        style: form
-      requestBody:
-        content:
-          multipart/form-data:
-            schema:
-              $ref: '#/components/schemas/_dag_put_post_request'
-      responses:
-        "200":
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/_block_put_post_200_response'
-          description: success
-        "400":
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/Error'
-          description: bad request
-      summary: Put a single IPFS block
   /block/stat:
     post:
       parameters:
@@ -241,7 +136,7 @@
           content:
             application/json:
               schema:
-                $ref: '#/components/schemas/_block_put_post_200_response'
+                $ref: '#/components/schemas/_block_stat_post_200_response'
           description: success
         "400":
           content:
@@ -426,44 +321,6 @@ components:
       - Message
       - Type
       type: object
-    _dag_put_post_request:
-      properties:
-        file:
-          format: byte
-          type: string
-      required:
-      - file
-      type: object
-    _dag_put_post_200_response_Cid:
-      example:
-        /: /
-      properties:
-        /:
-          type: string
-      required:
-      - /
-      type: object
-    _dag_put_post_200_response:
-      example:
-        Cid:
-          /: /
-      properties:
-        Cid:
-          $ref: '#/components/schemas/_dag_put_post_200_response_Cid'
-      required:
-      - Cid
-      type: object
-    _dag_import_post_200_response:
-      example:
-        Root:
-          Cid:
-            /: /
-      properties:
-        Root:
-          $ref: '#/components/schemas/_dag_put_post_200_response'
-      required:
-      - Root
-      type: object
     _dag_resolve_post_200_response_Cid:
       description: root cid
       example:
@@ -490,7 +347,7 @@
       - Cid
       - RemPath
       type: object
-    _block_put_post_200_response:
+    _block_stat_post_200_response:
       example:
         Size: 0.8008281904610115
         Key: Key
diff --git a/kubo-rpc-server/docs/BlockStatPost200Response.md b/kubo-rpc-server/docs/BlockStatPost200Response.md
new file mode 100644
index 000000000..fb2edcec3
--- /dev/null
+++ b/kubo-rpc-server/docs/BlockStatPost200Response.md
@@ -0,0 +1,11 @@
+# BlockStatPost200Response
+
+## Properties
+Name | Type | Description | Notes
+------------ | ------------- | ------------- | -------------
+**key** | **String** |  | 
+**size** | **f64** |  | 
+
+[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
+
+
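With the put/import routes gone, `BlockStatPost200Response` is the one regenerated model, and `/block/stat` is the call that returns it. A minimal sketch of driving it through the generated client, mirroring the example client further down in this diff (the `client` and `rt` bindings are assumed to be set up as in that example):

```rust
// Sketch: call /block/stat and unpack the regenerated response model.
// `client` is an already-wrapped ApiNoContext client and `rt` a tokio
// runtime, as in the generated example client below.
match rt.block_on(client.block_stat_post("arg_example".to_string())) {
    Ok(BlockStatPostResponse::Success(stat)) => {
        // BlockStatPost200Response carries `key: String` and `size: f64`.
        println!("block {} is {} bytes", stat.key, stat.size);
    }
    Ok(BlockStatPostResponse::BadRequest(err)) => eprintln!("bad request: {:?}", err),
    Err(e) => eprintln!("request failed: {:?}", e),
}
```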
diff --git a/kubo-rpc-server/docs/default_api.md b/kubo-rpc-server/docs/default_api.md
index b71463129..01c7f23b9 100644
--- a/kubo-rpc-server/docs/default_api.md
+++ b/kubo-rpc-server/docs/default_api.md
@@ -5,11 +5,8 @@ All URIs are relative to */api/v0*
 Method | HTTP request | Description
 ------------- | ------------- | -------------
 ****](default_api.md#) | **POST** /block/get | Get a single IPFS block
-****](default_api.md#) | **POST** /block/put | Put a single IPFS block
 ****](default_api.md#) | **POST** /block/stat | Report statistics about a block
 ****](default_api.md#) | **POST** /dag/get | Get an IPLD node from IPFS
-****](default_api.md#) | **POST** /dag/import | Import a CAR file of IPLD nodes into IPFS
-****](default_api.md#) | **POST** /dag/put | Put an IPLD node into IPFS
 ****](default_api.md#) | **POST** /dag/resolve | Resolve an IPFS path to a DAG node
 ****](default_api.md#) | **POST** /id | Report identifying information about a node
 ****](default_api.md#) | **POST** /pin/add | Add a block to the pin store
@@ -55,43 +52,7 @@ No authorization required
 [[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md)
 
 # ****
-> models::BlockPutPost200Response (file, optional)
-Put a single IPFS block
-
-### Required Parameters
-
-Name | Type | Description | Notes
-------------- | ------------- | ------------- | -------------
- **file** | **swagger::ByteArray**|  | 
- **optional** | **map[string]interface{}** | optional parameters | nil if no parameters
-
-### Optional Parameters
-Optional parameters are passed through a map[string]interface{}.
-
-Name | Type | Description | Notes
-------------- | ------------- | ------------- | -------------
- **file** | **swagger::ByteArray**|  | 
- **cid_codec** | [****](.md)| Codec of the block data | 
- **mhtype** | [****](.md)| Multihash type | 
- **pin** | [****](.md)| Whether to recursively pin the block | 
-
-### Return type
-
-[**models::BlockPutPost200Response**](_block_put_post_200_response.md)
-
-### Authorization
-
-No authorization required
-
-### HTTP request headers
-
- - **Content-Type**: multipart/form-data
- - **Accept**: application/json
-
-[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md)
-
-# ****
-> models::BlockPutPost200Response (arg)
+> models::BlockStatPost200Response (arg)
 Report statistics about a block
 
 ### Required Parameters
@@ -102,7 +63,7 @@ Name | Type | Description | Notes
 
 ### Return type
 
-[**models::BlockPutPost200Response**](_block_put_post_200_response.md)
+[**models::BlockStatPost200Response**](_block_stat_post_200_response.md)
 
 ### Authorization
 
@@ -149,66 +110,6 @@ No authorization required
 
 [[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md)
 
-# ****
-> models::DagImportPost200Response (file)
-Import a CAR file of IPLD nodes into IPFS
-
-### Required Parameters
-
-Name | Type | Description | Notes
-------------- | ------------- | ------------- | -------------
- **file** | **swagger::ByteArray**|  | 
-
-### Return type
-
-[**models::DagImportPost200Response**](_dag_import_post_200_response.md)
-
-### Authorization
-
-No authorization required
-
-### HTTP request headers
-
- - **Content-Type**: multipart/form-data
- - **Accept**: application/json
-
-[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md)
-
-# ****
-> models::DagPutPost200Response (file, optional)
-Put an IPLD node into IPFS
-
-### Required Parameters
-
-Name | Type | Description | Notes
-------------- | ------------- | ------------- | -------------
- **file** | **swagger::ByteArray**|  | 
- **optional** | **map[string]interface{}** | optional parameters | nil if no parameters
-
-### Optional Parameters
-Optional parameters are passed through a map[string]interface{}.
-
-Name | Type | Description | Notes
-------------- | ------------- | ------------- | -------------
- **file** | **swagger::ByteArray**|  | 
- **store_codec** | [****](.md)| IPFS path to DAG node | 
- **input_codec** | [****](.md)| Output encoding of the data | 
-
-### Return type
-
-[**models::DagPutPost200Response**](_dag_put_post_200_response.md)
-
-### Authorization
-
-No authorization required
-
-### HTTP request headers
-
- - **Content-Type**: multipart/form-data
- - **Accept**: application/json
-
-[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md)
-
 # ****
 > models::DagResolvePost200Response (arg)
 Resolve an IPFS path to a DAG node
diff --git a/kubo-rpc-server/examples/client/main.rs b/kubo-rpc-server/examples/client/main.rs
index 8c6bc6dc2..7e13ec0f7 100644
--- a/kubo-rpc-server/examples/client/main.rs
+++ b/kubo-rpc-server/examples/client/main.rs
@@ -2,10 +2,10 @@
 
 #[allow(unused_imports)]
 use ceramic_kubo_rpc_server::{
-    models, Api, ApiNoContext, BlockGetPostResponse, BlockPutPostResponse, BlockStatPostResponse,
-    Client, ContextWrapperExt, DagGetPostResponse, DagImportPostResponse, DagPutPostResponse,
-    DagResolvePostResponse, IdPostResponse, PinAddPostResponse, PinRmPostResponse,
-    SwarmConnectPostResponse, SwarmPeersPostResponse, VersionPostResponse,
+    models, Api, ApiNoContext, BlockGetPostResponse, BlockStatPostResponse, Client,
+    ContextWrapperExt, DagGetPostResponse, DagResolvePostResponse, IdPostResponse,
+    PinAddPostResponse, PinRmPostResponse, SwarmConnectPostResponse, SwarmPeersPostResponse,
+    VersionPostResponse,
 };
 use clap::{App, Arg};
 #[allow(unused_imports)]
@@ -36,11 +36,8 @@ fn main() {
                 .help("Sets the operation to run")
                 .possible_values(&[
                     "BlockGetPost",
-                    "BlockPutPost",
                     "BlockStatPost",
                     "DagGetPost",
-                    "DagImportPost",
-                    "DagPutPost",
                     "DagResolvePost",
                     "IdPost",
                     "PinAddPost",
@@ -115,19 +112,6 @@ fn main() {
                 (client.context() as &dyn Has<XSpanIdString>).get().clone()
             );
         }
-        Some("BlockPutPost") => {
-            let result = rt.block_on(client.block_put_post(
-                swagger::ByteArray(Vec::from("BYTE_ARRAY_DATA_HERE")),
-                None,
-                None,
-                None,
-            ));
-            info!(
-                "{:?} (X-Span-ID: {:?})",
-                result,
-                (client.context() as &dyn Has<XSpanIdString>).get().clone()
-            );
-        }
         Some("BlockStatPost") => {
             let result = rt.block_on(client.block_stat_post("arg_example".to_string()));
             info!(
@@ -144,28 +128,6 @@ fn main() {
                 "{:?} (X-Span-ID: {:?})",
                 result,
                 (client.context() as &dyn Has<XSpanIdString>).get().clone()
             );
         }
-        Some("DagImportPost") => {
-            let result = rt.block_on(
-                client.dag_import_post(swagger::ByteArray(Vec::from("BYTE_ARRAY_DATA_HERE"))),
-            );
-            info!(
-                "{:?} (X-Span-ID: {:?})",
-                result,
-                (client.context() as &dyn Has<XSpanIdString>).get().clone()
-            );
-        }
-        Some("DagPutPost") => {
-            let result = rt.block_on(client.dag_put_post(
-                swagger::ByteArray(Vec::from("BYTE_ARRAY_DATA_HERE")),
-                None,
-                None,
-            ));
-            info!(
-                "{:?} (X-Span-ID: {:?})",
-                result,
-                (client.context() as &dyn Has<XSpanIdString>).get().clone()
-            );
-        }
         Some("DagResolvePost") => {
             let result = rt.block_on(client.dag_resolve_post("arg_example".to_string()));
             info!(
diff --git a/kubo-rpc-server/examples/server/server.rs b/kubo-rpc-server/examples/server/server.rs
index 05d73037f..1e63430d2 100644
--- a/kubo-rpc-server/examples/server/server.rs
+++ b/kubo-rpc-server/examples/server/server.rs
@@ -101,10 +101,9 @@ impl<C> Server<C> {
 
 use ceramic_kubo_rpc_server::server::MakeService;
 use ceramic_kubo_rpc_server::{
-    Api, BlockGetPostResponse, BlockPutPostResponse, BlockStatPostResponse, DagGetPostResponse,
-    DagImportPostResponse, DagPutPostResponse, DagResolvePostResponse, IdPostResponse,
-    PinAddPostResponse, PinRmPostResponse, SwarmConnectPostResponse, SwarmPeersPostResponse,
-    VersionPostResponse,
+    Api, BlockGetPostResponse, BlockStatPostResponse, DagGetPostResponse, DagResolvePostResponse,
+    IdPostResponse, PinAddPostResponse, PinRmPostResponse, SwarmConnectPostResponse,
+    SwarmPeersPostResponse, VersionPostResponse,
 };
 use std::error::Error;
 use swagger::ApiError;
@@ -132,26 +131,6 @@ where
         Err(ApiError("Generic failure".into()))
     }
 
-    /// Put a single IPFS block
-    async fn block_put_post(
-        &self,
-        file: swagger::ByteArray,
-        cid_codec: Option<models::Codecs>,
-        mhtype: Option<models::Multihash>,
-        pin: Option<bool>,
-        context: &C,
-    ) -> Result<BlockPutPostResponse, ApiError> {
-        info!(
-            "block_put_post({:?}, {:?}, {:?}, {:?}) - X-Span-ID: {:?}",
-            file,
-            cid_codec,
-            mhtype,
-            pin,
-            context.get().0.clone()
-        );
-        Err(ApiError("Generic failure".into()))
-    }
-
     /// Report statistics about a block
     async fn block_stat_post(
         &self,
@@ -182,38 +161,6 @@ where
         Err(ApiError("Generic failure".into()))
     }
 
-    /// Import a CAR file of IPLD nodes into IPFS
-    async fn dag_import_post(
-        &self,
-        file: swagger::ByteArray,
-        context: &C,
-    ) -> Result<DagImportPostResponse, ApiError> {
-        info!(
-            "dag_import_post({:?}) - X-Span-ID: {:?}",
-            file,
-            context.get().0.clone()
-        );
-        Err(ApiError("Generic failure".into()))
-    }
-
-    /// Put an IPLD node into IPFS
-    async fn dag_put_post(
-        &self,
-        file: swagger::ByteArray,
-        store_codec: Option<models::Codecs>,
-        input_codec: Option<models::Codecs>,
-        context: &C,
-    ) -> Result<DagPutPostResponse, ApiError> {
-        info!(
-            "dag_put_post({:?}, {:?}, {:?}) - X-Span-ID: {:?}",
-            file,
-            store_codec,
-            input_codec,
-            context.get().0.clone()
-        );
-        Err(ApiError("Generic failure".into()))
-    }
-
     /// Resolve an IPFS path to a DAG node
     async fn dag_resolve_post(
         &self,
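The generated server stubs above only ever answer with a generic failure. For the routes that survive this change, a real handler keeps the same signature; a hedged sketch for `/block/stat` follows (the hard-coded size stands in for a real blockstore lookup, and the model fields come from the regenerated `BlockStatPost200Response`):

```rust
/// Report statistics about a block (illustrative implementation, not part of
/// the generated code: returns a fixed size instead of consulting a
/// blockstore).
async fn block_stat_post(
    &self,
    arg: String,
    context: &C,
) -> Result<BlockStatPostResponse, ApiError> {
    info!("block_stat_post({:?}) - X-Span-ID: {:?}", arg, context.get().0.clone());
    Ok(BlockStatPostResponse::Success(
        models::BlockStatPost200Response {
            key: arg,
            size: 0.0,
        },
    ))
}
```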
diff --git a/kubo-rpc-server/src/client/mod.rs b/kubo-rpc-server/src/client/mod.rs
index cdeb49a24..07a66376d 100644
--- a/kubo-rpc-server/src/client/mod.rs
+++ b/kubo-rpc-server/src/client/mod.rs
@@ -22,10 +22,6 @@ use std::task::{Context, Poll};
 use swagger::{ApiError, AuthData, BodyExt, Connector, DropContextService, Has, XSpanIdString};
 use url::form_urlencoded;
 
-use mime::Mime;
-use multipart::client::lazy::Multipart;
-use std::io::Cursor;
-
 use crate::header;
 use crate::models;
 
@@ -46,10 +42,9 @@ const FRAGMENT_ENCODE_SET: &AsciiSet = &percent_encoding::CONTROLS
 const ID_ENCODE_SET: &AsciiSet = &FRAGMENT_ENCODE_SET.add(b'|');
 
 use crate::{
-    Api, BlockGetPostResponse, BlockPutPostResponse, BlockStatPostResponse, DagGetPostResponse,
-    DagImportPostResponse, DagPutPostResponse, DagResolvePostResponse, IdPostResponse,
-    PinAddPostResponse, PinRmPostResponse, SwarmConnectPostResponse, SwarmPeersPostResponse,
-    VersionPostResponse,
+    Api, BlockGetPostResponse, BlockStatPostResponse, DagGetPostResponse, DagResolvePostResponse,
+    IdPostResponse, PinAddPostResponse, PinRmPostResponse, SwarmConnectPostResponse,
+    SwarmPeersPostResponse, VersionPostResponse,
 };
 
 /// Convert input into a base path, e.g. "http://example:123". Also checks the scheme as it goes.
@@ -511,167 +506,6 @@ where
         }
     }
 
-    async fn block_put_post(
-        &self,
-        param_file: swagger::ByteArray,
-        param_cid_codec: Option<models::Codecs>,
-        param_mhtype: Option<models::Multihash>,
-        param_pin: Option<bool>,
-        context: &C,
-    ) -> Result<BlockPutPostResponse, ApiError> {
-        let mut client_service = self.client_service.clone();
-        let mut uri = format!("{}/api/v0/block/put", self.base_path);
-
-        // Query parameters
-        let query_string = {
-            let mut query_string = form_urlencoded::Serializer::new("".to_owned());
-            if let Some(param_cid_codec) = param_cid_codec {
-                query_string.append_pair("cid-codec", &param_cid_codec.to_string());
-            }
-            if let Some(param_mhtype) = param_mhtype {
-                query_string.append_pair("mhtype", &param_mhtype.to_string());
-            }
-            if let Some(param_pin) = param_pin {
-                query_string.append_pair("pin", &param_pin.to_string());
-            }
-            query_string.finish()
-        };
-        if !query_string.is_empty() {
-            uri += "?";
-            uri += &query_string;
-        }
-
-        let uri = match Uri::from_str(&uri) {
-            Ok(uri) => uri,
-            Err(err) => return Err(ApiError(format!("Unable to build URI: {}", err))),
-        };
-
-        let mut request = match Request::builder()
-            .method("POST")
-            .uri(uri)
-            .body(Body::empty())
-        {
-            Ok(req) => req,
-            Err(e) => return Err(ApiError(format!("Unable to create request: {}", e))),
-        };
-
-        let (body_string, multipart_header) = {
-            let mut multipart = Multipart::new();
-
-            // For each parameter, encode as appropriate and add to the multipart body as a stream.
-
-            let file_vec = param_file.to_vec();
-
-            let file_mime = match mime_0_2::Mime::from_str("application/octet-stream") {
-                Ok(mime) => mime,
-                Err(err) => return Err(ApiError(format!("Unable to get mime type: {:?}", err))),
-            };
-
-            let file_cursor = Cursor::new(file_vec);
-
-            let filename = None as Option<&str>;
-            multipart.add_stream("file", file_cursor, filename, Some(file_mime));
-
-            let mut fields = match multipart.prepare() {
-                Ok(fields) => fields,
-                Err(err) => return Err(ApiError(format!("Unable to build request: {}", err))),
-            };
-
-            let mut body_string = String::new();
-
-            match fields.read_to_string(&mut body_string) {
-                Ok(_) => (),
-                Err(err) => return Err(ApiError(format!("Unable to build body: {}", err))),
-            }
-
-            let boundary = fields.boundary();
-
-            let multipart_header = format!("multipart/form-data;boundary={}", boundary);
-
-            (body_string, multipart_header)
-        };
-
-        *request.body_mut() = Body::from(body_string);
-
-        request.headers_mut().insert(
-            CONTENT_TYPE,
-            match HeaderValue::from_str(&multipart_header) {
-                Ok(h) => h,
-                Err(e) => {
-                    return Err(ApiError(format!(
-                        "Unable to create header: {} - {}",
-                        multipart_header, e
-                    )))
-                }
-            },
-        );
-
-        let header = HeaderValue::from_str(Has::<XSpanIdString>::get(context).0.as_str());
-        request.headers_mut().insert(
-            HeaderName::from_static("x-span-id"),
-            match header {
-                Ok(h) => h,
-                Err(e) => {
-                    return Err(ApiError(format!(
-                        "Unable to create X-Span ID header value: {}",
-                        e
-                    )))
-                }
-            },
-        );
-
-        let response = client_service
-            .call((request, context.clone()))
-            .map_err(|e| ApiError(format!("No response received: {}", e)))
-            .await?;
-
-        match response.status().as_u16() {
-            200 => {
-                let body = response.into_body();
-                let body = body
-                    .into_raw()
-                    .map_err(|e| ApiError(format!("Failed to read response: {}", e)))
-                    .await?;
-                let body = str::from_utf8(&body)
-                    .map_err(|e| ApiError(format!("Response was not valid UTF8: {}", e)))?;
-                let body =
-                    serde_json::from_str::<models::BlockPutPost200Response>(body).map_err(|e| {
-                        ApiError(format!("Response body did not match the schema: {}", e))
-                    })?;
-                Ok(BlockPutPostResponse::Success(body))
-            }
-            400 => {
-                let body = response.into_body();
-                let body = body
-                    .into_raw()
-                    .map_err(|e| ApiError(format!("Failed to read response: {}", e)))
-                    .await?;
-                let body = str::from_utf8(&body)
-                    .map_err(|e| ApiError(format!("Response was not valid UTF8: {}", e)))?;
-                let body = serde_json::from_str::<models::Error>(body).map_err(|e| {
-                    ApiError(format!("Response body did not match the schema: {}", e))
-                })?;
-                Ok(BlockPutPostResponse::BadRequest(body))
-            }
-            code => {
-                let headers = response.headers().clone();
-                let body = response.into_body().take(100).into_raw().await;
-                Err(ApiError(format!(
-                    "Unexpected response code {}:\n{:?}\n\n{}",
-                    code,
-                    headers,
-                    match body {
-                        Ok(body) => match String::from_utf8(body) {
-                            Ok(body) => body,
-                            Err(e) => format!("<Body was not UTF8: {:?}>", e),
-                        },
-                        Err(e) => format!("<Failed to read body: {}>", e),
-                    }
-                )))
-            }
-        }
-    }
-
     async fn block_stat_post(
         &self,
         param_arg: String,
@@ -733,10 +567,9 @@ where
                     .await?;
                 let body = str::from_utf8(&body)
                     .map_err(|e| ApiError(format!("Response was not valid UTF8: {}", e)))?;
-                let body =
-                    serde_json::from_str::<models::BlockPutPost200Response>(body).map_err(|e| {
-                        ApiError(format!("Response body did not match the schema: {}", e))
-                    })?;
+                let body = serde_json::from_str::<models::BlockStatPost200Response>(body).map_err(
+                    |e| ApiError(format!("Response body did not match the schema: {}", e)),
+                )?;
                 Ok(BlockStatPostResponse::Success(body))
             }
             400 => {
@@ -869,311 +702,6 @@ where
         }
     }
 
-    async fn dag_import_post(
-        &self,
-        param_file: swagger::ByteArray,
-        context: &C,
-    ) -> Result<DagImportPostResponse, ApiError> {
-        let mut client_service = self.client_service.clone();
-        let mut uri = format!("{}/api/v0/dag/import", self.base_path);
-
-        // Query parameters
-        let query_string = {
-            let mut query_string = form_urlencoded::Serializer::new("".to_owned());
-            query_string.finish()
-        };
-        if !query_string.is_empty() {
-            uri += "?";
-            uri += &query_string;
-        }
-
-        let uri = match Uri::from_str(&uri) {
-            Ok(uri) => uri,
-            Err(err) => return Err(ApiError(format!("Unable to build URI: {}", err))),
-        };
-
-        let mut request = match Request::builder()
-            .method("POST")
-            .uri(uri)
-            .body(Body::empty())
-        {
-            Ok(req) => req,
-            Err(e) => return Err(ApiError(format!("Unable to create request: {}", e))),
-        };
-
-        let (body_string, multipart_header) = {
-            let mut multipart = Multipart::new();
-
-            // For each parameter, encode as appropriate and add to the multipart body as a stream.
-
-            let file_vec = param_file.to_vec();
-
-            let file_mime = match mime_0_2::Mime::from_str("application/octet-stream") {
-                Ok(mime) => mime,
-                Err(err) => return Err(ApiError(format!("Unable to get mime type: {:?}", err))),
-            };
-
-            let file_cursor = Cursor::new(file_vec);
-
-            let filename = None as Option<&str>;
-            multipart.add_stream("file", file_cursor, filename, Some(file_mime));
-
-            let mut fields = match multipart.prepare() {
-                Ok(fields) => fields,
-                Err(err) => return Err(ApiError(format!("Unable to build request: {}", err))),
-            };
-
-            let mut body_string = String::new();
-
-            match fields.read_to_string(&mut body_string) {
-                Ok(_) => (),
-                Err(err) => return Err(ApiError(format!("Unable to build body: {}", err))),
-            }
-
-            let boundary = fields.boundary();
-
-            let multipart_header = format!("multipart/form-data;boundary={}", boundary);
-
-            (body_string, multipart_header)
-        };
-
-        *request.body_mut() = Body::from(body_string);
-
-        request.headers_mut().insert(
-            CONTENT_TYPE,
-            match HeaderValue::from_str(&multipart_header) {
-                Ok(h) => h,
-                Err(e) => {
-                    return Err(ApiError(format!(
-                        "Unable to create header: {} - {}",
-                        multipart_header, e
-                    )))
-                }
-            },
-        );
-
-        let header = HeaderValue::from_str(Has::<XSpanIdString>::get(context).0.as_str());
-        request.headers_mut().insert(
-            HeaderName::from_static("x-span-id"),
-            match header {
-                Ok(h) => h,
-                Err(e) => {
-                    return Err(ApiError(format!(
-                        "Unable to create X-Span ID header value: {}",
-                        e
-                    )))
-                }
-            },
-        );
-
-        let response = client_service
-            .call((request, context.clone()))
-            .map_err(|e| ApiError(format!("No response received: {}", e)))
-            .await?;
-
-        match response.status().as_u16() {
-            200 => {
-                let body = response.into_body();
-                let body = body
-                    .into_raw()
-                    .map_err(|e| ApiError(format!("Failed to read response: {}", e)))
-                    .await?;
-                let body = str::from_utf8(&body)
-                    .map_err(|e| ApiError(format!("Response was not valid UTF8: {}", e)))?;
-                let body = serde_json::from_str::<models::DagImportPost200Response>(body).map_err(
-                    |e| ApiError(format!("Response body did not match the schema: {}", e)),
-                )?;
-                Ok(DagImportPostResponse::Success(body))
-            }
-            400 => {
-                let body = response.into_body();
-                let body = body
-                    .into_raw()
-                    .map_err(|e| ApiError(format!("Failed to read response: {}", e)))
-                    .await?;
-                let body = str::from_utf8(&body)
-                    .map_err(|e| ApiError(format!("Response was not valid UTF8: {}", e)))?;
-                let body = serde_json::from_str::<models::Error>(body).map_err(|e| {
-                    ApiError(format!("Response body did not match the schema: {}", e))
-                })?;
-                Ok(DagImportPostResponse::BadRequest(body))
-            }
-            code => {
-                let headers = response.headers().clone();
-                let body = response.into_body().take(100).into_raw().await;
-                Err(ApiError(format!(
-                    "Unexpected response code {}:\n{:?}\n\n{}",
-                    code,
-                    headers,
-                    match body {
-                        Ok(body) => match String::from_utf8(body) {
-                            Ok(body) => body,
-                            Err(e) => format!("<Body was not UTF8: {:?}>", e),
-                        },
-                        Err(e) => format!("<Failed to read body: {}>", e),
-                    }
-                )))
-            }
-        }
-    }
-
-    async fn dag_put_post(
-        &self,
-        param_file: swagger::ByteArray,
-        param_store_codec: Option<models::Codecs>,
-        param_input_codec: Option<models::Codecs>,
-        context: &C,
-    ) -> Result<DagPutPostResponse, ApiError> {
-        let mut client_service = self.client_service.clone();
-        let mut uri = format!("{}/api/v0/dag/put", self.base_path);
-
-        // Query parameters
-        let query_string = {
-            let mut query_string = form_urlencoded::Serializer::new("".to_owned());
-            if let Some(param_store_codec) = param_store_codec {
-                query_string.append_pair("store-codec", &param_store_codec.to_string());
-            }
-            if let Some(param_input_codec) = param_input_codec {
-                query_string.append_pair("input-codec", &param_input_codec.to_string());
-            }
-            query_string.finish()
-        };
-        if !query_string.is_empty() {
-            uri += "?";
-            uri += &query_string;
-        }
-
-        let uri = match Uri::from_str(&uri) {
-            Ok(uri) => uri,
-            Err(err) => return Err(ApiError(format!("Unable to build URI: {}", err))),
-        };
-
-        let mut request = match Request::builder()
-            .method("POST")
-            .uri(uri)
-            .body(Body::empty())
-        {
-            Ok(req) => req,
-            Err(e) => return Err(ApiError(format!("Unable to create request: {}", e))),
-        };
-
-        let (body_string, multipart_header) = {
-            let mut multipart = Multipart::new();
-
-            // For each parameter, encode as appropriate and add to the multipart body as a stream.
-
-            let file_vec = param_file.to_vec();
-
-            let file_mime = match mime_0_2::Mime::from_str("application/octet-stream") {
-                Ok(mime) => mime,
-                Err(err) => return Err(ApiError(format!("Unable to get mime type: {:?}", err))),
-            };
-
-            let file_cursor = Cursor::new(file_vec);
-
-            let filename = None as Option<&str>;
-            multipart.add_stream("file", file_cursor, filename, Some(file_mime));
-
-            let mut fields = match multipart.prepare() {
-                Ok(fields) => fields,
-                Err(err) => return Err(ApiError(format!("Unable to build request: {}", err))),
-            };
-
-            let mut body_string = String::new();
-
-            match fields.read_to_string(&mut body_string) {
-                Ok(_) => (),
-                Err(err) => return Err(ApiError(format!("Unable to build body: {}", err))),
-            }
-
-            let boundary = fields.boundary();
-
-            let multipart_header = format!("multipart/form-data;boundary={}", boundary);
-
-            (body_string, multipart_header)
-        };
¶m_input_codec.to_string()); - } - query_string.finish() - }; - if !query_string.is_empty() { - uri += "?"; - uri += &query_string; - } - - let uri = match Uri::from_str(&uri) { - Ok(uri) => uri, - Err(err) => return Err(ApiError(format!("Unable to build URI: {}", err))), - }; - - let mut request = match Request::builder() - .method("POST") - .uri(uri) - .body(Body::empty()) - { - Ok(req) => req, - Err(e) => return Err(ApiError(format!("Unable to create request: {}", e))), - }; - - let (body_string, multipart_header) = { - let mut multipart = Multipart::new(); - - // For each parameter, encode as appropriate and add to the multipart body as a stream. - - let file_vec = param_file.to_vec(); - - let file_mime = match mime_0_2::Mime::from_str("application/octet-stream") { - Ok(mime) => mime, - Err(err) => return Err(ApiError(format!("Unable to get mime type: {:?}", err))), - }; - - let file_cursor = Cursor::new(file_vec); - - let filename = None as Option<&str>; - multipart.add_stream("file", file_cursor, filename, Some(file_mime)); - - let mut fields = match multipart.prepare() { - Ok(fields) => fields, - Err(err) => return Err(ApiError(format!("Unable to build request: {}", err))), - }; - - let mut body_string = String::new(); - - match fields.read_to_string(&mut body_string) { - Ok(_) => (), - Err(err) => return Err(ApiError(format!("Unable to build body: {}", err))), - } - - let boundary = fields.boundary(); - - let multipart_header = format!("multipart/form-data;boundary={}", boundary); - - (body_string, multipart_header) - }; - - *request.body_mut() = Body::from(body_string); - - request.headers_mut().insert( - CONTENT_TYPE, - match HeaderValue::from_str(&multipart_header) { - Ok(h) => h, - Err(e) => { - return Err(ApiError(format!( - "Unable to create header: {} - {}", - multipart_header, e - ))) - } - }, - ); - - let header = HeaderValue::from_str(Has::::get(context).0.as_str()); - request.headers_mut().insert( - HeaderName::from_static("x-span-id"), - match header { - Ok(h) => h, - Err(e) => { - return Err(ApiError(format!( - "Unable to create X-Span ID header value: {}", - e - ))) - } - }, - ); - - let response = client_service - .call((request, context.clone())) - .map_err(|e| ApiError(format!("No response received: {}", e))) - .await?; - - match response.status().as_u16() { - 200 => { - let body = response.into_body(); - let body = body - .into_raw() - .map_err(|e| ApiError(format!("Failed to read response: {}", e))) - .await?; - let body = str::from_utf8(&body) - .map_err(|e| ApiError(format!("Response was not valid UTF8: {}", e)))?; - let body = - serde_json::from_str::(body).map_err(|e| { - ApiError(format!("Response body did not match the schema: {}", e)) - })?; - Ok(DagPutPostResponse::Success(body)) - } - 400 => { - let body = response.into_body(); - let body = body - .into_raw() - .map_err(|e| ApiError(format!("Failed to read response: {}", e))) - .await?; - let body = str::from_utf8(&body) - .map_err(|e| ApiError(format!("Response was not valid UTF8: {}", e)))?; - let body = serde_json::from_str::(body).map_err(|e| { - ApiError(format!("Response body did not match the schema: {}", e)) - })?; - Ok(DagPutPostResponse::BadRequest(body)) - } - code => { - let headers = response.headers().clone(); - let body = response.into_body().take(100).into_raw().await; - Err(ApiError(format!( - "Unexpected response code {}:\n{:?}\n\n{}", - code, - headers, - match body { - Ok(body) => match String::from_utf8(body) { - Ok(body) => body, - Err(e) => format!("", e), - }, - Err(e) => 
format!("", e), - } - ))) - } - } - } - async fn dag_resolve_post( &self, param_arg: String, diff --git a/kubo-rpc-server/src/lib.rs b/kubo-rpc-server/src/lib.rs index c6781dde8..155ac1773 100644 --- a/kubo-rpc-server/src/lib.rs +++ b/kubo-rpc-server/src/lib.rs @@ -33,20 +33,11 @@ pub enum BlockGetPostResponse { InternalError(models::Error), } -#[derive(Debug, PartialEq, Serialize, Deserialize)] -#[must_use] -pub enum BlockPutPostResponse { - /// success - Success(models::BlockPutPost200Response), - /// bad request - BadRequest(models::Error), -} - #[derive(Debug, PartialEq, Serialize, Deserialize)] #[must_use] pub enum BlockStatPostResponse { /// success - Success(models::BlockPutPost200Response), + Success(models::BlockStatPost200Response), /// bad request BadRequest(models::Error), } @@ -60,24 +51,6 @@ pub enum DagGetPostResponse { BadRequest(models::Error), } -#[derive(Debug, PartialEq, Serialize, Deserialize)] -#[must_use] -pub enum DagImportPostResponse { - /// success - Success(models::DagImportPost200Response), - /// bad request - BadRequest(models::Error), -} - -#[derive(Debug, PartialEq, Serialize, Deserialize)] -#[must_use] -pub enum DagPutPostResponse { - /// success - Success(models::DagPutPost200Response), - /// bad request - BadRequest(models::Error), -} - #[derive(Debug, PartialEq, Serialize, Deserialize)] #[must_use] pub enum DagResolvePostResponse { @@ -161,16 +134,6 @@ pub trait Api { context: &C, ) -> Result; - /// Put a single IPFS block - async fn block_put_post( - &self, - file: swagger::ByteArray, - cid_codec: Option, - mhtype: Option, - pin: Option, - context: &C, - ) -> Result; - /// Report statistics about a block async fn block_stat_post( &self, @@ -186,22 +149,6 @@ pub trait Api { context: &C, ) -> Result; - /// Import a CAR file of IPLD nodes into IPFS - async fn dag_import_post( - &self, - file: swagger::ByteArray, - context: &C, - ) -> Result; - - /// Put an IPLD node into IPFS - async fn dag_put_post( - &self, - file: swagger::ByteArray, - store_codec: Option, - input_codec: Option, - context: &C, - ) -> Result; - /// Resolve an IPFS path to a DAG node async fn dag_resolve_post( &self, @@ -257,15 +204,6 @@ pub trait ApiNoContext { offline: Option, ) -> Result; - /// Put a single IPFS block - async fn block_put_post( - &self, - file: swagger::ByteArray, - cid_codec: Option, - mhtype: Option, - pin: Option, - ) -> Result; - /// Report statistics about a block async fn block_stat_post(&self, arg: String) -> Result; @@ -276,20 +214,6 @@ pub trait ApiNoContext { output_codec: Option, ) -> Result; - /// Import a CAR file of IPLD nodes into IPFS - async fn dag_import_post( - &self, - file: swagger::ByteArray, - ) -> Result; - - /// Put an IPLD node into IPFS - async fn dag_put_post( - &self, - file: swagger::ByteArray, - store_codec: Option, - input_codec: Option, - ) -> Result; - /// Resolve an IPFS path to a DAG node async fn dag_resolve_post(&self, arg: String) -> Result; @@ -358,20 +282,6 @@ impl + Send + Sync, C: Clone + Send + Sync> ApiNoContext for Contex .await } - /// Put a single IPFS block - async fn block_put_post( - &self, - file: swagger::ByteArray, - cid_codec: Option, - mhtype: Option, - pin: Option, - ) -> Result { - let context = self.context().clone(); - self.api() - .block_put_post(file, cid_codec, mhtype, pin, &context) - .await - } - /// Report statistics about a block async fn block_stat_post(&self, arg: String) -> Result { let context = self.context().clone(); @@ -388,28 +298,6 @@ impl + Send + Sync, C: Clone + Send + Sync> ApiNoContext for 
Contex self.api().dag_get_post(arg, output_codec, &context).await } - /// Import a CAR file of IPLD nodes into IPFS - async fn dag_import_post( - &self, - file: swagger::ByteArray, - ) -> Result { - let context = self.context().clone(); - self.api().dag_import_post(file, &context).await - } - - /// Put an IPLD node into IPFS - async fn dag_put_post( - &self, - file: swagger::ByteArray, - store_codec: Option, - input_codec: Option, - ) -> Result { - let context = self.context().clone(); - self.api() - .dag_put_post(file, store_codec, input_codec, &context) - .await - } - /// Resolve an IPFS path to a DAG node async fn dag_resolve_post(&self, arg: String) -> Result { let context = self.context().clone(); diff --git a/kubo-rpc-server/src/models.rs b/kubo-rpc-server/src/models.rs index 3e9f80248..d98a2975b 100644 --- a/kubo-rpc-server/src/models.rs +++ b/kubo-rpc-server/src/models.rs @@ -8,7 +8,7 @@ use crate::models; #[derive(Debug, Clone, PartialEq, serde::Serialize, serde::Deserialize, validator::Validate)] #[cfg_attr(feature = "conversion", derive(frunk::LabelledGeneric))] -pub struct BlockPutPost200Response { +pub struct BlockStatPost200Response { #[serde(rename = "Key")] pub key: String, @@ -16,17 +16,17 @@ pub struct BlockPutPost200Response { pub size: f64, } -impl BlockPutPost200Response { +impl BlockStatPost200Response { #[allow(clippy::new_without_default)] - pub fn new(key: String, size: f64) -> BlockPutPost200Response { - BlockPutPost200Response { key, size } + pub fn new(key: String, size: f64) -> BlockStatPost200Response { + BlockStatPost200Response { key, size } } } -/// Converts the BlockPutPost200Response value to the Query Parameters representation (style=form, explode=false) +/// Converts the BlockStatPost200Response value to the Query Parameters representation (style=form, explode=false) /// specified in https://swagger.io/docs/specification/serialization/ /// Should be implemented in a serde serializer -impl std::string::ToString for BlockPutPost200Response { +impl std::string::ToString for BlockStatPost200Response { fn to_string(&self) -> String { let params: Vec> = vec![ Some("Key".to_string()), @@ -39,10 +39,10 @@ impl std::string::ToString for BlockPutPost200Response { } } -/// Converts Query Parameters representation (style=form, explode=false) to a BlockPutPost200Response value +/// Converts Query Parameters representation (style=form, explode=false) to a BlockStatPost200Response value /// as specified in https://swagger.io/docs/specification/serialization/ /// Should be implemented in a serde deserializer -impl std::str::FromStr for BlockPutPost200Response { +impl std::str::FromStr for BlockStatPost200Response { type Err = String; fn from_str(s: &str) -> std::result::Result { @@ -65,7 +65,7 @@ impl std::str::FromStr for BlockPutPost200Response { Some(x) => x, None => { return std::result::Result::Err( - "Missing value while parsing BlockPutPost200Response".to_string(), + "Missing value while parsing BlockStatPost200Response".to_string(), ) } }; @@ -83,7 +83,7 @@ impl std::str::FromStr for BlockPutPost200Response { ), _ => { return std::result::Result::Err( - "Unexpected key while parsing BlockPutPost200Response".to_string(), + "Unexpected key while parsing BlockStatPost200Response".to_string(), ) } } @@ -94,37 +94,37 @@ impl std::str::FromStr for BlockPutPost200Response { } // Use the intermediate representation to return the struct - std::result::Result::Ok(BlockPutPost200Response { + std::result::Result::Ok(BlockStatPost200Response { key: intermediate_rep .key 
.into_iter() .next() - .ok_or_else(|| "Key missing in BlockPutPost200Response".to_string())?, + .ok_or_else(|| "Key missing in BlockStatPost200Response".to_string())?, size: intermediate_rep .size .into_iter() .next() - .ok_or_else(|| "Size missing in BlockPutPost200Response".to_string())?, + .ok_or_else(|| "Size missing in BlockStatPost200Response".to_string())?, }) } } -// Methods for converting between header::IntoHeaderValue and hyper::header::HeaderValue +// Methods for converting between header::IntoHeaderValue and hyper::header::HeaderValue #[cfg(any(feature = "client", feature = "server"))] -impl std::convert::TryFrom> +impl std::convert::TryFrom> for hyper::header::HeaderValue { type Error = String; fn try_from( - hdr_value: header::IntoHeaderValue, + hdr_value: header::IntoHeaderValue, ) -> std::result::Result { let hdr_value = hdr_value.to_string(); match hyper::header::HeaderValue::from_str(&hdr_value) { std::result::Result::Ok(value) => std::result::Result::Ok(value), std::result::Result::Err(e) => std::result::Result::Err(format!( - "Invalid header value for BlockPutPost200Response - value: {} is invalid {}", + "Invalid header value for BlockStatPost200Response - value: {} is invalid {}", hdr_value, e )), } @@ -133,19 +133,19 @@ impl std::convert::TryFrom> #[cfg(any(feature = "client", feature = "server"))] impl std::convert::TryFrom - for header::IntoHeaderValue + for header::IntoHeaderValue { type Error = String; fn try_from(hdr_value: hyper::header::HeaderValue) -> std::result::Result { match hdr_value.to_str() { std::result::Result::Ok(value) => { - match ::from_str(value) { + match ::from_str(value) { std::result::Result::Ok(value) => { std::result::Result::Ok(header::IntoHeaderValue(value)) } std::result::Result::Err(err) => std::result::Result::Err(format!( - "Unable to convert header value '{}' into BlockPutPost200Response - {}", + "Unable to convert header value '{}' into BlockStatPost200Response - {}", value, err )), } @@ -203,416 +203,6 @@ impl std::str::FromStr for Codecs { } } -#[derive(Debug, Clone, PartialEq, serde::Serialize, serde::Deserialize, validator::Validate)] -#[cfg_attr(feature = "conversion", derive(frunk::LabelledGeneric))] -pub struct DagImportPost200Response { - #[serde(rename = "Root")] - pub root: models::DagPutPost200Response, -} - -impl DagImportPost200Response { - #[allow(clippy::new_without_default)] - pub fn new(root: models::DagPutPost200Response) -> DagImportPost200Response { - DagImportPost200Response { root } - } -} - -/// Converts the DagImportPost200Response value to the Query Parameters representation (style=form, explode=false) -/// specified in https://swagger.io/docs/specification/serialization/ -/// Should be implemented in a serde serializer -impl std::string::ToString for DagImportPost200Response { - fn to_string(&self) -> String { - let params: Vec> = vec![ - // Skipping Root in query parameter serialization - - ]; - - params.into_iter().flatten().collect::>().join(",") - } -} - -/// Converts Query Parameters representation (style=form, explode=false) to a DagImportPost200Response value -/// as specified in https://swagger.io/docs/specification/serialization/ -/// Should be implemented in a serde deserializer -impl std::str::FromStr for DagImportPost200Response { - type Err = String; - - fn from_str(s: &str) -> std::result::Result { - /// An intermediate representation of the struct to use for parsing. 
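Aside on the encoding at work in this file: every generated model, the renamed BlockStatPost200Response above and the DagImportPost200Response being removed in this hunk alike, round-trips through the same comma-separated style=form, explode=false representation, serializing fields as alternating key,value tokens such as "Key,abc,Size,7". A minimal standalone sketch of that convention, using illustrative names rather than the generated API (the removed from_str body resumes below):

    // Sketch of the "style=form, explode=false" round trip used by these
    // generated models; StatResponse is an illustrative stand-in.
    struct StatResponse {
        key: String,
        size: f64,
    }

    impl StatResponse {
        // Serialize as alternating key,value tokens.
        fn to_form_string(&self) -> String {
            format!("Key,{},Size,{}", self.key, self.size)
        }

        // Parse the same shape back, rejecting unknown or missing fields.
        fn from_form_str(s: &str) -> Result<StatResponse, String> {
            let mut key = None;
            let mut size = None;
            let mut iter = s.split(',');
            while let Some(field) = iter.next() {
                let val = iter.next().ok_or("missing value")?;
                match field {
                    "Key" => key = Some(val.to_string()),
                    "Size" => size = Some(val.parse::<f64>().map_err(|e| e.to_string())?),
                    other => return Err(format!("unexpected key: {other}")),
                }
            }
            Ok(StatResponse {
                key: key.ok_or("Key missing")?,
                size: size.ok_or("Size missing")?,
            })
        }
    }

The generated impls differ from this only in their intermediate-representation bookkeeping.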
- #[derive(Default)] - #[allow(dead_code)] - struct IntermediateRep { - pub root: Vec, - } - - let mut intermediate_rep = IntermediateRep::default(); - - // Parse into intermediate representation - let mut string_iter = s.split(','); - let mut key_result = string_iter.next(); - - while key_result.is_some() { - let val = match string_iter.next() { - Some(x) => x, - None => { - return std::result::Result::Err( - "Missing value while parsing DagImportPost200Response".to_string(), - ) - } - }; - - if let Some(key) = key_result { - #[allow(clippy::match_single_binding)] - match key { - #[allow(clippy::redundant_clone)] - "Root" => intermediate_rep.root.push( - ::from_str(val) - .map_err(|x| x.to_string())?, - ), - _ => { - return std::result::Result::Err( - "Unexpected key while parsing DagImportPost200Response".to_string(), - ) - } - } - } - - // Get the next key - key_result = string_iter.next(); - } - - // Use the intermediate representation to return the struct - std::result::Result::Ok(DagImportPost200Response { - root: intermediate_rep - .root - .into_iter() - .next() - .ok_or_else(|| "Root missing in DagImportPost200Response".to_string())?, - }) - } -} - -// Methods for converting between header::IntoHeaderValue and hyper::header::HeaderValue - -#[cfg(any(feature = "client", feature = "server"))] -impl std::convert::TryFrom> - for hyper::header::HeaderValue -{ - type Error = String; - - fn try_from( - hdr_value: header::IntoHeaderValue, - ) -> std::result::Result { - let hdr_value = hdr_value.to_string(); - match hyper::header::HeaderValue::from_str(&hdr_value) { - std::result::Result::Ok(value) => std::result::Result::Ok(value), - std::result::Result::Err(e) => std::result::Result::Err(format!( - "Invalid header value for DagImportPost200Response - value: {} is invalid {}", - hdr_value, e - )), - } - } -} - -#[cfg(any(feature = "client", feature = "server"))] -impl std::convert::TryFrom - for header::IntoHeaderValue -{ - type Error = String; - - fn try_from(hdr_value: hyper::header::HeaderValue) -> std::result::Result { - match hdr_value.to_str() { - std::result::Result::Ok(value) => { - match ::from_str(value) { - std::result::Result::Ok(value) => { - std::result::Result::Ok(header::IntoHeaderValue(value)) - } - std::result::Result::Err(err) => std::result::Result::Err(format!( - "Unable to convert header value '{}' into DagImportPost200Response - {}", - value, err - )), - } - } - std::result::Result::Err(e) => std::result::Result::Err(format!( - "Unable to convert header: {:?} to string: {}", - hdr_value, e - )), - } - } -} - -#[derive(Debug, Clone, PartialEq, serde::Serialize, serde::Deserialize, validator::Validate)] -#[cfg_attr(feature = "conversion", derive(frunk::LabelledGeneric))] -pub struct DagPutPost200Response { - #[serde(rename = "Cid")] - pub cid: models::DagPutPost200ResponseCid, -} - -impl DagPutPost200Response { - #[allow(clippy::new_without_default)] - pub fn new(cid: models::DagPutPost200ResponseCid) -> DagPutPost200Response { - DagPutPost200Response { cid } - } -} - -/// Converts the DagPutPost200Response value to the Query Parameters representation (style=form, explode=false) -/// specified in https://swagger.io/docs/specification/serialization/ -/// Should be implemented in a serde serializer -impl std::string::ToString for DagPutPost200Response { - fn to_string(&self) -> String { - let params: Vec> = vec![ - // Skipping Cid in query parameter serialization - - ]; - - params.into_iter().flatten().collect::>().join(",") - } -} - -/// Converts Query Parameters 
representation (style=form, explode=false) to a DagPutPost200Response value -/// as specified in https://swagger.io/docs/specification/serialization/ -/// Should be implemented in a serde deserializer -impl std::str::FromStr for DagPutPost200Response { - type Err = String; - - fn from_str(s: &str) -> std::result::Result { - /// An intermediate representation of the struct to use for parsing. - #[derive(Default)] - #[allow(dead_code)] - struct IntermediateRep { - pub cid: Vec, - } - - let mut intermediate_rep = IntermediateRep::default(); - - // Parse into intermediate representation - let mut string_iter = s.split(','); - let mut key_result = string_iter.next(); - - while key_result.is_some() { - let val = match string_iter.next() { - Some(x) => x, - None => { - return std::result::Result::Err( - "Missing value while parsing DagPutPost200Response".to_string(), - ) - } - }; - - if let Some(key) = key_result { - #[allow(clippy::match_single_binding)] - match key { - #[allow(clippy::redundant_clone)] - "Cid" => intermediate_rep.cid.push( - ::from_str(val) - .map_err(|x| x.to_string())?, - ), - _ => { - return std::result::Result::Err( - "Unexpected key while parsing DagPutPost200Response".to_string(), - ) - } - } - } - - // Get the next key - key_result = string_iter.next(); - } - - // Use the intermediate representation to return the struct - std::result::Result::Ok(DagPutPost200Response { - cid: intermediate_rep - .cid - .into_iter() - .next() - .ok_or_else(|| "Cid missing in DagPutPost200Response".to_string())?, - }) - } -} - -// Methods for converting between header::IntoHeaderValue and hyper::header::HeaderValue - -#[cfg(any(feature = "client", feature = "server"))] -impl std::convert::TryFrom> - for hyper::header::HeaderValue -{ - type Error = String; - - fn try_from( - hdr_value: header::IntoHeaderValue, - ) -> std::result::Result { - let hdr_value = hdr_value.to_string(); - match hyper::header::HeaderValue::from_str(&hdr_value) { - std::result::Result::Ok(value) => std::result::Result::Ok(value), - std::result::Result::Err(e) => std::result::Result::Err(format!( - "Invalid header value for DagPutPost200Response - value: {} is invalid {}", - hdr_value, e - )), - } - } -} - -#[cfg(any(feature = "client", feature = "server"))] -impl std::convert::TryFrom - for header::IntoHeaderValue -{ - type Error = String; - - fn try_from(hdr_value: hyper::header::HeaderValue) -> std::result::Result { - match hdr_value.to_str() { - std::result::Result::Ok(value) => { - match ::from_str(value) { - std::result::Result::Ok(value) => { - std::result::Result::Ok(header::IntoHeaderValue(value)) - } - std::result::Result::Err(err) => std::result::Result::Err(format!( - "Unable to convert header value '{}' into DagPutPost200Response - {}", - value, err - )), - } - } - std::result::Result::Err(e) => std::result::Result::Err(format!( - "Unable to convert header: {:?} to string: {}", - hdr_value, e - )), - } - } -} - -#[derive(Debug, Clone, PartialEq, serde::Serialize, serde::Deserialize, validator::Validate)] -#[cfg_attr(feature = "conversion", derive(frunk::LabelledGeneric))] -pub struct DagPutPost200ResponseCid { - #[serde(rename = "/")] - pub slash: String, -} - -impl DagPutPost200ResponseCid { - #[allow(clippy::new_without_default)] - pub fn new(slash: String) -> DagPutPost200ResponseCid { - DagPutPost200ResponseCid { slash } - } -} - -/// Converts the DagPutPost200ResponseCid value to the Query Parameters representation (style=form, explode=false) -/// specified in 
https://swagger.io/docs/specification/serialization/ -/// Should be implemented in a serde serializer -impl std::string::ToString for DagPutPost200ResponseCid { - fn to_string(&self) -> String { - let params: Vec> = vec![Some("/".to_string()), Some(self.slash.to_string())]; - - params.into_iter().flatten().collect::>().join(",") - } -} - -/// Converts Query Parameters representation (style=form, explode=false) to a DagPutPost200ResponseCid value -/// as specified in https://swagger.io/docs/specification/serialization/ -/// Should be implemented in a serde deserializer -impl std::str::FromStr for DagPutPost200ResponseCid { - type Err = String; - - fn from_str(s: &str) -> std::result::Result { - /// An intermediate representation of the struct to use for parsing. - #[derive(Default)] - #[allow(dead_code)] - struct IntermediateRep { - pub slash: Vec, - } - - let mut intermediate_rep = IntermediateRep::default(); - - // Parse into intermediate representation - let mut string_iter = s.split(','); - let mut key_result = string_iter.next(); - - while key_result.is_some() { - let val = match string_iter.next() { - Some(x) => x, - None => { - return std::result::Result::Err( - "Missing value while parsing DagPutPost200ResponseCid".to_string(), - ) - } - }; - - if let Some(key) = key_result { - #[allow(clippy::match_single_binding)] - match key { - #[allow(clippy::redundant_clone)] - "/" => intermediate_rep.slash.push( - ::from_str(val).map_err(|x| x.to_string())?, - ), - _ => { - return std::result::Result::Err( - "Unexpected key while parsing DagPutPost200ResponseCid".to_string(), - ) - } - } - } - - // Get the next key - key_result = string_iter.next(); - } - - // Use the intermediate representation to return the struct - std::result::Result::Ok(DagPutPost200ResponseCid { - slash: intermediate_rep - .slash - .into_iter() - .next() - .ok_or_else(|| "/ missing in DagPutPost200ResponseCid".to_string())?, - }) - } -} - -// Methods for converting between header::IntoHeaderValue and hyper::header::HeaderValue - -#[cfg(any(feature = "client", feature = "server"))] -impl std::convert::TryFrom> - for hyper::header::HeaderValue -{ - type Error = String; - - fn try_from( - hdr_value: header::IntoHeaderValue, - ) -> std::result::Result { - let hdr_value = hdr_value.to_string(); - match hyper::header::HeaderValue::from_str(&hdr_value) { - std::result::Result::Ok(value) => std::result::Result::Ok(value), - std::result::Result::Err(e) => std::result::Result::Err(format!( - "Invalid header value for DagPutPost200ResponseCid - value: {} is invalid {}", - hdr_value, e - )), - } - } -} - -#[cfg(any(feature = "client", feature = "server"))] -impl std::convert::TryFrom - for header::IntoHeaderValue -{ - type Error = String; - - fn try_from(hdr_value: hyper::header::HeaderValue) -> std::result::Result { - match hdr_value.to_str() { - std::result::Result::Ok(value) => { - match ::from_str(value) { - std::result::Result::Ok(value) => { - std::result::Result::Ok(header::IntoHeaderValue(value)) - } - std::result::Result::Err(err) => std::result::Result::Err(format!( - "Unable to convert header value '{}' into DagPutPost200ResponseCid - {}", - value, err - )), - } - } - std::result::Result::Err(e) => std::result::Result::Err(format!( - "Unable to convert header: {:?} to string: {}", - hdr_value, e - )), - } - } -} - #[derive(Debug, Clone, PartialEq, serde::Serialize, serde::Deserialize, validator::Validate)] #[cfg_attr(feature = "conversion", derive(frunk::LabelledGeneric))] pub struct DagResolvePost200Response { diff 
--git a/kubo-rpc-server/src/server/mod.rs b/kubo-rpc-server/src/server/mod.rs index b48073ea0..59a54c1da 100644 --- a/kubo-rpc-server/src/server/mod.rs +++ b/kubo-rpc-server/src/server/mod.rs @@ -2,8 +2,6 @@ use futures::{future, future::BoxFuture, future::FutureExt, stream, stream::TryS use hyper::header::{HeaderName, HeaderValue, CONTENT_TYPE}; use hyper::{Body, HeaderMap, Request, Response, StatusCode}; use log::warn; -use multipart::server::save::SaveResult; -use multipart::server::Multipart; #[allow(unused_imports)] use std::convert::{TryFrom, TryInto}; use std::error::Error; @@ -24,10 +22,9 @@ pub use crate::context; type ServiceFuture = BoxFuture<'static, Result, crate::ServiceError>>; use crate::{ - Api, BlockGetPostResponse, BlockPutPostResponse, BlockStatPostResponse, DagGetPostResponse, - DagImportPostResponse, DagPutPostResponse, DagResolvePostResponse, IdPostResponse, - PinAddPostResponse, PinRmPostResponse, SwarmConnectPostResponse, SwarmPeersPostResponse, - VersionPostResponse, + Api, BlockGetPostResponse, BlockStatPostResponse, DagGetPostResponse, DagResolvePostResponse, + IdPostResponse, PinAddPostResponse, PinRmPostResponse, SwarmConnectPostResponse, + SwarmPeersPostResponse, VersionPostResponse, }; mod paths { @@ -36,11 +33,8 @@ mod paths { lazy_static! { pub static ref GLOBAL_REGEX_SET: regex::RegexSet = regex::RegexSet::new(vec![ r"^/api/v0/block/get$", - r"^/api/v0/block/put$", r"^/api/v0/block/stat$", r"^/api/v0/dag/get$", - r"^/api/v0/dag/import$", - r"^/api/v0/dag/put$", r"^/api/v0/dag/resolve$", r"^/api/v0/id$", r"^/api/v0/pin/add$", @@ -52,18 +46,15 @@ mod paths { .expect("Unable to create global regex set"); } pub(crate) static ID_BLOCK_GET: usize = 0; - pub(crate) static ID_BLOCK_PUT: usize = 1; - pub(crate) static ID_BLOCK_STAT: usize = 2; - pub(crate) static ID_DAG_GET: usize = 3; - pub(crate) static ID_DAG_IMPORT: usize = 4; - pub(crate) static ID_DAG_PUT: usize = 5; - pub(crate) static ID_DAG_RESOLVE: usize = 6; - pub(crate) static ID_ID: usize = 7; - pub(crate) static ID_PIN_ADD: usize = 8; - pub(crate) static ID_PIN_RM: usize = 9; - pub(crate) static ID_SWARM_CONNECT: usize = 10; - pub(crate) static ID_SWARM_PEERS: usize = 11; - pub(crate) static ID_VERSION: usize = 12; + pub(crate) static ID_BLOCK_STAT: usize = 1; + pub(crate) static ID_DAG_GET: usize = 2; + pub(crate) static ID_DAG_RESOLVE: usize = 3; + pub(crate) static ID_ID: usize = 4; + pub(crate) static ID_PIN_ADD: usize = 5; + pub(crate) static ID_PIN_RM: usize = 6; + pub(crate) static ID_SWARM_CONNECT: usize = 7; + pub(crate) static ID_SWARM_PEERS: usize = 8; + pub(crate) static ID_VERSION: usize = 9; } pub struct MakeService @@ -308,171 +299,6 @@ where Ok(response) } - // BlockPutPost - POST /block/put - hyper::Method::POST if path.matched(paths::ID_BLOCK_PUT) => { - let boundary = - match swagger::multipart::form::boundary(&headers) { - Some(boundary) => boundary.to_string(), - None => return Ok(Response::builder() - .status(StatusCode::BAD_REQUEST) - .body(Body::from("Couldn't find valid multipart body".to_string())) - .expect( - "Unable to create Bad Request response for incorrect boundary", - )), - }; - - // Query parameters (note that non-required or collection query parameters will ignore garbage values, rather than causing a 400 response) - let query_params = - form_urlencoded::parse(uri.query().unwrap_or_default().as_bytes()) - .collect::>(); - let param_cid_codec = query_params - .iter() - .filter(|e| e.0 == "cid-codec") - .map(|e| e.1.clone()) - .next(); - let param_cid_codec = match 
param_cid_codec { - Some(param_cid_codec) => { - let param_cid_codec = - ::from_str(¶m_cid_codec); - match param_cid_codec { - Ok(param_cid_codec) => Some(param_cid_codec), - Err(e) => return Ok(Response::builder() - .status(StatusCode::BAD_REQUEST) - .body(Body::from(format!("Couldn't parse query parameter cid-codec - doesn't match schema: {}", e))) - .expect("Unable to create Bad Request response for invalid query parameter cid-codec")), - } - } - None => None, - }; - let param_mhtype = query_params - .iter() - .filter(|e| e.0 == "mhtype") - .map(|e| e.1.clone()) - .next(); - let param_mhtype = match param_mhtype { - Some(param_mhtype) => { - let param_mhtype = - ::from_str(¶m_mhtype); - match param_mhtype { - Ok(param_mhtype) => Some(param_mhtype), - Err(e) => return Ok(Response::builder() - .status(StatusCode::BAD_REQUEST) - .body(Body::from(format!("Couldn't parse query parameter mhtype - doesn't match schema: {}", e))) - .expect("Unable to create Bad Request response for invalid query parameter mhtype")), - } - } - None => None, - }; - let param_pin = query_params - .iter() - .filter(|e| e.0 == "pin") - .map(|e| e.1.clone()) - .next(); - let param_pin = match param_pin { - Some(param_pin) => { - let param_pin = ::from_str(¶m_pin); - match param_pin { - Ok(param_pin) => Some(param_pin), - Err(e) => return Ok(Response::builder() - .status(StatusCode::BAD_REQUEST) - .body(Body::from(format!("Couldn't parse query parameter pin - doesn't match schema: {}", e))) - .expect("Unable to create Bad Request response for invalid query parameter pin")), - } - } - None => None, - }; - - // Form Body parameters (note that non-required body parameters will ignore garbage - // values, rather than causing a 400 response). Produce warning header and logs for - // any unused fields. 
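All three removed multipart handlers (block/put here, and dag/import and dag/put below) read the form body with the same save-to-temp pattern that follows this comment. A compact sketch of just that step, assuming the multipart crate's server-side API exactly as this file used it (read_file_field is an illustrative name, not part of the generated code):

    // Sketch of the generated multipart handling, mirroring the removed code.
    use std::io::Read;
    use multipart::server::{save::SaveResult, Multipart};

    fn read_file_field(body: &[u8], boundary: String) -> Result<Vec<u8>, String> {
        // Buffer every part to temporary storage; anything short of a full
        // save means some part could not be processed.
        let mut entries = match Multipart::with_body(body, boundary).save().temp() {
            SaveResult::Full(entries) => entries,
            _ => return Err("unable to process all message parts".to_string()),
        };
        // The generated handlers require exactly one field named "file".
        let field = entries
            .fields
            .remove("file")
            .ok_or_else(|| "missing required form parameter file".to_string())?;
        let mut data = Vec::new();
        field[0]
            .data
            .readable()
            .map_err(|e| e.to_string())?
            .read_to_end(&mut data)
            .map_err(|e| e.to_string())?;
        Ok(data)
    }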
- let result = body.into_raw(); - match result.await { - Ok(body) => { - use std::io::Read; - - // Read Form Parameters from body - let mut entries = match Multipart::with_body(&body.to_vec()[..], boundary).save().temp() { - SaveResult::Full(entries) => { - entries - }, - _ => { - return Ok(Response::builder() - .status(StatusCode::BAD_REQUEST) - .body(Body::from("Unable to process all message parts".to_string())) - .expect("Unable to create Bad Request response due to failure to process all message")) - }, - }; - let field_file = entries.fields.remove("file"); - let param_file = match field_file { - Some(field) => { - let mut reader = field[0].data.readable().expect("Unable to read field for file"); - let mut data = vec![]; - reader.read_to_end(&mut data).expect("Reading saved binary data should never fail"); - swagger::ByteArray(data) - }, - None => { - return Ok( - Response::builder() - .status(StatusCode::BAD_REQUEST) - .body(Body::from("Missing required form parameter file".to_string())) - .expect("Unable to create Bad Request due to missing required form parameter file")) - } - }; - let result = api_impl.block_put_post( - param_file, - param_cid_codec, - param_mhtype, - param_pin, - &context - ).await; - let mut response = Response::new(Body::empty()); - response.headers_mut().insert( - HeaderName::from_static("x-span-id"), - HeaderValue::from_str((&context as &dyn Has).get().0.clone().as_str()) - .expect("Unable to create X-Span-ID header value")); - - match result { - Ok(rsp) => match rsp { - BlockPutPostResponse::Success - (body) - => { - *response.status_mut() = StatusCode::from_u16(200).expect("Unable to turn 200 into a StatusCode"); - response.headers_mut().insert( - CONTENT_TYPE, - HeaderValue::from_str("application/json") - .expect("Unable to create Content-Type header for BLOCK_PUT_POST_SUCCESS")); - let body_content = serde_json::to_string(&body).expect("impossible to fail to serialize"); - *response.body_mut() = Body::from(body_content); - }, - BlockPutPostResponse::BadRequest - (body) - => { - *response.status_mut() = StatusCode::from_u16(400).expect("Unable to turn 400 into a StatusCode"); - response.headers_mut().insert( - CONTENT_TYPE, - HeaderValue::from_str("application/json") - .expect("Unable to create Content-Type header for BLOCK_PUT_POST_BAD_REQUEST")); - let body_content = serde_json::to_string(&body).expect("impossible to fail to serialize"); - *response.body_mut() = Body::from(body_content); - }, - }, - Err(_) => { - // Application code returned an error. This should not happen, as the implementation should - // return a valid response. 
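For reference, the Success and BadRequest arms above both instantiate one generated pattern: choose the status code, set a JSON Content-Type, and serialize the typed model into the body. A simplified stand-in for that pattern, not the generated code itself (the handler's internal-error arm continues below):

    // Sketch of the typed-response-to-hyper::Response mapping, assuming
    // hyper 0.14 and serde_json as used throughout this crate.
    use hyper::{header::CONTENT_TYPE, Body, Response, StatusCode};

    fn json_response<T: serde::Serialize>(status: u16, body: &T) -> Response<Body> {
        let mut response = Response::new(Body::empty());
        *response.status_mut() =
            StatusCode::from_u16(status).expect("status code is statically known");
        response.headers_mut().insert(
            CONTENT_TYPE,
            hyper::header::HeaderValue::from_static("application/json"),
        );
        *response.body_mut() = Body::from(
            serde_json::to_string(body).expect("models serialize infallibly"),
        );
        response
    }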
- *response.status_mut() = StatusCode::INTERNAL_SERVER_ERROR; - *response.body_mut() = Body::from("An internal error occurred"); - }, - } - - Ok(response) - }, - Err(e) => Ok(Response::builder() - .status(StatusCode::BAD_REQUEST) - .body(Body::from("Couldn't read multipart body".to_string())) - .expect("Unable to create Bad Request response due to unable read multipart body")), - } - } - // BlockStatPost - POST /block/stat hyper::Method::POST if path.matched(paths::ID_BLOCK_STAT) => { // Query parameters (note that non-required or collection query parameters will ignore garbage values, rather than causing a 400 response) @@ -659,253 +485,6 @@ where Ok(response) } - // DagImportPost - POST /dag/import - hyper::Method::POST if path.matched(paths::ID_DAG_IMPORT) => { - let boundary = - match swagger::multipart::form::boundary(&headers) { - Some(boundary) => boundary.to_string(), - None => return Ok(Response::builder() - .status(StatusCode::BAD_REQUEST) - .body(Body::from("Couldn't find valid multipart body".to_string())) - .expect( - "Unable to create Bad Request response for incorrect boundary", - )), - }; - - // Form Body parameters (note that non-required body parameters will ignore garbage - // values, rather than causing a 400 response). Produce warning header and logs for - // any unused fields. - let result = body.into_raw(); - match result.await { - Ok(body) => { - use std::io::Read; - - // Read Form Parameters from body - let mut entries = match Multipart::with_body(&body.to_vec()[..], boundary).save().temp() { - SaveResult::Full(entries) => { - entries - }, - _ => { - return Ok(Response::builder() - .status(StatusCode::BAD_REQUEST) - .body(Body::from("Unable to process all message parts".to_string())) - .expect("Unable to create Bad Request response due to failure to process all message")) - }, - }; - let field_file = entries.fields.remove("file"); - let param_file = match field_file { - Some(field) => { - let mut reader = field[0].data.readable().expect("Unable to read field for file"); - let mut data = vec![]; - reader.read_to_end(&mut data).expect("Reading saved binary data should never fail"); - swagger::ByteArray(data) - }, - None => { - return Ok( - Response::builder() - .status(StatusCode::BAD_REQUEST) - .body(Body::from("Missing required form parameter file".to_string())) - .expect("Unable to create Bad Request due to missing required form parameter file")) - } - }; - let result = api_impl.dag_import_post( - param_file, - &context - ).await; - let mut response = Response::new(Body::empty()); - response.headers_mut().insert( - HeaderName::from_static("x-span-id"), - HeaderValue::from_str((&context as &dyn Has).get().0.clone().as_str()) - .expect("Unable to create X-Span-ID header value")); - - match result { - Ok(rsp) => match rsp { - DagImportPostResponse::Success - (body) - => { - *response.status_mut() = StatusCode::from_u16(200).expect("Unable to turn 200 into a StatusCode"); - response.headers_mut().insert( - CONTENT_TYPE, - HeaderValue::from_str("application/json") - .expect("Unable to create Content-Type header for DAG_IMPORT_POST_SUCCESS")); - let body_content = serde_json::to_string(&body).expect("impossible to fail to serialize"); - *response.body_mut() = Body::from(body_content); - }, - DagImportPostResponse::BadRequest - (body) - => { - *response.status_mut() = StatusCode::from_u16(400).expect("Unable to turn 400 into a StatusCode"); - response.headers_mut().insert( - CONTENT_TYPE, - HeaderValue::from_str("application/json") - .expect("Unable to create 
Content-Type header for DAG_IMPORT_POST_BAD_REQUEST")); - let body_content = serde_json::to_string(&body).expect("impossible to fail to serialize"); - *response.body_mut() = Body::from(body_content); - }, - }, - Err(_) => { - // Application code returned an error. This should not happen, as the implementation should - // return a valid response. - *response.status_mut() = StatusCode::INTERNAL_SERVER_ERROR; - *response.body_mut() = Body::from("An internal error occurred"); - }, - } - - Ok(response) - }, - Err(e) => Ok(Response::builder() - .status(StatusCode::BAD_REQUEST) - .body(Body::from("Couldn't read multipart body".to_string())) - .expect("Unable to create Bad Request response due to unable read multipart body")), - } - } - - // DagPutPost - POST /dag/put - hyper::Method::POST if path.matched(paths::ID_DAG_PUT) => { - let boundary = - match swagger::multipart::form::boundary(&headers) { - Some(boundary) => boundary.to_string(), - None => return Ok(Response::builder() - .status(StatusCode::BAD_REQUEST) - .body(Body::from("Couldn't find valid multipart body".to_string())) - .expect( - "Unable to create Bad Request response for incorrect boundary", - )), - }; - - // Query parameters (note that non-required or collection query parameters will ignore garbage values, rather than causing a 400 response) - let query_params = - form_urlencoded::parse(uri.query().unwrap_or_default().as_bytes()) - .collect::>(); - let param_store_codec = query_params - .iter() - .filter(|e| e.0 == "store-codec") - .map(|e| e.1.clone()) - .next(); - let param_store_codec = match param_store_codec { - Some(param_store_codec) => { - let param_store_codec = - ::from_str(¶m_store_codec); - match param_store_codec { - Ok(param_store_codec) => Some(param_store_codec), - Err(e) => return Ok(Response::builder() - .status(StatusCode::BAD_REQUEST) - .body(Body::from(format!("Couldn't parse query parameter store-codec - doesn't match schema: {}", e))) - .expect("Unable to create Bad Request response for invalid query parameter store-codec")), - } - } - None => None, - }; - let param_input_codec = query_params - .iter() - .filter(|e| e.0 == "input-codec") - .map(|e| e.1.clone()) - .next(); - let param_input_codec = match param_input_codec { - Some(param_input_codec) => { - let param_input_codec = - ::from_str(¶m_input_codec); - match param_input_codec { - Ok(param_input_codec) => Some(param_input_codec), - Err(e) => return Ok(Response::builder() - .status(StatusCode::BAD_REQUEST) - .body(Body::from(format!("Couldn't parse query parameter input-codec - doesn't match schema: {}", e))) - .expect("Unable to create Bad Request response for invalid query parameter input-codec")), - } - } - None => None, - }; - - // Form Body parameters (note that non-required body parameters will ignore garbage - // values, rather than causing a 400 response). Produce warning header and logs for - // any unused fields. 
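The store-codec and input-codec handling above is the generator's standard treatment of optional query parameters: an absent parameter becomes None, while a present but unparsable one produces a 400. A standalone sketch of that rule, with optional_param as a hypothetical helper (the removed handler resumes below with the form body):

    // Sketch of optional query-parameter parsing via form_urlencoded, as in
    // the removed handlers above: None when absent, Err when unparsable.
    use std::str::FromStr;

    fn optional_param<T: FromStr>(query: &str, name: &str) -> Result<Option<T>, T::Err> {
        form_urlencoded::parse(query.as_bytes())
            .find(|(k, _)| k.as_ref() == name)
            .map(|(_, v)| T::from_str(&v))
            .transpose()
    }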
- let result = body.into_raw(); - match result.await { - Ok(body) => { - use std::io::Read; - - // Read Form Parameters from body - let mut entries = match Multipart::with_body(&body.to_vec()[..], boundary).save().temp() { - SaveResult::Full(entries) => { - entries - }, - _ => { - return Ok(Response::builder() - .status(StatusCode::BAD_REQUEST) - .body(Body::from("Unable to process all message parts".to_string())) - .expect("Unable to create Bad Request response due to failure to process all message")) - }, - }; - let field_file = entries.fields.remove("file"); - let param_file = match field_file { - Some(field) => { - let mut reader = field[0].data.readable().expect("Unable to read field for file"); - let mut data = vec![]; - reader.read_to_end(&mut data).expect("Reading saved binary data should never fail"); - swagger::ByteArray(data) - }, - None => { - return Ok( - Response::builder() - .status(StatusCode::BAD_REQUEST) - .body(Body::from("Missing required form parameter file".to_string())) - .expect("Unable to create Bad Request due to missing required form parameter file")) - } - }; - let result = api_impl.dag_put_post( - param_file, - param_store_codec, - param_input_codec, - &context - ).await; - let mut response = Response::new(Body::empty()); - response.headers_mut().insert( - HeaderName::from_static("x-span-id"), - HeaderValue::from_str((&context as &dyn Has).get().0.clone().as_str()) - .expect("Unable to create X-Span-ID header value")); - - match result { - Ok(rsp) => match rsp { - DagPutPostResponse::Success - (body) - => { - *response.status_mut() = StatusCode::from_u16(200).expect("Unable to turn 200 into a StatusCode"); - response.headers_mut().insert( - CONTENT_TYPE, - HeaderValue::from_str("application/json") - .expect("Unable to create Content-Type header for DAG_PUT_POST_SUCCESS")); - let body_content = serde_json::to_string(&body).expect("impossible to fail to serialize"); - *response.body_mut() = Body::from(body_content); - }, - DagPutPostResponse::BadRequest - (body) - => { - *response.status_mut() = StatusCode::from_u16(400).expect("Unable to turn 400 into a StatusCode"); - response.headers_mut().insert( - CONTENT_TYPE, - HeaderValue::from_str("application/json") - .expect("Unable to create Content-Type header for DAG_PUT_POST_BAD_REQUEST")); - let body_content = serde_json::to_string(&body).expect("impossible to fail to serialize"); - *response.body_mut() = Body::from(body_content); - }, - }, - Err(_) => { - // Application code returned an error. This should not happen, as the implementation should - // return a valid response. 
- *response.status_mut() = StatusCode::INTERNAL_SERVER_ERROR; - *response.body_mut() = Body::from("An internal error occurred"); - }, - } - - Ok(response) - }, - Err(e) => Ok(Response::builder() - .status(StatusCode::BAD_REQUEST) - .body(Body::from("Couldn't read multipart body".to_string())) - .expect("Unable to create Bad Request response due to unable read multipart body")), - } - } - // DagResolvePost - POST /dag/resolve hyper::Method::POST if path.matched(paths::ID_DAG_RESOLVE) => { // Query parameters (note that non-required or collection query parameters will ignore garbage values, rather than causing a 400 response) @@ -1439,11 +1018,8 @@ where } _ if path.matched(paths::ID_BLOCK_GET) => method_not_allowed(), - _ if path.matched(paths::ID_BLOCK_PUT) => method_not_allowed(), _ if path.matched(paths::ID_BLOCK_STAT) => method_not_allowed(), _ if path.matched(paths::ID_DAG_GET) => method_not_allowed(), - _ if path.matched(paths::ID_DAG_IMPORT) => method_not_allowed(), - _ if path.matched(paths::ID_DAG_PUT) => method_not_allowed(), _ if path.matched(paths::ID_DAG_RESOLVE) => method_not_allowed(), _ if path.matched(paths::ID_ID) => method_not_allowed(), _ if path.matched(paths::ID_PIN_ADD) => method_not_allowed(), @@ -1469,16 +1045,10 @@ impl RequestParser for ApiRequestParser { match *request.method() { // BlockGetPost - POST /block/get hyper::Method::POST if path.matched(paths::ID_BLOCK_GET) => Some("BlockGetPost"), - // BlockPutPost - POST /block/put - hyper::Method::POST if path.matched(paths::ID_BLOCK_PUT) => Some("BlockPutPost"), // BlockStatPost - POST /block/stat hyper::Method::POST if path.matched(paths::ID_BLOCK_STAT) => Some("BlockStatPost"), // DagGetPost - POST /dag/get hyper::Method::POST if path.matched(paths::ID_DAG_GET) => Some("DagGetPost"), - // DagImportPost - POST /dag/import - hyper::Method::POST if path.matched(paths::ID_DAG_IMPORT) => Some("DagImportPost"), - // DagPutPost - POST /dag/put - hyper::Method::POST if path.matched(paths::ID_DAG_PUT) => Some("DagPutPost"), // DagResolvePost - POST /dag/resolve hyper::Method::POST if path.matched(paths::ID_DAG_RESOLVE) => Some("DagResolvePost"), // IdPost - POST /id diff --git a/kubo-rpc/Cargo.toml b/kubo-rpc/Cargo.toml index 943368a3d..0cda8e0a1 100644 --- a/kubo-rpc/Cargo.toml +++ b/kubo-rpc/Cargo.toml @@ -14,7 +14,6 @@ http = [ "dep:pin-project", "dep:serde", "dep:serde_json", - "dep:prometheus-client", ] [dependencies] @@ -33,13 +32,14 @@ hex.workspace = true iroh-car.workspace = true iroh-rpc-client.workspace = true iroh-rpc-types.workspace = true +iroh-bitswap.workspace = true itertools = "0.11.0" libipld.workspace = true libp2p-identity.workspace = true libp2p.workspace = true multiaddr.workspace = true pin-project = { version = "1.1.3", optional = true } -prometheus-client = { workspace = true, optional = true } +prometheus-client.workspace = true serde = { workspace = true, optional = true } serde_json = { workspace = true, optional = true } swagger.workspace = true diff --git a/kubo-rpc/kubo-rpc.yaml b/kubo-rpc/kubo-rpc.yaml index 31d20cb09..9d385ba9b 100644 --- a/kubo-rpc/kubo-rpc.yaml +++ b/kubo-rpc/kubo-rpc.yaml @@ -47,103 +47,6 @@ paths: application/json: schema: $ref: '#/components/schemas/Error' - '/dag/put': - post: - summary: Put an IPLD node into IPFS - parameters: - - name: store-codec - in: query - description: IPFS path to DAG node - schema: - $ref: '#/components/schemas/Codecs' - default: dag-cbor - required: false - - name: input-codec - in: query - description: Output encoding of the data - 
required: false - schema: - $ref: '#/components/schemas/Codecs' - default: dag-json - requestBody: - content: - multipart/form-data: - schema: - type: object - required: - - file - properties: - file: - type: string - format: byte - responses: - '200': - description: success - content: - application/json: - schema: - type: object - required: - - Cid - properties: - Cid: - type: object - required: - - '/' - properties: - '/': - type: string - '400': - description: bad request - content: - application/json: - schema: - $ref: '#/components/schemas/Error' - '/dag/import': - post: - summary: Import a CAR file of IPLD nodes into IPFS - requestBody: - content: - multipart/form-data: - schema: - type: object - required: - - file - properties: - file: - type: string - format: byte - responses: - '200': - description: success - content: - application/json: - schema: - type: object - required: - - Root - properties: - Root: - type: object - required: - - Cid - properties: - Cid: - type: object - required: - - '/' - properties: - '/': - type: string - - '400': - description: bad request - content: - application/json: - schema: - $ref: '#/components/schemas/Error' - - '/dag/resolve': post: summary: Resolve an IPFS path to a DAG node @@ -225,61 +128,6 @@ paths: application/json: schema: $ref: '#/components/schemas/Error' - '/block/put': - post: - summary: Put a single IPFS block - parameters: - - name: cid-codec - in: query - description: Codec of the block data - schema: - $ref: '#/components/schemas/Codecs' - default: raw - - name: mhtype - in: query - description: Multihash type - schema: - $ref: '#/components/schemas/Multihash' - default: sha2-256 - - name: pin - in: query - description: Whether to recursively pin the block - schema: - type: bool - default: false - requestBody: - content: - multipart/form-data: - schema: - type: object - required: - - file - properties: - file: - type: string - format: byte - responses: - '200': - description: success - content: - application/json: - schema: - type: object - required: - - Key - - Size - properties: - Key: - type: string - Size: - type: number - - '400': - description: bad request - content: - application/json: - schema: - $ref: '#/components/schemas/Error' '/block/stat': post: summary: Report statistics about a block diff --git a/kubo-rpc/src/block.rs b/kubo-rpc/src/block.rs index 5d0cf1d50..5d18a6f85 100644 --- a/kubo-rpc/src/block.rs +++ b/kubo-rpc/src/block.rs @@ -1,36 +1,17 @@ //! Implements the dag endpoints. -use crate::Cid; -use libipld::{ - multihash::{Code, MultihashDigest}, - prelude::Codec, -}; - -use crate::{error::Error, IpfsDep}; +use crate::{error::Error, Cid, IpfsDep}; /// Get a block from IPFS. #[tracing::instrument(skip(client))] -pub async fn get(client: T, cid: Cid, offline: bool) -> Result, Error> +pub async fn get(client: T, cid: Cid) -> Result, Error> where T: IpfsDep, { - let bytes = client.block_get(cid, offline).await?; + let bytes = client.block_get(cid).await?; Ok(bytes.to_vec()) } -/// Store a block into IFPS. -#[tracing::instrument(skip_all)] -pub async fn put(client: T, codec: C, data: Vec) -> Result -where - T: IpfsDep, - C: Codec, -{ - let hash = Code::Sha2_256.digest(&data); - let cid = Cid::new_v1(codec.into(), hash); - client.put(cid, data.into(), vec![]).await?; - Ok(cid) -} - /// Resolve an IPLD block. 
 #[tracing::instrument(skip(client))]
 pub async fn stat<T>(client: T, cid: Cid) -> Result<u64, Error>
diff --git a/kubo-rpc/src/dag.rs b/kubo-rpc/src/dag.rs
index 68f9647cd..0fc3668e7 100644
--- a/kubo-rpc/src/dag.rs
+++ b/kubo-rpc/src/dag.rs
@@ -1,14 +1,9 @@
 //! Implements the dag endpoints.
-use std::io::{Read, Seek};
 
-use anyhow::anyhow;
-use bytes::Bytes;
 use libipld::{
-    multihash::{Code, MultihashDigest},
-    prelude::{Codec, Decode, Encode},
+    prelude::{Codec, Encode},
     Ipld,
 };
-use tokio::io::AsyncRead;
 
 use crate::{error::Error, IpfsDep};
 use crate::{Cid, IpfsPath};
@@ -28,61 +23,6 @@ where
     Ok(bytes)
 }
 
-/// Store a DAG node into IPFS.
-#[tracing::instrument(skip_all)]
-pub async fn put<T, I, S, R>(
-    client: T,
-    input_codec: I,
-    store_codec: S,
-    data: &mut R,
-) -> Result<Cid, Error>
-where
-    T: IpfsDep,
-    I: Codec,
-    S: Codec,
-    Ipld: Decode<I>,
-    Ipld: Encode<S>,
-    R: Read + Seek,
-{
-    let dag_data = Ipld::decode(input_codec, data).map_err(Error::Invalid)?;
-
-    let mut blob: Vec<u8> = Vec::new();
-    dag_data
-        .encode(store_codec, &mut blob)
-        .map_err(Error::Internal)?;
-
-    let hash = Code::Sha2_256.digest(&blob);
-    let cid = Cid::new_v1(store_codec.into(), hash);
-    client.put(cid, blob.into(), vec![]).await?;
-    Ok(cid)
-}
-
-/// Import data representing a car file
-#[tracing::instrument(skip_all)]
-pub async fn import<T, R>(client: T, data: R) -> Result<Vec<Cid>, Error>
-where
-    T: IpfsDep,
-    R: AsyncRead + Send + Unpin,
-{
-    let mut reader = iroh_car::CarReader::new(data)
-        .await
-        .map_err(|e| Error::Internal(e.into()))?;
-    if reader.header().roots().is_empty() {
-        // Ref: https://ipld.io/specs/transport/car/carv1/#number-of-roots
-        return Err(Error::Invalid(anyhow!(
-            "car file must have at least one root."
-        )));
-    }
-    while let Some(block) = reader
-        .next_block()
-        .await
-        .map_err(|e| Error::Internal(e.into()))?
-    {
-        client.put(block.0, Bytes::from(block.1), vec![]).await?;
-    }
-    Ok(reader.header().roots().to_vec())
-}
-
 /// Resolve an IPLD node
 #[tracing::instrument(skip(client))]
 pub async fn resolve<T>(client: T, path: &IpfsPath) -> Result<(Cid, String), Error>
diff --git a/kubo-rpc/src/http.rs b/kubo-rpc/src/http.rs
index 7774b44f1..9533a0788 100644
--- a/kubo-rpc/src/http.rs
+++ b/kubo-rpc/src/http.rs
@@ -4,26 +4,24 @@ mod metrics;
 
 pub use metrics::{api::MetricsMiddleware, Metrics};
 
-use std::{collections::HashSet, io::Cursor, marker::PhantomData, str::FromStr, time::Duration};
+use std::{collections::HashSet, marker::PhantomData, str::FromStr, time::Duration};
 
 use anyhow::anyhow;
 use async_trait::async_trait;
 use ceramic_kubo_rpc_server::{
     models::{
-        self, BlockPutPost200Response, Codecs, DagImportPost200Response, DagPutPost200Response,
-        DagPutPost200ResponseCid, DagResolvePost200Response, DagResolvePost200ResponseCid,
-        IdPost200Response, Multihash, PinAddPost200Response, SwarmConnectPost200Response,
-        SwarmPeersPost200Response, SwarmPeersPost200ResponsePeersInner, VersionPost200Response,
+        self, BlockStatPost200Response, Codecs, DagResolvePost200Response,
+        DagResolvePost200ResponseCid, IdPost200Response, PinAddPost200Response,
+        SwarmConnectPost200Response, SwarmPeersPost200Response,
+        SwarmPeersPost200ResponsePeersInner, VersionPost200Response,
     },
-    Api, BlockGetPostResponse, BlockPutPostResponse, BlockStatPostResponse, DagGetPostResponse,
-    DagImportPostResponse, DagPutPostResponse, DagResolvePostResponse, IdPostResponse,
-    PinAddPostResponse, PinRmPostResponse, SwarmConnectPostResponse, SwarmPeersPostResponse,
-    VersionPostResponse,
+    Api, BlockGetPostResponse, BlockStatPostResponse, DagGetPostResponse, DagResolvePostResponse,
+    IdPostResponse, PinAddPostResponse, PinRmPostResponse, SwarmConnectPostResponse,
+    SwarmPeersPostResponse, VersionPostResponse,
 };
 use cid::Cid;
-use dag_jose::DagJoseCodec;
 use go_parse_duration::parse_duration;
-use libipld::{cbor::DagCborCodec, json::DagJsonCodec, raw::RawCodec};
+use libipld::{cbor::DagCborCodec, json::DagJsonCodec};
 use libp2p::{Multiaddr, PeerId};
 use multiaddr::Protocol;
 use serde::Serialize;
@@ -95,9 +93,15 @@ where
         const BLOCK_NOT_FOUND_LOCALLY: &str = "block was not found locally (offline)";
         const CONTEXT_DEADLINE_EXCEEDED: &str = "context deadline exceeded";
 
+        // Online is the default so we expect that offline=true is explicitly passed.
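A small sketch of what the new guard (introduced by the comment above and continuing below) enforces: offline arrives as Option<bool>, and after this change only an explicit offline=true request proceeds, while both an absent parameter and offline=false are rejected as unsupported online fetches:

    // Semantics of the offline guard, in isolation.
    fn is_allowed(offline: Option<bool>) -> bool {
        // None and Some(false) both mean "online", which is now rejected.
        offline.unwrap_or(false)
    }

    fn main() {
        assert!(!is_allowed(None));        // query absent: rejected
        assert!(!is_allowed(Some(false))); // explicit online: rejected
        assert!(is_allowed(Some(true)));   // offline mode: allowed
    }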
+ if !offline.unwrap_or(false) { + return Ok(BlockGetPostResponse::BadRequest(create_error( + "only offline mode is supported", + ))); + } + let cid = try_or_bad_request!(Cid::from_str(&arg), BlockGetPostResponse); - let offline = offline.unwrap_or(false); - let data_fut = block::get(self.ipfs.clone(), cid, offline); + let data_fut = block::get(self.ipfs.clone(), cid); let data = if let Some(timeout) = timeout { let timeout = try_or_bad_request!( parse_duration(&timeout).map_err(|err| match err { @@ -133,51 +137,6 @@ where Ok(BlockGetPostResponse::Success(ByteArray(data))) } - #[instrument(skip(self, _context, file), fields(file.len = file.0.len()), ret(level = Level::DEBUG), err(level = Level::ERROR))] - async fn block_put_post( - &self, - file: ByteArray, - cid_codec: Option, - mhtype: Option, - pin: Option, - _context: &C, - ) -> Result { - if let Some(pin) = pin { - if pin { - return Ok(BlockPutPostResponse::BadRequest(create_error( - "recursive pinning is not supported", - ))); - } - }; - if let Some(mhtype) = mhtype { - if mhtype != Multihash::Sha2256 { - return Ok(BlockPutPostResponse::BadRequest(create_error( - "unsupported multihash type", - ))); - } - }; - - let size = file.0.len(); - let cid = match cid_codec.unwrap_or(Codecs::Raw) { - Codecs::Raw => block::put(self.ipfs.clone(), RawCodec, file.0) - .await - .map_err(to_api_error)?, - Codecs::DagCbor => block::put(self.ipfs.clone(), DagCborCodec, file.0) - .await - .map_err(to_api_error)?, - Codecs::DagJson => block::put(self.ipfs.clone(), DagJsonCodec, file.0) - .await - .map_err(to_api_error)?, - Codecs::DagJose => block::put(self.ipfs.clone(), DagJoseCodec, file.0) - .await - .map_err(to_api_error)?, - }; - Ok(BlockPutPostResponse::Success(BlockPutPost200Response { - key: cid.to_string(), - size: size as f64, - })) - } - #[instrument(skip(self, _context), ret(level = Level::DEBUG), err(level = Level::ERROR))] async fn block_stat_post( &self, @@ -188,7 +147,7 @@ where let size = block::stat(self.ipfs.clone(), cid) .await .map_err(to_api_error)?; - Ok(BlockStatPostResponse::Success(BlockPutPost200Response { + Ok(BlockStatPostResponse::Success(BlockStatPost200Response { key: cid.to_string(), size: size as f64, })) @@ -219,73 +178,6 @@ where } } - #[instrument(skip(self, _context, file), fields(file.len = file.0.len()), ret(level = Level::DEBUG), err(level = Level::ERROR))] - async fn dag_import_post( - &self, - file: swagger::ByteArray, - _context: &C, - ) -> Result { - let cids = dag::import(self.ipfs.clone(), file.0.as_slice()) - .await - .map_err(to_api_error)?; - Ok(DagImportPostResponse::Success(DagImportPost200Response { - root: DagPutPost200Response { - cid: DagPutPost200ResponseCid { - // We know that the CAR file will have at least one root at this point, - // otherwise we'd have errored out during the import. - slash: cids[0].to_string(), - }, - }, - })) - } - - #[instrument(skip(self, _context, file), fields(file.len = file.0.len()), ret(level = Level::DEBUG), err(level = Level::ERROR))] - async fn dag_put_post( - &self, - file: ByteArray, - store_codec: Option, - input_codec: Option, - _context: &C, - ) -> Result { - let mut file = Cursor::new(file.0); - let cid = match ( - input_codec.unwrap_or(Codecs::DagJson), - store_codec.unwrap_or(Codecs::DagCbor), - ) { - (Codecs::DagJson, Codecs::DagJson) => { - dag::put(self.ipfs.clone(), DagJsonCodec, DagJsonCodec, &mut file) - .await - .map_err(to_api_error)? 
- } - (Codecs::DagJson, Codecs::DagCbor) => { - dag::put(self.ipfs.clone(), DagJsonCodec, DagCborCodec, &mut file) - .await - .map_err(to_api_error)? - } - (Codecs::DagCbor, Codecs::DagCbor) => { - dag::put(self.ipfs.clone(), DagCborCodec, DagCborCodec, &mut file) - .await - .map_err(to_api_error)? - } - (Codecs::DagJose, Codecs::DagJose) => { - dag::put(self.ipfs.clone(), DagJoseCodec, DagJoseCodec, &mut file) - .await - .map_err(to_api_error)? - } - (input, store) => { - return Ok(DagPutPostResponse::BadRequest(create_error(&format!( - "unsupported codec combination, input-codec: {input}, store-codec: {store}", - )))) - } - }; - - Ok(DagPutPostResponse::Success(DagPutPost200Response { - cid: DagPutPost200ResponseCid { - slash: cid.to_string(), - }, - })) - } - #[instrument(skip(self, _context), ret(level = Level::DEBUG), err(level = Level::ERROR))] async fn dag_resolve_post( &self, @@ -472,7 +364,7 @@ struct ErrorJson<'a> { #[cfg(test)] mod tests { - use std::collections::HashMap; + use std::{collections::HashMap, io::Cursor}; use super::*; use crate::{tests::MockIpfsDepTest, PeerInfo}; @@ -545,20 +437,9 @@ mod tests { #[tokio::test] #[traced_test] async fn block_get() { - // Test data from: - // https://ipld.io/specs/codecs/dag-pb/fixtures/cross-codec/#dagpb_data_some - let data = hex::decode("0a050001020304").unwrap(); let cid = Cid::from_str("bafybeibazl2z4vqp2tmwcfag6wirmtpnomxknqcgrauj7m2yisrz3qjbom").unwrap(); - let mut mock_ipfs = MockIpfsDepTest::new(); - mock_ipfs.expect_clone().once().return_once(move || { - let mut m = MockIpfsDepTest::new(); - m.expect_block_get() - .once() - .with(predicate::eq(cid), predicate::eq(false)) - .return_once(move |_, _| Ok(Bytes::from(data))); - m - }); + let mock_ipfs = MockIpfsDepTest::new(); let server = Server::new(mock_ipfs); let resp = server .block_get_post(cid.to_string(), None, None, &Context) @@ -566,8 +447,12 @@ mod tests { .unwrap(); expect![[r#" - Success( - 0a050001020304, + BadRequest( + Error { + message: "only offline mode is supported", + code: 0.0, + type: "error", + }, ) "#]] .assert_debug_eq(&DebugResponse::from(resp)); @@ -586,8 +471,8 @@ mod tests { let mut m = MockIpfsDepTest::new(); m.expect_block_get() .once() - .with(predicate::eq(cid), predicate::eq(true)) - .return_once(move |_, _| Ok(Bytes::from(data))); + .with(predicate::eq(cid)) + .return_once(move |_| Ok(Bytes::from(data))); m }); let server = Server::new(mock_ipfs); @@ -613,8 +498,8 @@ mod tests { let mut m = MockIpfsDepTest::new(); m.expect_block_get() .once() - .with(predicate::eq(cid), predicate::eq(true)) - .return_once(move |_, _| Err(crate::error::Error::NotFound)); + .with(predicate::eq(cid)) + .return_once(move |_| Err(crate::error::Error::NotFound)); m }); let server = Server::new(mock_ipfs); @@ -648,14 +533,19 @@ mod tests { let mut m = MockIpfsDepTest::new(); m.expect_block_get() .once() - .with(predicate::eq(cid), predicate::eq(false)) - .return_once(move |_, _| Ok(Bytes::from(data))); + .with(predicate::eq(cid)) + .return_once(move |_| Ok(Bytes::from(data))); m }); let server = Server::new(mock_ipfs); let resp = server - .block_get_post(cid.to_string(), Some("1s".to_string()), None, &Context) + .block_get_post( + cid.to_string(), + Some("1s".to_string()), + Some(true), + &Context, + ) .await .unwrap(); @@ -675,7 +565,7 @@ mod tests { let server = Server::new(mock_ipfs); let resp = server - .block_get_post("invalid cid".to_string(), None, None, &Context) + .block_get_post("invalid cid".to_string(), None, Some(true), &Context) .await .unwrap(); @@ 
-691,83 +581,6 @@ mod tests { .assert_debug_eq(&DebugResponse::from(resp)); } - #[tokio::test] - #[traced_test] - async fn block_put() { - // Test data from: - // https://ipld.io/specs/codecs/dag-json/fixtures/cross-codec/#array-mixed - let cbor_cid = - Cid::from_str("bafyreidufmzzejc3p7gmh6ivp4fjvca5jfazk57nu6vdkvki4c4vpja724").unwrap(); // cspell:disable-line - - // Cbor encoded bytes - let file = hex::decode("8c1b0016db6db6db6db71a000100001901f40200202238ff3aa5f702b33b0016db6db6db6db74261316fc48c6175657320c39f76c49b746521").unwrap(); - let blob = Bytes::from(file.clone()); - let mut mock_ipfs = MockIpfsDepTest::new(); - mock_ipfs.expect_clone().once().return_once(move || { - let mut m = MockIpfsDepTest::new(); - m.expect_put() - .once() - .with( - predicate::eq(cbor_cid), - predicate::eq(blob), - predicate::eq(vec![]), - ) - .return_once(move |_, _, _| Ok(())); - m - }); - let server = Server::new(mock_ipfs); - let resp = server - .block_put_post( - ByteArray(file), - Some(Codecs::DagCbor), - Some(Multihash::Sha2256), - Some(false), - &Context, - ) - .await - .unwrap(); - // cSpell:disable - expect![[r#" - Success( - BlockPutPost200Response { - key: "bafyreidufmzzejc3p7gmh6ivp4fjvca5jfazk57nu6vdkvki4c4vpja724", - size: 57.0, - }, - ) - "#]] - .assert_debug_eq(&resp); - // cSpell:enable - } - - #[tokio::test] - #[traced_test] - async fn block_put_bad_request() { - let mock_ipfs = MockIpfsDepTest::new(); - let server = Server::new(mock_ipfs); - - let resp = server - .block_put_post( - ByteArray(vec![]), - Some(Codecs::DagCbor), - Some(Multihash::Sha2256), - Some(true), - &Context, - ) - .await - .unwrap(); - - expect![[r#" - BadRequest( - Error { - message: "recursive pinning is not supported", - code: 0.0, - type: "error", - }, - ) - "#]] - .assert_debug_eq(&resp); - } - #[tokio::test] #[traced_test] async fn block_stat() { @@ -792,7 +605,7 @@ mod tests { expect![[r#" Success( - BlockPutPost200Response { + BlockStatPost200Response { key: "bafybeibazl2z4vqp2tmwcfag6wirmtpnomxknqcgrauj7m2yisrz3qjbom", size: 7.0, }, @@ -927,185 +740,6 @@ mod tests { .assert_debug_eq(&resp); } - #[tokio::test] - #[traced_test] - async fn dag_import() { - let car_file = include_bytes!("testdata/carv1-basic.car"); // cspell:disable-line - let mut mock_ipfs = MockIpfsDepTest::new(); - mock_ipfs.expect_clone().once().return_once(move || { - let mut m = MockIpfsDepTest::new(); - - fn expect_put(m: &mut MockIpfsDepTest, cid: &str) { - m.expect_put() - .once() - .with( - predicate::eq(Cid::from_str(cid).unwrap()), - predicate::always(), - predicate::always(), - ) - .return_once(|_, _, _| Ok(())); - } - - for cid in [ - "bafyreihyrpefhacm6kkp4ql6j6udakdit7g3dmkzfriqfykhjw6cad5lrm", - "QmNX6Tffavsya4xgBi2VJQnSuqy9GsxongxZZ9uZBqp16d", - "bafkreifw7plhl6mofk6sfvhnfh64qmkq73oeqwl6sloru6rehaoujituke", - "QmWXZxVQ9yZfhQxLD35eDR8LiMRsYtHxYqTFCBbJoiJVys", - "bafkreiebzrnroamgos2adnbpgw5apo3z4iishhbdx77gldnbk57d4zdio4", - "QmdwjhxpxzcMsR3qUuj7vUL8pbA7MgR3GAxWi2GLHjsKCT", - "bafkreidbxzk2ryxwwtqxem4l3xyyjvw35yu4tcct4cqeqxwo47zhxgxqwq", - "bafyreidj5idub6mapiupjwjsyyxhyhedxycv4vihfsicm2vt46o7morwlm", - ] { - expect_put(&mut m, cid) - } - m - }); - let server = Server::new(mock_ipfs); - let resp = server - .dag_import_post(ByteArray(car_file.to_vec()), &Context) - .await - .unwrap(); - - expect![[r#" - Success( - DagImportPost200Response { - root: DagPutPost200Response { - cid: DagPutPost200ResponseCid { - slash: "bafyreihyrpefhacm6kkp4ql6j6udakdit7g3dmkzfriqfykhjw6cad5lrm", - }, - }, - }, - ) - "#]] - .assert_debug_eq(&resp); 
- } - - #[tokio::test] - #[traced_test] - async fn dag_put() { - // Test data from: - // https://ipld.io/specs/codecs/dag-json/fixtures/cross-codec/#array-mixed - let cbor_cid = - Cid::from_str("bafyreidufmzzejc3p7gmh6ivp4fjvca5jfazk57nu6vdkvki4c4vpja724").unwrap(); // cspell:disable-line - - let file = ByteArray(r#"[6433713753386423,65536,500,2,0,-1,-3,-256,-2784428724,-6433713753386424,{"/":{"bytes":"YTE"}},"Čaues ßvěte!"]"#.as_bytes().to_vec()); // cspell:disable-line - - // Cbor encoded bytes - let blob = Bytes::from(hex::decode("8c1b0016db6db6db6db71a000100001901f40200202238ff3aa5f702b33b0016db6db6db6db74261316fc48c6175657320c39f76c49b746521").unwrap()); - let mut mock_ipfs = MockIpfsDepTest::new(); - mock_ipfs.expect_clone().once().return_once(move || { - let mut m = MockIpfsDepTest::new(); - m.expect_put() - .once() - .with( - predicate::eq(cbor_cid), - predicate::eq(blob), - predicate::eq(vec![]), - ) - .return_once(move |_, _, _| Ok(())); - m - }); - let server = Server::new(mock_ipfs); - let resp = server - .dag_put_post(file, None, None, &Context) - .await - .unwrap(); - - // cSpell:disable - expect![[r#" - Success( - DagPutPost200Response { - cid: DagPutPost200ResponseCid { - slash: "bafyreidufmzzejc3p7gmh6ivp4fjvca5jfazk57nu6vdkvki4c4vpja724", - }, - }, - ) - "#]] - .assert_debug_eq(&resp); - // cSpell:enable - } - #[tokio::test] - #[traced_test] - async fn dag_put_store_json() { - // Test data from: - // https://ipld.io/specs/codecs/dag-json/fixtures/cross-codec/#array-mixed - let json_cid = - Cid::from_str("baguqeera4iuxsgqusw3ctry362niptivjyio6dxnsn5afctijsahacub2eza").unwrap(); // cspell:disable-line - let file = ByteArray(r#"[6433713753386423,65536,500,2,0,-1,-3,-256,-2784428724,-6433713753386424,{"/":{"bytes":"YTE"}},"Čaues ßvěte!"]"#.as_bytes().to_vec()); // cspell:disable-line - - // JSON encoded bytes - let blob = Bytes::from(file.0.clone()); - let mut mock_ipfs = MockIpfsDepTest::new(); - mock_ipfs.expect_clone().once().return_once(move || { - let mut m = MockIpfsDepTest::new(); - m.expect_put() - .once() - .with( - predicate::eq(json_cid), - predicate::eq(blob), - predicate::eq(vec![]), - ) - .return_once(move |_, _, _| Ok(())); - m - }); - let server = Server::new(mock_ipfs); - let resp = server - .dag_put_post(file, Some(Codecs::DagJson), None, &Context) - .await - .unwrap(); - - // cSpell:disable - expect![[r#" - Success( - DagPutPost200Response { - cid: DagPutPost200ResponseCid { - slash: "baguqeera4iuxsgqusw3ctry362niptivjyio6dxnsn5afctijsahacub2eza", - }, - }, - ) - "#]] - .assert_debug_eq(&resp); - // cSpell:enable - } - - #[tokio::test] - #[traced_test] - async fn dag_put_bad_request() { - let mock_ipfs = MockIpfsDepTest::new(); - let server = Server::new(mock_ipfs); - - let resp = server - .dag_put_post(ByteArray(vec![]), Some(Codecs::Raw), None, &Context) - .await - .unwrap(); - - expect![[r#" - BadRequest( - Error { - message: "unsupported codec combination, input-codec: dag-json, store-codec: raw", - code: 0.0, - type: "error", - }, - ) - "#]] - .assert_debug_eq(&resp); - - let resp = server - .dag_put_post(ByteArray(vec![]), None, Some(Codecs::Raw), &Context) - .await - .unwrap(); - - expect![[r#" - BadRequest( - Error { - message: "unsupported codec combination, input-codec: raw, store-codec: dag-cbor", - code: 0.0, - type: "error", - }, - ) - "#]] - .assert_debug_eq(&resp); - } #[tokio::test] #[traced_test] async fn dag_resolve() { diff --git a/kubo-rpc/src/http/metrics/api.rs b/kubo-rpc/src/http/metrics/api.rs index 4d97c2592..ece8817e0 100644
--- a/kubo-rpc/src/http/metrics/api.rs +++ b/kubo-rpc/src/http/metrics/api.rs @@ -1,13 +1,12 @@ use async_trait::async_trait; use ceramic_kubo_rpc_server::{ - models, Api, BlockGetPostResponse, BlockPutPostResponse, BlockStatPostResponse, - DagGetPostResponse, DagImportPostResponse, DagPutPostResponse, DagResolvePostResponse, - IdPostResponse, PinAddPostResponse, PinRmPostResponse, SwarmConnectPostResponse, - SwarmPeersPostResponse, VersionPostResponse, + models, Api, BlockGetPostResponse, BlockStatPostResponse, DagGetPostResponse, + DagResolvePostResponse, IdPostResponse, PinAddPostResponse, PinRmPostResponse, + SwarmConnectPostResponse, SwarmPeersPostResponse, VersionPostResponse, }; use ceramic_metrics::Recorder; use futures_util::Future; -use swagger::{ApiError, ByteArray}; +use swagger::ApiError; use tokio::time::Instant; use crate::http::{metrics::Event, Metrics}; @@ -58,23 +57,6 @@ where .await } - /// Put a single IPFS block - async fn block_put_post( - &self, - file: ByteArray, - cid_codec: Option, - mhtype: Option, - pin: Option, - context: &C, - ) -> Result { - self.record( - "/block/put", - self.api - .block_put_post(file, cid_codec, mhtype, pin, context), - ) - .await - } - /// Report statistics about a block async fn block_stat_post( &self, @@ -99,32 +81,6 @@ where .await } - /// Import a CAR file of IPLD nodes into IPFS - async fn dag_import_post( - &self, - file: ByteArray, - context: &C, - ) -> Result { - self.record("/dag/import", self.api.dag_import_post(file, context)) - .await - } - - /// Put an IPLD node into IPFS - async fn dag_put_post( - &self, - file: ByteArray, - store_codec: Option, - input_codec: Option, - context: &C, - ) -> Result { - self.record( - "/dag/put", - self.api - .dag_put_post(file, store_codec, input_codec, context), - ) - .await - } - /// Resolve an IPFS path to a DAG node async fn dag_resolve_post( &self, diff --git a/kubo-rpc/src/ipfs_metrics.rs b/kubo-rpc/src/ipfs_metrics.rs index 27aba102c..67a216200 100644 --- a/kubo-rpc/src/ipfs_metrics.rs +++ b/kubo-rpc/src/ipfs_metrics.rs @@ -119,16 +119,12 @@ where async fn block_size(&self, cid: Cid) -> Result { self.record("block_size", self.ipfs.block_size(cid)).await } - async fn block_get(&self, cid: Cid, offline: bool) -> Result { - self.record("block_get", self.ipfs.block_get(cid, offline)) - .await + async fn block_get(&self, cid: Cid) -> Result { + self.record("block_get", self.ipfs.block_get(cid)).await } async fn get(&self, ipfs_path: &IpfsPath) -> Result<(Cid, Ipld), Error> { self.record("get", self.ipfs.get(ipfs_path)).await } - async fn put(&self, cid: Cid, blob: Bytes, links: Vec) -> Result<(), Error> { - self.record("put", self.ipfs.put(cid, blob, links)).await - } async fn resolve(&self, ipfs_path: &IpfsPath) -> Result<(Cid, String), Error> { self.record("resolve", self.ipfs.resolve(ipfs_path)).await } diff --git a/kubo-rpc/src/lib.rs b/kubo-rpc/src/lib.rs index 9ee307e98..bd89198c2 100644 --- a/kubo-rpc/src/lib.rs +++ b/kubo-rpc/src/lib.rs @@ -10,7 +10,6 @@ use std::{ fmt::{self, Display, Formatter}, io::Cursor, path::PathBuf, - sync::atomic::{AtomicUsize, Ordering}, }; use std::{str::FromStr, sync::Arc}; @@ -19,7 +18,7 @@ use async_trait::async_trait; use dag_jose::DagJoseCodec; use iroh_rpc_client::P2pClient; use libipld::{cbor::DagCborCodec, json::DagJsonCodec, prelude::Decode}; -use tracing::{error, instrument, trace}; +use tracing::instrument; // Pub use any types we export as part of a trait or struct pub use bytes::Bytes; @@ -44,7 +43,6 @@ pub mod swarm; pub mod version; use
crate::error::Error; -use ceramic_p2p::SQLiteBlockStore; /// Information about a peer #[derive(Debug)] @@ -145,12 +143,10 @@ pub trait IpfsDep: Clone { /// Get the size of an IPFS block. async fn block_size(&self, cid: Cid) -> Result; /// Get a block from IPFS - async fn block_get(&self, cid: Cid, offline: bool) -> Result; + async fn block_get(&self, cid: Cid) -> Result; /// Get a DAG node from IPFS returning the Cid of the resolved path and the bytes of the node. /// This will locally store the data as a result. async fn get(&self, ipfs_path: &IpfsPath) -> Result<(Cid, Ipld), Error>; - /// Store a DAG node into IPFS. - async fn put(&self, cid: Cid, blob: Bytes, links: Vec) -> Result<(), Error>; /// Resolve an IPLD block. async fn resolve(&self, ipfs_path: &IpfsPath) -> Result<(Cid, String), Error>; /// Report all connected peers of the current node. @@ -162,21 +158,19 @@ pub trait IpfsDep: Clone { } /// Implementation of IPFS APIs -pub struct IpfsService { +pub struct IpfsService { p2p: P2pClient, - store: SQLiteBlockStore, - resolver: Resolver, + store: S, + resolver: Resolver, } -impl IpfsService { +impl IpfsService +where + S: iroh_bitswap::Store + Clone, +{ /// Create new IpfsService - pub fn new(p2p: P2pClient, store: SQLiteBlockStore) -> Self { - let loader = Loader { - p2p: p2p.clone(), - store: store.clone(), - session_counter: AtomicUsize::new(0), - }; - let resolver = Resolver::new(loader); + pub fn new(p2p: P2pClient, store: S) -> Self { + let resolver = Resolver::new(store.clone()); Self { p2p, store, @@ -186,7 +180,10 @@ impl IpfsService { } #[async_trait] -impl IpfsDep for Arc { +impl IpfsDep for Arc> +where + S: iroh_bitswap::Store, +{ /// Get the ID of the local peer. #[instrument(skip(self))] async fn lookup_local(&self) -> Result { @@ -217,27 +214,19 @@ impl IpfsDep for Arc { } #[instrument(skip(self))] async fn block_size(&self, cid: Cid) -> Result { - Ok(self - .store - .get_size(cid) - .await - .map_err(Error::Internal)? - .ok_or(Error::NotFound)?) + if self.store.has(&cid).await.map_err(Error::Internal)? { + Ok(self.store.get_size(&cid).await.map_err(Error::Internal)? as u64) + } else { + Err(Error::NotFound) + } } #[instrument(skip(self))] - async fn block_get(&self, cid: Cid, offline: bool) -> Result { - if offline { - // Read directly from the store - Ok(self - .store - .get(cid) - .await - .map_err(Error::Internal) - .transpose() - .unwrap_or(Err(Error::NotFound))?) + async fn block_get(&self, cid: Cid) -> Result { + // Read directly from the store + if self.store.has(&cid).await.map_err(Error::Internal)? { + Ok(self.store.get(&cid).await.map_err(Error::Internal)?.data) } else { - // TODO do we want to advertise on the DHT all Cids we have? - Ok(self.resolver.load_cid_bytes(cid).await?) 
+ Err(Error::NotFound) } } #[instrument(skip(self))] @@ -246,15 +235,6 @@ impl IpfsDep for Arc { let node = self.resolver.resolve(ipfs_path).await?; Ok((node.cid, node.data)) } - #[instrument(skip(self, blob))] - async fn put(&self, cid: Cid, blob: Bytes, links: Vec) -> Result<(), Error> { - let _new = self - .store - .put(cid, blob, links) - .await - .map_err(Error::Internal)?; - Ok(()) - } #[instrument(skip(self))] async fn resolve(&self, ipfs_path: &IpfsPath) -> Result<(Cid, String), Error> { let node = self.resolver.resolve(ipfs_path).await?; @@ -283,8 +263,8 @@ impl IpfsDep for Arc { // * dag-cbor // * dag-json // * dag-jose -struct Resolver { - loader: Loader, +struct Resolver { + store: S, } // Represents an IPFS DAG node @@ -297,9 +277,12 @@ struct Node { data: Ipld, } -impl Resolver { - fn new(loader: Loader) -> Self { - Resolver { loader } +impl Resolver +where + S: iroh_bitswap::Store, +{ + fn new(store: S) -> Self { + Resolver { store } } #[instrument(skip(self))] async fn resolve(&self, path: &IpfsPath) -> Result { @@ -343,7 +326,8 @@ impl Resolver { } #[instrument(skip(self))] async fn load_cid_bytes(&self, cid: Cid) -> Result { - self.loader.load_cid(cid).await.map_err(Error::Internal) + // Get the cid directly from the store + Ok(self.store.get(&cid).await.map_err(Error::Internal)?.data) } #[instrument(skip(self))] async fn load_cid(&self, cid: Cid) -> Result { @@ -369,60 +353,6 @@ impl Resolver { } } -/// Loader is responsible for fetching Cids. -/// It tries local storage and then the network (via bitswap). -struct Loader { - p2p: P2pClient, - store: SQLiteBlockStore, - session_counter: AtomicUsize, -} - -impl Loader { - // Load a Cid returning its bytes. - // If the Cid was not stored locally it will be added to the local store. - #[instrument(skip(self))] - async fn load_cid(&self, cid: Cid) -> anyhow::Result { - trace!("loading cid"); - - if let Some(loaded) = self.fetch_store(cid).await? 
{ - trace!("loaded from store"); - return Ok(loaded); - } - - let loaded = self.fetch_bitswap(cid).await?; - trace!("loaded from bitswap"); - - // Add loaded cid to the local store - self.store_data(cid, loaded.clone()); - Ok(loaded) - } - - #[instrument(skip(self))] - async fn fetch_store(&self, cid: Cid) -> anyhow::Result> { - self.store.get(cid).await - } - #[instrument(skip(self))] - async fn fetch_bitswap(&self, cid: Cid) -> anyhow::Result { - let session = self.session_counter.fetch_add(1, Ordering::SeqCst) as u64; - self.p2p - .fetch_bitswap(session, cid, Default::default()) - .await - } - - #[instrument(skip(self))] - fn store_data(&self, cid: Cid, data: Bytes) { - // trigger storage in the background - let store = self.store.clone(); - - tokio::spawn(async move { - match store.put(cid, data, vec![]).await { - Ok(_) => {} - Err(err) => error!(?err, "failed to put cid into local store"), - } - }); - } -} - #[cfg(test)] pub(crate) mod tests { use super::*; @@ -436,9 +366,8 @@ pub(crate) mod tests { async fn lookup_local(&self) -> Result; async fn lookup(&self, peer_id: PeerId) -> Result; async fn block_size(&self, cid: Cid) -> Result; - async fn block_get(&self, cid: Cid, offline: bool) -> Result; + async fn block_get(&self, cid: Cid) -> Result; async fn get(&self, ipfs_path: &IpfsPath) -> Result<(Cid, Ipld), Error>; - async fn put(&self, cid: Cid, blob: Bytes, links: Vec) -> Result<(), Error>; async fn resolve(&self, ipfs_path: &IpfsPath) -> Result<(Cid, String), Error>; async fn peers(&self) -> Result>, Error>; async fn connect(&self, peer_id: PeerId, addrs: Vec) -> Result<(), Error>; diff --git a/one/Cargo.toml b/one/Cargo.toml index 6665756e4..14fa89da1 100644 --- a/one/Cargo.toml +++ b/one/Cargo.toml @@ -17,6 +17,7 @@ ceramic-kubo-rpc = { path = "../kubo-rpc", features = ["http"] } ceramic-kubo-rpc-server.workspace = true ceramic-metrics.workspace = true ceramic-p2p.workspace = true +ceramic-store.workspace = true chrono = "0.4.31" cid.workspace = true clap.workspace = true @@ -30,6 +31,7 @@ home = "0.5" hyper.workspace = true iroh-rpc-client.workspace = true iroh-rpc-types.workspace = true +iroh-bitswap.workspace = true libipld.workspace = true libp2p.workspace = true multiaddr.workspace = true diff --git a/one/src/lib.rs b/one/src/lib.rs index 061cba7cf..dc064696e 100644 --- a/one/src/lib.rs +++ b/one/src/lib.rs @@ -288,7 +288,7 @@ type InterestInterest = FullInterests; type ReconInterest = Server, InterestInterest>; -type ModelStore = SQLiteStore; +type ModelStore = ceramic_store::Store; type ModelInterest = ReconInterestProvider; type ReconModel = Server, ModelInterest>; @@ -296,7 +296,7 @@ struct Daemon { opts: DaemonOpts, peer_id: PeerId, network: ceramic_core::Network, - ipfs: Ipfs, + ipfs: Ipfs, metrics_handle: MetricsHandle, recon_interest: ReconInterest, recon_model: ReconModel, @@ -436,10 +436,10 @@ impl Daemon { ); // Create second recon store for models. - let model_store = StoreMetricsMiddleware::new( - ModelStore::new(sql_pool.clone(), "model".to_string()).await?, - recon_metrics.clone(), - ); + let model_block_store = ModelStore::new(sql_pool.clone()).await?; + + let model_store = + StoreMetricsMiddleware::new(model_block_store.clone(), recon_metrics.clone()); // Construct a recon implementation for interests. 
let mut recon_interest = Server::new(Recon::new( @@ -464,10 +464,16 @@ impl Daemon { let ipfs_metrics = ceramic_metrics::MetricsHandle::register(ceramic_kubo_rpc::IpfsMetrics::register); let p2p_metrics = ceramic_metrics::MetricsHandle::register(ceramic_p2p::Metrics::register); - let ipfs = Ipfs::builder() - .with_p2p(p2p_config, keypair, recons, sql_pool.clone(), p2p_metrics) + let ipfs = Ipfs::::builder() + .with_p2p( + p2p_config, + keypair, + recons, + model_block_store.clone(), + p2p_metrics, + ) .await? - .build(sql_pool.clone(), ipfs_metrics) + .build(model_block_store, ipfs_metrics) .await?; Ok(Daemon { diff --git a/one/src/network.rs b/one/src/network.rs index 63d5590c0..e844f358f 100644 --- a/one/src/network.rs +++ b/one/src/network.rs @@ -3,9 +3,9 @@ use std::sync::Arc; use anyhow::Result; -use ceramic_core::{EventId, Interest, SqlitePool}; +use ceramic_core::{EventId, Interest}; use ceramic_kubo_rpc::{IpfsMetrics, IpfsMetricsMiddleware, IpfsService}; -use ceramic_p2p::{Config as P2pConfig, Libp2pConfig, Node, SQLiteBlockStore}; +use ceramic_p2p::{Config as P2pConfig, Libp2pConfig, Node}; use iroh_rpc_client::P2pClient; use iroh_rpc_types::{p2p::P2pAddr, Addr}; use libp2p::identity::Keypair; @@ -33,17 +33,18 @@ impl BuilderState for WithP2p {} /// Configure the p2p service impl Builder { - pub async fn with_p2p( + pub async fn with_p2p( self, libp2p_config: Libp2pConfig, keypair: Keypair, recons: Option<(I, M)>, - sql_pool: SqlitePool, + block_store: S, metrics: ceramic_p2p::Metrics, ) -> anyhow::Result> where I: Recon, M: Recon, + S: iroh_bitswap::Store, { let addr = Addr::new_mem(); @@ -51,7 +52,8 @@ impl Builder { config.libp2p = libp2p_config; - let mut p2p = Node::new(config, addr.clone(), keypair, recons, sql_pool, metrics).await?; + let mut p2p = + Node::new(config, addr.clone(), keypair, recons, block_store, metrics).await?; let task = task::spawn(async move { if let Err(err) = p2p.run().await { @@ -70,10 +72,13 @@ impl Builder { /// Finish the build impl Builder { - pub async fn build(self, sql_pool: SqlitePool, ipfs_metrics: IpfsMetrics) -> Result { + pub async fn build(self, block_store: S, ipfs_metrics: IpfsMetrics) -> Result> + where + S: iroh_bitswap::Store, + { let ipfs_service = Arc::new(IpfsService::new( P2pClient::new(self.state.p2p.addr.clone()).await?, - SQLiteBlockStore::new(sql_pool).await?, + block_store, )); let ipfs_service = IpfsMetricsMiddleware::new(ipfs_service, ipfs_metrics); Ok(Ipfs { @@ -84,16 +89,16 @@ impl Builder { } // Provides Ipfs node implementation -pub struct Ipfs { - api: IpfsMetricsMiddleware>, +pub struct Ipfs { + api: IpfsMetricsMiddleware>>, p2p: Service, } -impl Ipfs { +impl Ipfs { pub fn builder() -> Builder { Builder { state: Init {} } } - pub fn api(&self) -> IpfsMetricsMiddleware> { + pub fn api(&self) -> IpfsMetricsMiddleware>> { self.api.clone() } pub async fn stop(self) -> Result<()> { diff --git a/p2p/Cargo.toml b/p2p/Cargo.toml index 61924abb7..855424cc4 100644 --- a/p2p/Cargo.toml +++ b/p2p/Cargo.toml @@ -79,6 +79,7 @@ features = [ criterion.workspace = true rand_chacha.workspace = true test-log.workspace = true +ceramic-store.workspace = true [[bench]] name = "lru_cache" diff --git a/p2p/src/behaviour.rs b/p2p/src/behaviour.rs index 4d2f3cff7..3998c55e8 100644 --- a/p2p/src/behaviour.rs +++ b/p2p/src/behaviour.rs @@ -1,11 +1,8 @@ use std::time::Duration; use anyhow::Result; -use async_trait::async_trait; use ceramic_core::{EventId, Interest}; -use cid::Cid; -use iroh_bitswap::{Bitswap, Block, Config as BitswapConfig, 
Store}; -use iroh_rpc_client::Client; +use iroh_bitswap::{Bitswap, Block, Config as BitswapConfig}; use libp2p::{ autonat, connection_limits::{self, ConnectionLimits}, @@ -27,7 +24,6 @@ use tracing::{info, warn}; use self::ceramic_peer_manager::CeramicPeerManager; pub use self::event::Event; use crate::config::Libp2pConfig; -use crate::sqliteblockstore::SQLiteBlockStore; use crate::Metrics; mod ceramic_peer_manager; @@ -39,7 +35,10 @@ pub const AGENT_VERSION: &str = concat!("ceramic-one/", env!("CARGO_PKG_VERSION" /// Libp2p behaviour for the node. #[derive(NetworkBehaviour)] #[behaviour(to_swarm = "Event")] -pub(crate) struct NodeBehaviour { +pub(crate) struct NodeBehaviour +where + S: iroh_bitswap::Store, +{ // Place limits first in the behaviour tree. // Behaviours are called in order and the limits behaviour can deny connections etc. // It keeps things simpler in other behaviours if they are never called for connections that @@ -49,7 +48,7 @@ pub(crate) struct NodeBehaviour { pub(crate) peer_manager: CeramicPeerManager, ping: Ping, pub(crate) identify: identify::Behaviour, - pub(crate) bitswap: Toggle>, + pub(crate) bitswap: Toggle>, pub(crate) kad: Toggle>, mdns: Toggle, pub(crate) autonat: Toggle, @@ -59,50 +58,18 @@ pub(crate) struct NodeBehaviour { recon: Toggle>, } -#[derive(Debug, Clone)] -pub(crate) struct BitswapStore(Client); - -#[async_trait] -impl Store for BitswapStore { - async fn get(&self, cid: &Cid) -> Result { - let store = self.0.try_store()?; - let cid = *cid; - let data = store - .get(cid) - .await? - .ok_or_else(|| anyhow::anyhow!("not found"))?; - Ok(Block::new(data, cid)) - } - - async fn get_size(&self, cid: &Cid) -> Result { - let store = self.0.try_store()?; - let cid = *cid; - let size = store - .get_size(cid) - .await? - .ok_or_else(|| anyhow::anyhow!("not found"))?; - Ok(size as usize) - } - - async fn has(&self, cid: &Cid) -> Result { - let store = self.0.try_store()?; - let cid = *cid; - let res = store.has(cid).await?; - Ok(res) - } -} - -impl NodeBehaviour +impl NodeBehaviour where I: Recon, M: Recon, + S: iroh_bitswap::Store, { pub async fn new( local_key: &Keypair, config: &Libp2pConfig, relay_client: Option, recons: Option<(I, M)>, - block_store: SQLiteBlockStore, + block_store: S, metrics: Metrics, ) -> Result { let pub_key = local_key.public(); @@ -116,7 +83,7 @@ where } else { BitswapConfig::default_client_mode() }; - Some(Bitswap::::new(peer_id, block_store, bs_config).await) + Some(Bitswap::::new(peer_id, block_store, bs_config).await) } else { None } @@ -253,19 +220,3 @@ where Ok(()) } } - -#[cfg(test)] -mod tests { - use libp2p::swarm::dummy; - - use super::*; - - fn assert_send() {} - - #[test] - fn test_traits() { - assert_send::>(); - assert_send::, Toggle>>(); - assert_send::<&Bitswap>(); - } -} diff --git a/p2p/src/node.rs b/p2p/src/node.rs index 31ba57048..56c21af27 100644 --- a/p2p/src/node.rs +++ b/p2p/src/node.rs @@ -7,7 +7,7 @@ use std::{sync::atomic::Ordering, time::Duration}; use ahash::AHashMap; use anyhow::{anyhow, bail, Context, Result}; -use ceramic_core::{EventId, Interest, SqlitePool}; +use ceramic_core::{EventId, Interest}; use ceramic_metrics::{libp2p_metrics, Recorder}; use cid::Cid; use futures_util::stream::StreamExt; @@ -61,13 +61,14 @@ pub enum NetworkEvent { /// Node implements a peer to peer node that participates on the Ceramic network. /// /// Node provides an external API via RpcMessages. 
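+/// Node is generic over its bitswap block store `S`; any `iroh_bitswap::Store` implementation (such as the new `ceramic_store::Store`) can back it.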
-pub struct Node +pub struct Node where I: Recon, M: Recon, + S: iroh_bitswap::Store, { metrics: Metrics, - swarm: Swarm>, + swarm: Swarm>, supported_protocols: HashSet, net_receiver_in: Receiver, dial_queries: AHashMap>>>, @@ -87,10 +88,11 @@ where active_address_probe: Option, } -impl fmt::Debug for Node +impl fmt::Debug for Node where I: Recon, M: Recon, + S: iroh_bitswap::Store, { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_struct("Node") @@ -119,10 +121,11 @@ const NICE_INTERVAL: Duration = Duration::from_secs(6); const BOOTSTRAP_INTERVAL: Duration = Duration::from_secs(5 * 60); const EXPIRY_INTERVAL: Duration = Duration::from_secs(1); -impl Drop for Node +impl Drop for Node where I: Recon, M: Recon, + S: iroh_bitswap::Store, { fn drop(&mut self) { self.rpc_task.abort(); @@ -131,18 +134,19 @@ where // Allow IntoConnectionHandler deprecated associated type. // We are not using IntoConnectionHandler directly only referencing the type as part of this event signature. -type NodeSwarmEvent = SwarmEvent< as NetworkBehaviour>::ToSwarm>; -impl Node +type NodeSwarmEvent = SwarmEvent< as NetworkBehaviour>::ToSwarm>; +impl Node where I: Recon, M: Recon, + S: iroh_bitswap::Store, { pub async fn new( config: Config, rpc_addr: P2pAddr, keypair: Keypair, recons: Option<(I, M)>, - sql_pool: SqlitePool, + block_store: S, metrics: Metrics, ) -> Result { let (network_sender_in, network_receiver_in) = channel(1024); // TODO: configurable @@ -153,12 +157,11 @@ where .. } = config; - let block_store = crate::SQLiteBlockStore::new(sql_pool).await?; let mut swarm = build_swarm( &libp2p_config, keypair, recons, - block_store.clone(), + block_store, metrics.clone(), ) .await?; @@ -449,7 +452,7 @@ where #[tracing::instrument(skip_all)] fn handle_swarm_event( &mut self, - event: NodeSwarmEvent, + event: NodeSwarmEvent, ) -> Result> { libp2p_metrics().record(&event); match event { @@ -1160,7 +1163,7 @@ mod tests { use crate::keys::Keypair; use async_trait::async_trait; - use ceramic_core::RangeOpen; + use ceramic_core::{RangeOpen, SqlitePool}; use futures::TryStreamExt; use rand::prelude::*; use rand_chacha::ChaCha8Rng; @@ -1386,7 +1389,7 @@ mod tests { rpc_server_addr, keypair.into(), None::<(DummyRecon, DummyRecon)>, - sql_pool, + ceramic_store::Store::new(sql_pool).await?, metrics, ) .await?; diff --git a/p2p/src/swarm.rs b/p2p/src/swarm.rs index 2a06e3cc9..421a53e19 100644 --- a/p2p/src/swarm.rs +++ b/p2p/src/swarm.rs @@ -4,18 +4,19 @@ use libp2p::{noise, relay, swarm::Executor, tcp, tls, yamux, Swarm, SwarmBuilder use libp2p_identity::Keypair; use recon::{libp2p::Recon, Sha256a}; -use crate::{behaviour::NodeBehaviour, Libp2pConfig, Metrics, SQLiteBlockStore}; +use crate::{behaviour::NodeBehaviour, Libp2pConfig, Metrics}; -pub(crate) async fn build_swarm( +pub(crate) async fn build_swarm( config: &Libp2pConfig, keypair: Keypair, recons: Option<(I, M)>, - block_store: SQLiteBlockStore, + block_store: S, metrics: Metrics, -) -> Result>> +) -> Result>> where I: Recon, M: Recon, + S: iroh_bitswap::Store, { let builder = SwarmBuilder::with_existing_identity(keypair) .with_tokio() @@ -68,17 +69,18 @@ where } } -fn new_behavior( +fn new_behavior( config: &Libp2pConfig, keypair: &Keypair, relay_client: Option, recons: Option<(I, M)>, - block_store: SQLiteBlockStore, + block_store: S, metrics: Metrics, -) -> Result> +) -> Result> where I: Recon + Send, M: Recon + Send, + S: iroh_bitswap::Store, { // TODO(WS1-1363): Remove bitswap async initialization let keypair = keypair.clone(); diff --git 
a/recon/src/lib.rs b/recon/src/lib.rs index 5ce39bb23..5a0591905 100644 --- a/recon/src/lib.rs +++ b/recon/src/lib.rs @@ -6,8 +6,8 @@ pub use crate::{ metrics::Metrics, recon::{ btreestore::BTreeStore, sqlitestore::SQLiteStore, store_metrics::StoreMetricsMiddleware, - AssociativeHash, FullInterests, InterestProvider, Key, Range, Recon, ReconInterestProvider, - Store, SyncState, + AssociativeHash, FullInterests, HashCount, InsertResult, InterestProvider, Key, Range, + Recon, ReconInterestProvider, ReconItem, Store, SyncState, }, sha256a::Sha256a, }; diff --git a/recon/src/recon.rs b/recon/src/recon.rs index b49d994c1..2b92efd14 100644 --- a/recon/src/recon.rs +++ b/recon/src/recon.rs @@ -343,6 +343,13 @@ pub struct HashCount { count: u64, } +impl HashCount { + /// Construct a new HashCount + pub fn new(hash: H, count: u64) -> Self { + Self { hash, count } + } +} + impl std::fmt::Debug for HashCount where H: std::fmt::Debug, @@ -377,12 +384,15 @@ impl From for HashCount { } } +/// A key value pair to store #[derive(Clone, Debug)] pub struct ReconItem<'a, K> where K: Key, { + /// The key. pub key: &'a K, + /// The value; if not set, no value is stored. pub value: Option<&'a [u8]>, } @@ -390,14 +400,17 @@ impl<'a, K> ReconItem<'a, K> where K: Key, { + /// Construct a new item with a key and optional value pub fn new(key: &'a K, value: Option<&'a [u8]>) -> Self { Self { key, value } } + /// Construct a new item with only a key pub fn new_key(key: &'a K) -> Self { Self { key, value: None } } + /// Construct a new item with a key and a value pub fn new_with_value(key: &'a K, value: &'a [u8]) -> Self { Self { key, @@ -406,15 +419,18 @@ where } } +/// The result of an insert operation. #[derive(Clone, Debug, PartialEq, Eq, Hash)] pub struct InsertResult { /// A true/false list indicating whether or not the key was new. /// It is in the same order as the input list of keys. pub keys: Vec, + /// Number of values inserted. pub value_count: usize, } impl InsertResult { + /// Construct an insert result pub fn new(new_keys: Vec, value_count: usize) -> Self { Self { keys: new_keys, diff --git a/store/Cargo.toml b/store/Cargo.toml new file mode 100644 index 000000000..ebb748837 --- /dev/null +++ b/store/Cargo.toml @@ -0,0 +1,35 @@ +[package] +name = "ceramic-store" +description = "A storage implementation for Ceramic" +version.workspace = true +edition.workspace = true +authors.workspace = true +license.workspace = true +repository.workspace = true +publish = false + +[dependencies] +anyhow.workspace = true +async-trait.workspace = true +bytes.workspace = true +ceramic-core.workspace = true +ceramic-metrics.workspace = true +cid.workspace = true +futures.workspace = true +hex.workspace = true +iroh-bitswap.workspace = true +iroh-car.workspace = true +itertools = "0.12.0" +multihash.workspace = true +recon.workspace = true +sqlx.workspace = true +tracing.workspace = true + +[dev-dependencies] +expect-test.workspace = true +rand.workspace = true +test-log.workspace = true +tokio.workspace = true +tracing-subscriber.workspace = true +libipld.workspace = true +libipld-cbor.workspace = true diff --git a/store/src/lib.rs b/store/src/lib.rs new file mode 100644 index 000000000..d8145c3dd --- /dev/null +++ b/store/src/lib.rs @@ -0,0 +1,714 @@ +//! A unified implementation of both the [`recon::Store`] and [`iroh_bitswap::Store`] traits. +//!
This unified implementation allows Recon values to be exposed as IPFS blocks. +#![warn(missing_docs)] + +#[cfg(test)] +mod tests; + +use std::collections::BTreeSet; + +use anyhow::{anyhow, Result}; +use async_trait::async_trait; +use bytes::Bytes; +use ceramic_core::{DbTx, EventId, RangeOpen, SqlitePool}; +use cid::Cid; +use iroh_bitswap::Block; +use iroh_car::{CarHeader, CarReader, CarWriter}; +use itertools::{process_results, Itertools}; +use multihash::{Code, MultihashDigest}; +use recon::{AssociativeHash, HashCount, InsertResult, Key, ReconItem, Sha256a}; +use sqlx::Row; +use tracing::instrument; + +const SORT_KEY: &str = "model"; + +/// Unified implementation of [`recon::Store`] and [`iroh_bitswap::Store`] that can expose the +/// individual blocks from the CAR files directly. +#[derive(Clone, Debug)] +pub struct Store { + pool: SqlitePool, +} + +#[derive(Debug)] +struct BlockRow { + cid: Cid, + root: bool, + bytes: Vec, +} + +impl Store { + /// Create an instance of the store initializing any necessary tables. + pub async fn new(pool: SqlitePool) -> Result { + let mut store = Store { pool }; + store.create_table_if_not_exists().await?; + Ok(store) + } + + /// Initialize the recon table. + async fn create_table_if_not_exists(&mut self) -> Result<()> { + // Do we want to remove CID from the table? + const CREATE_STORE_KEY_TABLE: &str = "CREATE TABLE IF NOT EXISTS store_key ( + sort_key TEXT, -- the field in the event header to sort by e.g. model + key BLOB, -- network_id sort_value controller StreamID height event_cid + ahash_0 INTEGER, -- the ahash is decomposed as [u32; 8] + ahash_1 INTEGER, + ahash_2 INTEGER, + ahash_3 INTEGER, + ahash_4 INTEGER, + ahash_5 INTEGER, + ahash_6 INTEGER, + ahash_7 INTEGER, + CID TEXT, + value_retrieved BOOL, -- indicates if we have the value + PRIMARY KEY(sort_key, key) + )"; + const CREATE_VALUE_RETRIEVED_INDEX: &str = + "CREATE INDEX IF NOT EXISTS idx_key_value_retrieved + ON store_key (sort_key, key, value_retrieved)"; + + const CREATE_STORE_BLOCK_TABLE: &str = "CREATE TABLE IF NOT EXISTS store_block ( + sort_key TEXT, -- the field in the event header to sort by e.g. model + key BLOB, -- network_id sort_value controller StreamID height event_cid + idx INTEGER, -- the index of the block in the CAR file + root BOOL, -- when true the block is a root in the CAR file + cid BLOB, -- the cid of the Block as bytes no 0x00 prefix + bytes BLOB, -- the Block + PRIMARY KEY(cid) + )"; + // TODO should this include idx or not? + const CREATE_BLOCK_ORDER_INDEX: &str = "CREATE INDEX IF NOT EXISTS idx_block_idx + ON store_block (sort_key, key)"; + + let mut tx = self.pool.tx().await?; + sqlx::query(CREATE_STORE_KEY_TABLE) + .execute(&mut *tx) + .await?; + sqlx::query(CREATE_VALUE_RETRIEVED_INDEX) + .execute(&mut *tx) + .await?; + sqlx::query(CREATE_STORE_BLOCK_TABLE) + .execute(&mut *tx) + .await?; + sqlx::query(CREATE_BLOCK_ORDER_INDEX) + .execute(&mut *tx) + .await?; + + tx.commit().await?; + Ok(()) + } + /// Returns a (new_key, new_val) tuple. + async fn insert_item_int( + &mut self, + item: &ReconItem<'_, EventId>, + conn: &mut DbTx<'_>, + ) -> Result<(bool, bool)> { + // We insert the value first, since we may already have the key and can then skip that step. + // Because this all happens in one transaction, the value insert is rolled back if the key insert fails, and we can try again. + if let Some(val) = item.value { + // Update the value_retrieved flag, and report if the key already exists.
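+ // If the flag update hits an existing row the key is already present, so
+ // after writing the blocks below we can skip the key insert entirely (see
+ // the early return at the end of this branch).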
+ let key_exists = self.update_value_retrieved_int(item.key, conn).await?; + + // Put each block from the car file + let mut reader = CarReader::new(val).await?; + let roots: BTreeSet = reader.header().roots().iter().cloned().collect(); + let mut idx = 0; + while let Some((cid, data)) = reader.next_block().await? { + self.insert_block_int(item.key, idx, roots.contains(&cid), cid, &data.into(), conn) + .await?; + idx += 1; + } + + if key_exists { + return Ok((false, true)); + } + } + let new_key = self + .insert_key_int(item.key, item.value.is_some(), conn) + .await?; + Ok((new_key, item.value.is_some())) + } + + // set value_retrieved to true and return if the key already exists + async fn update_value_retrieved_int( + &mut self, + key: &EventId, + conn: &mut DbTx<'_>, + ) -> Result { + let update = sqlx::query( + "UPDATE store_key SET value_retrieved = true WHERE sort_key = ? AND key = ?", + ); + let resp = update + .bind(SORT_KEY) + .bind(key.as_bytes()) + .execute(&mut **conn) + .await?; + let rows_affected = resp.rows_affected(); + debug_assert!(rows_affected <= 1); + Ok(rows_affected == 1) + } + + // store a block in the db. + async fn insert_block_int( + &self, + key: &EventId, + idx: i32, + root: bool, + cid: Cid, + blob: &Bytes, + conn: &mut DbTx<'_>, + ) -> Result<()> { + let hash = match cid.hash().code() { + 0x12 => Code::Sha2_256.digest(blob), + 0x1b => Code::Keccak256.digest(blob), + 0x11 => return Err(anyhow!("Sha1 not supported")), + _ => { + return Err(anyhow!( + "multihash type {:#x} not Sha2_256, Keccak256", + cid.hash().code(), + )) + } + }; + if cid.hash().to_bytes() != hash.to_bytes() { + return Err(anyhow!( + "cid did not match blob {} != {}", + hex::encode(cid.hash().to_bytes()), + hex::encode(hash.to_bytes()) + )); + } + + sqlx::query( + "INSERT INTO store_block (sort_key, key, idx, root, cid, bytes) VALUES (?, ?, ?, ?, ?, ?)", + ) + .bind(SORT_KEY) + .bind(key.as_bytes()) + .bind(idx) + .bind(root) + .bind(cid.to_bytes()) + .bind(blob.to_vec()) + .execute(&mut **conn) + .await?; + Ok(()) + } + + async fn insert_key_int( + &mut self, + key: &EventId, + has_value: bool, + conn: &mut DbTx<'_>, + ) -> Result { + let key_insert = sqlx::query( + "INSERT INTO store_key ( + sort_key, key, + ahash_0, ahash_1, ahash_2, ahash_3, + ahash_4, ahash_5, ahash_6, ahash_7, + value_retrieved + ) VALUES ( + ?, ?, + ?, ?, ?, ?, + ?, ?, ?, ?, + ? 
+ );", + ); + + let hash = Sha256a::digest(key); + let resp = key_insert + .bind(SORT_KEY) + .bind(key.as_bytes()) + .bind(hash.as_u32s()[0]) + .bind(hash.as_u32s()[1]) + .bind(hash.as_u32s()[2]) + .bind(hash.as_u32s()[3]) + .bind(hash.as_u32s()[4]) + .bind(hash.as_u32s()[5]) + .bind(hash.as_u32s()[6]) + .bind(hash.as_u32s()[7]) + .bind(has_value) + .execute(&mut **conn) + .await; + match resp { + std::result::Result::Ok(_rows) => Ok(true), + Err(sqlx::Error::Database(err)) => { + if err.is_unique_violation() { + Ok(false) + } else { + Err(sqlx::Error::Database(err).into()) + } + } + Err(err) => Err(err.into()), + } + } + + async fn rebuild_car(&mut self, blocks: Vec) -> Result>> { + if blocks.is_empty() { + return Ok(None); + } + + let size = blocks.iter().fold(0, |sum, row| sum + row.bytes.len()); + let roots: Vec = blocks + .iter() + .filter(|row| row.root) + .map(|row| row.cid) + .collect(); + // Reconstruct the car file + // TODO figure out a better capacity calculation + let mut car = Vec::with_capacity(size + 100 * blocks.len()); + let mut writer = CarWriter::new(CarHeader::V1(roots.into()), &mut car); + for BlockRow { + cid, + bytes, + root: _, + } in blocks + { + writer.write(cid, bytes).await?; + } + writer.finish().await?; + Ok(Some(car)) + } +} + +#[async_trait] +impl recon::Store for Store { + type Key = EventId; + type Hash = Sha256a; + + /// Returns true if the key was new. The value is always updated if included + async fn insert(&mut self, item: ReconItem<'_, Self::Key>) -> Result { + let mut tx = self.pool.writer().begin().await?; + let (new_key, _new_val) = self.insert_item_int(&item, &mut tx).await?; + tx.commit().await?; + Ok(new_key) + } + + /// Insert new keys into the key space. + /// Returns true if a key did not previously exist. + async fn insert_many<'a, I>(&mut self, items: I) -> Result + where + I: ExactSizeIterator> + Send + Sync, + { + match items.len() { + 0 => Ok(InsertResult::new(vec![], 0)), + _ => { + let mut results = vec![false; items.len()]; + let mut new_val_cnt = 0; + let mut tx = self.pool.writer().begin().await?; + + for (idx, item) in items.enumerate() { + let (new_key, new_val) = self.insert_item_int(&item, &mut tx).await?; + results[idx] = new_key; + if new_val { + new_val_cnt += 1; + } + } + tx.commit().await?; + Ok(InsertResult::new(results, new_val_cnt)) + } + } + } + + /// return the hash and count for a range + #[instrument(skip(self))] + async fn hash_range( + &mut self, + left_fencepost: &Self::Key, + right_fencepost: &Self::Key, + ) -> Result> { + if left_fencepost >= right_fencepost { + return Ok(HashCount::new(Sha256a::identity(), 0)); + } + + let query = sqlx::query( + "SELECT + TOTAL(ahash_0) & 0xFFFFFFFF, TOTAL(ahash_1) & 0xFFFFFFFF, + TOTAL(ahash_2) & 0xFFFFFFFF, TOTAL(ahash_3) & 0xFFFFFFFF, + TOTAL(ahash_4) & 0xFFFFFFFF, TOTAL(ahash_5) & 0xFFFFFFFF, + TOTAL(ahash_6) & 0xFFFFFFFF, TOTAL(ahash_7) & 0xFFFFFFFF, + COUNT(1) + FROM store_key WHERE sort_key = ? AND key > ? 
AND key < ?;", + ); + let row = query + .bind(SORT_KEY) + .bind(left_fencepost.as_bytes()) + .bind(right_fencepost.as_bytes()) + .fetch_one(self.pool.reader()) + .await?; + let bytes: [u32; 8] = [ + row.get(0), + row.get(1), + row.get(2), + row.get(3), + row.get(4), + row.get(5), + row.get(6), + row.get(7), + ]; + let count: i64 = row.get(8); // sql int type is signed + let count: u64 = count + .try_into() + .expect("COUNT(1) should never return a negative number"); + Ok(HashCount::new(Sha256a::from(bytes), count)) + } + + #[instrument(skip(self))] + async fn range( + &mut self, + left_fencepost: &Self::Key, + right_fencepost: &Self::Key, + offset: usize, + limit: usize, + ) -> Result + Send + 'static>> { + let query = sqlx::query( + " + SELECT + key + FROM + store_key + WHERE + sort_key = ? AND + key > ? AND key < ? + ORDER BY + key ASC + LIMIT + ? + OFFSET + ?; + ", + ); + let rows = query + .bind(SORT_KEY) + .bind(left_fencepost.as_bytes()) + .bind(right_fencepost.as_bytes()) + .bind(limit as i64) + .bind(offset as i64) + .fetch_all(self.pool.reader()) + .await?; + //debug!(count = rows.len(), "rows"); + Ok(Box::new(rows.into_iter().map(|row| { + let bytes: Vec = row.get(0); + EventId::from(bytes) + }))) + } + #[instrument(skip(self))] + async fn range_with_values( + &mut self, + left_fencepost: &Self::Key, + right_fencepost: &Self::Key, + offset: usize, + limit: usize, + ) -> Result)> + Send + 'static>> { + let query = sqlx::query( + " + SELECT + store_block.key, store_block.cid, store_block.root, store_block.bytes + FROM ( + SELECT + key + FROM store_key + WHERE + sort_key = ? + AND key > ? AND key < ? + AND value_retrieved = true + ORDER BY + key ASC + LIMIT + ? + OFFSET + ? + ) key + JOIN + store_block + ON + key.key = store_block.key + ;", + ); + let all_blocks = query + .bind(SORT_KEY) + .bind(left_fencepost.as_bytes()) + .bind(right_fencepost.as_bytes()) + .bind(limit as i64) + .bind(offset as i64) + .fetch_all(self.pool.reader()) + .await?; + + // Consume all block into groups of blocks by their key. + let all_blocks: Vec<(Self::Key, Vec)> = process_results( + all_blocks.into_iter().map(|row| { + Cid::read_bytes(row.get::<&[u8], _>(1)) + .map_err(anyhow::Error::from) + .map(|cid| { + ( + Self::Key::from(row.get::, _>(0)), + cid, + row.get(2), + row.get(3), + ) + }) + }), + |blocks| { + blocks + .group_by(|(key, _, _, _)| key.clone()) + .into_iter() + .map(|(key, group)| { + ( + key, + group + .map(|(_key, cid, root, bytes)| BlockRow { cid, root, bytes }) + .collect::>(), + ) + }) + .collect() + }, + )?; + + let mut values: Vec<(Self::Key, Vec)> = Vec::new(); + for (key, blocks) in all_blocks { + if let Some(value) = self.rebuild_car(blocks).await? { + values.push((key.clone(), value)); + } + } + Ok(Box::new(values.into_iter())) + } + /// Return the number of keys within the range. + #[instrument(skip(self))] + async fn count( + &mut self, + left_fencepost: &Self::Key, + right_fencepost: &Self::Key, + ) -> Result { + let query = sqlx::query( + " + SELECT + count(key) + FROM + store_key + WHERE + sort_key = ? AND + key > ? AND key < ? + ;", + ); + let row = query + .bind(SORT_KEY) + .bind(left_fencepost.as_bytes()) + .bind(right_fencepost.as_bytes()) + .fetch_one(self.pool.reader()) + .await?; + Ok(row.get::<'_, i64, _>(0) as usize) + } + + /// Return the first key within the range. 
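+ /// Returns None when no key falls within the range.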
+ #[instrument(skip(self))] + async fn first( + &mut self, + left_fencepost: &Self::Key, + right_fencepost: &Self::Key, + ) -> Result> { + let query = sqlx::query( + " + SELECT + key + FROM + store_key + WHERE + sort_key = ? AND + key > ? AND key < ? + ORDER BY + key ASC + LIMIT + 1 + ; ", + ); + let rows = query + .bind(SORT_KEY) + .bind(left_fencepost.as_bytes()) + .bind(right_fencepost.as_bytes()) + .fetch_all(self.pool.reader()) + .await?; + Ok(rows.first().map(|row| { + let bytes: Vec = row.get(0); + EventId::from(bytes) + })) + } + + #[instrument(skip(self))] + async fn last( + &mut self, + left_fencepost: &Self::Key, + right_fencepost: &Self::Key, + ) -> Result> { + let query = sqlx::query( + " + SELECT + key + FROM + store_key + WHERE + sort_key = ? AND + key > ? AND key < ? + ORDER BY + key DESC + LIMIT + 1 + ;", + ); + let rows = query + .bind(SORT_KEY) + .bind(left_fencepost.as_bytes()) + .bind(right_fencepost.as_bytes()) + .fetch_all(self.pool.reader()) + .await?; + Ok(rows.first().map(|row| { + let bytes: Vec = row.get(0); + EventId::from(bytes) + })) + } + + #[instrument(skip(self))] + async fn first_and_last( + &mut self, + left_fencepost: &Self::Key, + right_fencepost: &Self::Key, + ) -> Result> { + let query = sqlx::query( + " + SELECT first.key, last.key + FROM + ( + SELECT key + FROM store_key + WHERE + sort_key = ? AND + key > ? AND key < ? + ORDER BY key ASC + LIMIT 1 + ) as first + JOIN + ( + SELECT key + FROM store_key + WHERE + sort_key = ? AND + key > ? AND key < ? + ORDER BY key DESC + LIMIT 1 + ) as last + ;", + ); + let rows = query + .bind(SORT_KEY) + .bind(left_fencepost.as_bytes()) + .bind(right_fencepost.as_bytes()) + .bind(SORT_KEY) + .bind(left_fencepost.as_bytes()) + .bind(right_fencepost.as_bytes()) + .fetch_all(self.pool.reader()) + .await?; + if let Some(row) = rows.first() { + let first = EventId::from(row.get::, _>(0)); + let last = EventId::from(row.get::, _>(1)); + Ok(Some((first, last))) + } else { + Ok(None) + } + } + + #[instrument(skip(self))] + async fn value_for_key(&mut self, key: &Self::Key) -> Result>> { + let query = sqlx::query( + " + SELECT + cid, root, bytes + FROM store_block + WHERE + sort_key=? + AND key=? + ORDER BY idx + ;", + ); + let blocks = query + .bind(SORT_KEY) + .bind(key.as_bytes()) + .fetch_all(self.pool.reader()) + .await?; + self.rebuild_car( + blocks + .into_iter() + .map(|row| { + Cid::read_bytes(row.get::<&[u8], _>(0)) + .map_err(anyhow::Error::from) + .map(|cid| BlockRow { + cid, + root: row.get(1), + bytes: row.get(2), + }) + }) + .collect::>>()?, + ) + .await + } + + #[instrument(skip(self))] + async fn keys_with_missing_values( + &mut self, + range: RangeOpen, + ) -> Result> { + if range.start >= range.end { + return Ok(vec![]); + }; + let query = sqlx::query( + " + SELECT key + FROM store_key + WHERE + sort_key=? + AND key > ? + AND key < ? + AND value_retrieved = false + ;", + ); + let row = query + .bind(SORT_KEY) + .bind(range.start.as_bytes()) + .bind(range.end.as_bytes()) + .fetch_all(self.pool.reader()) + .await?; + Ok(row + .into_iter() + .map(|row| EventId::from(row.get::, _>(0))) + .collect()) + } +} + +#[async_trait] +impl iroh_bitswap::Store for Store { + async fn get_size(&self, cid: &Cid) -> Result { + Ok( + sqlx::query("SELECT length(bytes) FROM store_block WHERE cid = ?;") + .bind(cid.to_bytes()) + .fetch_one(self.pool.reader()) + .await? 
+ .get::<'_, i64, _>(0) as usize, + ) + } + + async fn get(&self, cid: &Cid) -> Result { + Ok(Block::new( + sqlx::query("SELECT bytes FROM store_block WHERE cid = ?;") + .bind(cid.to_bytes()) + .fetch_one(self.pool.reader()) + .await? + .get::<'_, Vec, _>(0) + .into(), + cid.to_owned(), + )) + } + + async fn has(&self, cid: &Cid) -> Result { + Ok( + sqlx::query("SELECT count(1) FROM store_block WHERE cid = ?;") + .bind(cid.to_bytes()) + .fetch_one(self.pool.reader()) + .await? + .get::<'_, i64, _>(0) + > 0, + ) + } +} diff --git a/store/src/tests.rs b/store/src/tests.rs new file mode 100644 index 000000000..7497b950b --- /dev/null +++ b/store/src/tests.rs @@ -0,0 +1,521 @@ +use super::*; + +use std::str::FromStr; + +use ceramic_core::{ + event_id::{Builder, WithInit}, + Network, SqlitePool, +}; +use cid::Cid; +use expect_test::expect; +use iroh_car::{CarHeader, CarWriter}; +use libipld::{ipld, prelude::Encode, Ipld}; +use libipld_cbor::DagCborCodec; +use rand::Rng; +use test_log::test; + +async fn new_store() -> Store { + let conn = SqlitePool::connect("sqlite::memory:").await.unwrap(); + Store::new(conn).await.unwrap() +} + +#[tokio::test] +async fn get_nonexistent_block() { + let store = new_store().await; + + let cid = Cid::from_str("bafybeibazl2z4vqp2tmwcfag6wirmtpnomxknqcgrauj7m2yisrz3qjbom").unwrap(); // cspell:disable-line + + let exists = iroh_bitswap::Store::has(&store, &cid).await.unwrap(); + assert_eq!(false, exists); +} + +const MODEL_ID: &str = "k2t6wz4yhfp1r5pwi52gw89nzjbu53qk7m32o5iguw42c6knsaj0feuf927agb"; +const CONTROLLER: &str = "did:key:z6Mkqtw7Pj5Lv9xc4PgUYAnwfaVoMC6FRneGWVr5ekTEfKVL"; +const INIT_ID: &str = "baeabeiajn5ypv2gllvkk4muvzujvcnoen2orknxix7qtil2daqn6vu6khq"; + +// Return a builder for an event with the same network, model, controller, and stream. +fn event_id_builder() -> Builder { + EventId::builder() + .with_network(&Network::DevUnstable) + .with_sort_value(SORT_KEY, MODEL_ID) + .with_controller(CONTROLLER) + .with_init(&Cid::from_str(INIT_ID).unwrap()) +} + +// Generate an event for the same network, model, controller, and stream. +// The event CID and height are random when their arguments are None.
+fn random_event_id(height: Option, event: Option<&str>) -> EventId { + event_id_builder() + .with_event_height(height.unwrap_or_else(|| rand::thread_rng().gen())) + .with_event( + &event + .map(|cid| Cid::from_str(cid).unwrap()) + .unwrap_or_else(|| random_cid()), + ) + .build() +} +// The EventId that is the minimum of all possible random event ids +fn random_event_id_min() -> EventId { + event_id_builder().with_min_event_height().build_fencepost() +} +// The EventId that is the maximum of all possible random event ids +fn random_event_id_max() -> EventId { + event_id_builder().with_max_event_height().build_fencepost() +} + +fn random_cid() -> Cid { + let mut data = [0u8; 8]; + rand::Rng::fill(&mut ::rand::thread_rng(), &mut data); + let hash = MultihashDigest::digest(&Code::Sha2_256, &data); + Cid::new_v1(0x00, hash) +} + +async fn build_car_file(count: usize) -> (Vec, Vec) { + let blocks: Vec = (0..count).map(|_| random_block()).collect(); + let root = ipld!( { + "links": blocks.iter().map(|block| Ipld::Link(block.cid)).collect::>(), + }); + let mut root_bytes = Vec::new(); + root.encode(DagCborCodec, &mut root_bytes).unwrap(); + let root_cid = Cid::new_v1(0x71, MultihashDigest::digest(&Code::Sha2_256, &root_bytes)); + let mut car = Vec::new(); + let roots: Vec = vec![root_cid]; + let mut writer = CarWriter::new(CarHeader::V1(roots.into()), &mut car); + writer.write(root_cid, root_bytes).await.unwrap(); + for block in &blocks { + writer.write(block.cid, &block.data).await.unwrap(); + } + writer.finish().await.unwrap(); + (blocks, car) +} + +fn random_block() -> Block { + let mut data = [0u8; 1024]; + rand::Rng::fill(&mut ::rand::thread_rng(), &mut data); + let hash = ::multihash::MultihashDigest::digest(&::multihash::Code::Sha2_256, &data); + Block { + cid: Cid::new_v1(0x00, hash), + data: data.to_vec().into(), + } +} + +#[test(tokio::test)] +async fn hash_range_query() { + let mut store = new_store().await; + recon::Store::insert( + &mut store, + ReconItem::new_key(&random_event_id( + Some(1), + Some("baeabeiazgwnti363jifhxaeaegbluw4ogcd2t5hsjaglo46wuwcgajqa5u"), + )), + ) + .await + .unwrap(); + recon::Store::insert( + &mut store, + ReconItem::new_key(&random_event_id( + Some(2), + Some("baeabeihyl35xdlfju3zrkvy2exmnl6wics3rc5ppz7hwg7l7g4brbtnpny"), + )), + ) + .await + .unwrap(); + let hash = recon::Store::hash_range(&mut store, &random_event_id_min(), &random_event_id_max()) + .await + .unwrap(); + expect!["65C7A25327CC05C19AB5812103EEB8D1156595832B453C7BAC6A186F4811FA0A#2"] + .assert_eq(&format!("{hash}")); +} + +#[test(tokio::test)] +async fn range_query() { + let mut store = new_store().await; + recon::Store::insert( + &mut store, + ReconItem::new_key(&random_event_id( + Some(1), + Some("baeabeichhhmbhsic4maraneqf5gkhekgzcawhtpj3fh6opjtglznapz524"), + )), + ) + .await + .unwrap(); + recon::Store::insert( + &mut store, + ReconItem::new_key(&random_event_id( + Some(2), + Some("baeabeibmek7v4ljsu575ohgjhovdxhcw6p6oivgb55hzkeap5po7ghzqty"), + )), + ) + .await + .unwrap(); + let ids = recon::Store::range( + &mut store, + &random_event_id_min(), + &random_event_id_max(), + 0, + usize::MAX, + ) + .await + .unwrap(); + expect![[r#" + [ + EventId { + network_id: Some( + 2, + ), + separator: Some( + "b51217a029eb540d", + ), + controller: Some( + "4f16d8429ae87f86", + ), + stream_id: Some( + "ead3ca3c", + ), + event_height: Some( + 1, + ), + cid: Some( + "baeabeichhhmbhsic4maraneqf5gkhekgzcawhtpj3fh6opjtglznapz524", + ), + }, + EventId { + network_id: Some( + 2, + ), + separator: Some(
"b51217a029eb540d", + ), + controller: Some( + "4f16d8429ae87f86", + ), + stream_id: Some( + "ead3ca3c", + ), + event_height: Some( + 2, + ), + cid: Some( + "baeabeibmek7v4ljsu575ohgjhovdxhcw6p6oivgb55hzkeap5po7ghzqty", + ), + }, + ] + "#]] + .assert_debug_eq(&ids.collect::>()); +} + +#[test(tokio::test)] +async fn range_query_with_values() { + let mut store = new_store().await; + // Write three keys, two with values and one without + let one_id = random_event_id( + Some(1), + Some("baeabeichhhmbhsic4maraneqf5gkhekgzcawhtpj3fh6opjtglznapz524"), + ); + let two_id = random_event_id( + Some(2), + Some("baeabeibmek7v4ljsu575ohgjhovdxhcw6p6oivgb55hzkeap5po7ghzqty"), + ); + let (_one_blocks, one_car) = build_car_file(2).await; + let (_two_blocks, two_car) = build_car_file(3).await; + recon::Store::insert(&mut store, ReconItem::new(&one_id, Some(&one_car))) + .await + .unwrap(); + recon::Store::insert(&mut store, ReconItem::new(&two_id, Some(&two_car))) + .await + .unwrap(); + recon::Store::insert( + &mut store, + ReconItem::new( + &random_event_id( + Some(2), + Some("baeabeibmek7v4ljsu575ohgjhovdxhcw6p6oivgb55hzkeap5po7ghzqty"), + ), + None, + ), + ) + .await + .unwrap(); + let values: Vec<(EventId, Vec)> = recon::Store::range_with_values( + &mut store, + &random_event_id_min(), + &random_event_id_max(), + 0, + usize::MAX, + ) + .await + .unwrap() + .collect(); + + assert_eq!(vec![(one_id, one_car), (two_id, two_car)], values); +} + +#[test(tokio::test)] +async fn double_insert() { + let mut store = new_store().await; + let id = random_event_id(Some(10), None); + + // do take the first one + expect![ + r#" + Ok( + true, + ) + "# + ] + .assert_debug_eq(&recon::Store::insert(&mut store, ReconItem::new_key(&id)).await); + + // reject the second insert of same key + expect![ + r#" + Ok( + false, + ) + "# + ] + .assert_debug_eq(&recon::Store::insert(&mut store, ReconItem::new_key(&id)).await); +} + +#[test(tokio::test)] +async fn first_and_last() { + let mut store = new_store().await; + recon::Store::insert( + &mut store, + ReconItem::new_key(&random_event_id( + Some(10), + Some("baeabeie2bcird7765t7646jcoatd72tfn2tscdaap7g6kvvy7k43s34aau"), + )), + ) + .await + .unwrap(); + recon::Store::insert( + &mut store, + ReconItem::new_key(&random_event_id( + Some(11), + Some("baeabeianftvrst5bja422dod6uf42pmwkwix6rprguanwsxylfut56e3ue"), + )), + ) + .await + .unwrap(); + + // Only one key in range + let ret = recon::Store::first_and_last( + &mut store, + &event_id_builder().with_event_height(9).build_fencepost(), + &event_id_builder().with_event_height(11).build_fencepost(), + ) + .await + .unwrap(); + expect![[r#" + Some( + ( + EventId { + network_id: Some( + 2, + ), + separator: Some( + "b51217a029eb540d", + ), + controller: Some( + "4f16d8429ae87f86", + ), + stream_id: Some( + "ead3ca3c", + ), + event_height: Some( + 10, + ), + cid: Some( + "baeabeie2bcird7765t7646jcoatd72tfn2tscdaap7g6kvvy7k43s34aau", + ), + }, + EventId { + network_id: Some( + 2, + ), + separator: Some( + "b51217a029eb540d", + ), + controller: Some( + "4f16d8429ae87f86", + ), + stream_id: Some( + "ead3ca3c", + ), + event_height: Some( + 10, + ), + cid: Some( + "baeabeie2bcird7765t7646jcoatd72tfn2tscdaap7g6kvvy7k43s34aau", + ), + }, + ), + ) + "#]] + .assert_debug_eq(&ret); + + // No keys in range + let ret = recon::Store::first_and_last( + &mut store, + &event_id_builder().with_event_height(12).build_fencepost(), + &event_id_builder().with_max_event_height().build_fencepost(), + ) + .await + .unwrap(); + expect![[r#" + None + "#]] + 
.assert_debug_eq(&ret); + + // Two keys in range + let ret = + recon::Store::first_and_last(&mut store, &random_event_id_min(), &random_event_id_max()) + .await + .unwrap(); + expect![[r#" + Some( + ( + EventId { + network_id: Some( + 2, + ), + separator: Some( + "b51217a029eb540d", + ), + controller: Some( + "4f16d8429ae87f86", + ), + stream_id: Some( + "ead3ca3c", + ), + event_height: Some( + 10, + ), + cid: Some( + "baeabeie2bcird7765t7646jcoatd72tfn2tscdaap7g6kvvy7k43s34aau", + ), + }, + EventId { + network_id: Some( + 2, + ), + separator: Some( + "b51217a029eb540d", + ), + controller: Some( + "4f16d8429ae87f86", + ), + stream_id: Some( + "ead3ca3c", + ), + event_height: Some( + 11, + ), + cid: Some( + "baeabeianftvrst5bja422dod6uf42pmwkwix6rprguanwsxylfut56e3ue", + ), + }, + ), + ) + "#]] + .assert_debug_eq(&ret); +} + +#[test(tokio::test)] +async fn store_value_for_key() { + let mut store = new_store().await; + let key = random_event_id(None, None); + let (_, store_value) = build_car_file(3).await; + recon::Store::insert( + &mut store, + ReconItem::new_with_value(&key, store_value.as_slice()), + ) + .await + .unwrap(); + let value = recon::Store::value_for_key(&mut store, &key) + .await + .unwrap() + .unwrap(); + assert_eq!(hex::encode(store_value), hex::encode(value)); +} +#[test(tokio::test)] +async fn keys_with_missing_value() { + let mut store = new_store().await; + let key = random_event_id( + Some(4), + Some("baeabeigc5edwvc47ul6belpxk3lgddipri5hw6f347s6ur4pdzwceprqbu"), + ); + recon::Store::insert(&mut store, ReconItem::new(&key, None)) + .await + .unwrap(); + let missing_keys = recon::Store::keys_with_missing_values( + &mut store, + (EventId::min_value(), EventId::max_value()).into(), + ) + .await + .unwrap(); + expect![[r#" + [ + EventId { + network_id: Some( + 2, + ), + separator: Some( + "b51217a029eb540d", + ), + controller: Some( + "4f16d8429ae87f86", + ), + stream_id: Some( + "ead3ca3c", + ), + event_height: Some( + 4, + ), + cid: Some( + "baeabeigc5edwvc47ul6belpxk3lgddipri5hw6f347s6ur4pdzwceprqbu", + ), + }, + ] + "#]] + .assert_debug_eq(&missing_keys); + + let (_, value) = build_car_file(2).await; + recon::Store::insert(&mut store, ReconItem::new(&key, Some(&value))) + .await + .unwrap(); + let missing_keys = recon::Store::keys_with_missing_values( + &mut store, + (EventId::min_value(), EventId::max_value()).into(), + ) + .await + .unwrap(); + expect![[r#" + [] + "#]] + .assert_debug_eq(&missing_keys); +} + +#[test(tokio::test)] +async fn read_value_as_block() { + let mut store = new_store().await; + let key = random_event_id(None, None); + let (blocks, store_value) = build_car_file(3).await; + recon::Store::insert( + &mut store, + ReconItem::new_with_value(&key, store_value.as_slice()), + ) + .await + .unwrap(); + let value = recon::Store::value_for_key(&mut store, &key) + .await + .unwrap() + .unwrap(); + assert_eq!(hex::encode(store_value), hex::encode(value)); + + // Read each block from the CAR + for block in blocks { + let value = iroh_bitswap::Store::get(&store, &block.cid).await.unwrap(); + assert_eq!(block, value); + } +}
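A minimal usage sketch (editor's addition, not part of the diff above), modeled on the read_value_as_block test: a CAR file inserted through the recon::Store API becomes individually addressable blocks through the iroh_bitswap::Store API. The demo function and its arguments are illustrative only.

use anyhow::Result;
use ceramic_core::{EventId, SqlitePool};
use ceramic_store::Store;
use cid::Cid;
use recon::ReconItem;

// Insert a serialized CAR file as the value for an event key, then read one
// of its blocks back by CID, as bitswap would when serving a peer.
async fn demo(event_id: EventId, car_bytes: &[u8], block_cid: Cid) -> Result<()> {
    let pool = SqlitePool::connect("sqlite::memory:").await?;
    let mut store = Store::new(pool).await?;

    // Recon side: the whole CAR file is the value for the key.
    recon::Store::insert(&mut store, ReconItem::new(&event_id, Some(car_bytes))).await?;

    // Bitswap side: every block of that CAR is now retrievable on its own.
    let block = iroh_bitswap::Store::get(&store, &block_cid).await?;
    assert_eq!(block.cid, block_cid);
    Ok(())
}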