diff --git a/.deployment/files/known-groups.json b/.deployment/files/known-groups.json
index cc90e9e7b..91c22fa98 100644
--- a/.deployment/files/known-groups.json
+++ b/.deployment/files/known-groups.json
@@ -1,6 +1,6 @@
 {
-  "ROLE_STUDENT": { "label": { "en": "Students", "de": "Studierende" }, "implies": [], "large": true },
-  "ROLE_STAFF": { "label": { "en": "Staff", "de": "Angestellte" }, "implies": [], "large": true },
-  "ROLE_INSTRUCTOR": { "label": { "en": "Lecturers", "de": "Vortragende" }, "implies": ["ROLE_STAFF"], "large": true },
-  "ROLE_TOBIRA_MODERATOR": { "label": { "en": "Moderators", "de": "Moderierende" }, "implies": ["ROLE_STAFF"], "large": false }
+  "ROLE_STUDENT": { "label": { "default": "Students", "de": "Studierende" }, "implies": [], "large": true },
+  "ROLE_STAFF": { "label": { "default": "Staff", "de": "Angestellte" }, "implies": [], "large": true },
+  "ROLE_INSTRUCTOR": { "label": { "default": "Lecturers", "de": "Vortragende" }, "implies": ["ROLE_STAFF"], "large": true },
+  "ROLE_TOBIRA_MODERATOR": { "label": { "default": "Moderators", "de": "Moderierende" }, "implies": ["ROLE_STAFF"], "large": false }
 }
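Reviewer note (not part of the patch): the `"en"` key is renamed to `"default"` here and in `config.toml` below, presumably so the fallback label is no longer tied to English while language codes like `de` stay as optional overrides. A hypothetical one-liner to sanity-check that every group label carries the new fallback key:

```sh
# Hypothetical check, assumes jq is installed: every group's label object
# must contain the "default" fallback key after this change.
jq -e 'all(.[]; .label | has("default"))' .deployment/files/known-groups.json
```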
diff --git a/.deployment/setup-server.yml b/.deployment/setup-server.yml
index 42614bf6e..a5ed130d6 100644
--- a/.deployment/setup-server.yml
+++ b/.deployment/setup-server.yml
@@ -33,10 +33,10 @@
     - name: install MeiliSearch
       become: true
       get_url:
-        url: https://github.com/meilisearch/meilisearch/releases/download/v1.4.2/meilisearch-linux-amd64
+        url: https://github.com/meilisearch/meilisearch/releases/download/v1.12.6/meilisearch-linux-amd64
         dest: /opt/meili/meilisearch
         mode: '0755'
-        checksum: 'sha256:b54b9ace213b0d45558c5d0e79710f718b63d2e29c190fb95be01dc27eb1ca5c'
+        checksum: 'sha256:a9ca34a578fd1b3f5e68dab69f396bcc02cf0bfc1409c377efde25df43cb6809'
       register: meili_updated
       notify: restart MeiliSearch
 
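Reviewer note (not part of the patch): the MeiliSearch binary is pinned by SHA-256, so the checksum must be bumped together with the release URL. One way to double-check the new pin before committing it, assuming `curl` and `sha256sum` are available locally:

```sh
# Download the pinned release and print its digest; it should match the
# checksum in the playbook above:
# a9ca34a578fd1b3f5e68dab69f396bcc02cf0bfc1409c377efde25df43cb6809
curl -fsSL -o meilisearch \
  https://github.com/meilisearch/meilisearch/releases/download/v1.12.6/meilisearch-linux-amd64
sha256sum meilisearch
```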
"eb47de1e80c2b463c735db5b217a0ddc39d612e7ac9e2e96a5aed1f57616c1cb" +checksum = "3b2d16507662817a6a20a9ea92df6652ee4f94f914589377d69f3b21bc5798a9" dependencies = [ "utf8parse", ] [[package]] name = "anstyle-query" -version = "1.1.1" +version = "1.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6d36fc52c7f6c869915e99412912f22093507da8d9e942ceaf66fe4b7c14422a" +checksum = "79947af37f4177cfead1110013d678905c37501914fba0efea834c3fe9a8d60c" dependencies = [ - "windows-sys 0.52.0", + "windows-sys 0.59.0", ] [[package]] name = "anstyle-wincon" -version = "3.0.4" +version = "3.0.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5bf74e1b6e971609db8ca7a9ce79fd5768ab6ae46441c572e46cf596f59e57f8" +checksum = "2109dbce0e72be3ec00bed26e6a7479ca384ad226efdd66db8fa2e3a38c83125" dependencies = [ "anstyle", - "windows-sys 0.52.0", + "windows-sys 0.59.0", ] [[package]] name = "anyhow" -version = "1.0.91" +version = "1.0.94" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c042108f3ed77fd83760a5fd79b53be043192bb3b9dba91d8c574c0ada7850c8" +checksum = "c1fd03a028ef38ba2276dce7e33fcd6369c158a1bca17946c4b1b701891c1ff7" dependencies = [ "backtrace", ] @@ -142,23 +142,6 @@ version = "2.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3d62b7694a562cdf5a74227903507c56ab2cc8bdd1f781ed5cb4cf9c9f810bfc" -[[package]] -name = "ascii" -version = "0.9.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eab1c04a571841102f5345a8fc0f6bb3d31c315dec879b5c6e42e40ce7ffa34e" - -[[package]] -name = "async-channel" -version = "1.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "81953c529336010edd6d8e358f886d9581267795c61b19475b71314bffa46d35" -dependencies = [ - "concurrent-queue", - "event-listener", - "futures-core", -] - [[package]] name = "async-trait" version = "0.1.83" @@ -167,7 +150,7 @@ checksum = "721cae7de5c34fbb2acd27e21e6d2cf7b886dce0c27388d46c4e6c47ea4318dd" dependencies = [ "proc-macro2", "quote", - "syn 2.0.82", + "syn", ] [[package]] @@ -176,6 +159,18 @@ version = "1.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1505bd5d3d116872e7271a6d4e16d81d0c8570876c8de68093a09ac269d8aac0" +[[package]] +name = "auto_enums" +version = "0.8.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "459b77b7e855f875fd15f101064825cd79eb83185a961d66e6298560126facfb" +dependencies = [ + "derive_utils", + "proc-macro2", + "quote", + "syn", +] + [[package]] name = "autocfg" version = "1.4.0" @@ -194,7 +189,7 @@ dependencies = [ "miniz_oxide", "object", "rustc-demangle", - "windows-targets 0.52.6", + "windows-targets", ] [[package]] @@ -221,16 +216,6 @@ version = "1.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8c3c1a368f70d6cf7302d78f8f7093da241fb8e8807c05cc9e51a125895a6d5b" -[[package]] -name = "bcder" -version = "0.7.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c627747a6774aab38beb35990d88309481378558875a41da1a4b2e373c906ef0" -dependencies = [ - "bytes", - "smallvec", -] - [[package]] name = "bincode" version = "1.3.3" @@ -240,12 +225,6 @@ dependencies = [ "serde", ] -[[package]] -name = "bitflags" -version = "1.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" - [[package]] name = "bitflags" version = "2.6.0" @@ -284,9 +263,9 @@ dependencies = [ [[package]] name = "bstr" -version = "1.10.0" +version = "1.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "40723b8fb387abc38f4f4a37c09073622e41dd12327033091ef8950659e6dc0c" +checksum = "786a307d683a5bf92e6fd5fd69a7eb613751668d1d8d67d802846dfe367c62c8" dependencies = [ "memchr", "regex-automata", @@ -345,21 +324,15 @@ checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" [[package]] name = "bytes" -version = "1.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9ac0150caa2ae65ca5bd83f25c7de183dea78d4d366469f148435e2acfbad0da" - -[[package]] -name = "castaway" -version = "0.1.2" +version = "1.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a2698f953def977c68f935bb0dfa959375ad4638570e969e2f1e9f433cbf1af6" +checksum = "325918d6fe32f23b19878fe4b34794ae41fc19ddbe53b10571a4874d44ffd39b" [[package]] name = "cc" -version = "1.1.31" +version = "1.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c2e7962b54006dcfcc61cb72735f4d89bb97061dd6a7ed882ec6b8ee53714c6f" +checksum = "27f657647bcff5394bf56c7317665bbf790a137a50eaaa5c6bfbb9e27a518f2d" dependencies = [ "jobserver", "libc", @@ -380,22 +353,22 @@ checksum = "613afe47fcd5fac7ccf1db93babcb082c5994d996f20b8b159f2ad1658eb5724" [[package]] name = "chrono" -version = "0.4.38" +version = "0.4.39" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a21f936df1771bf62b77f047b726c4625ff2e8aa607c01ec06e5a05bd8463401" +checksum = "7e36cc9d416881d2e24f9a963be5fb1cd90966419ac844274161d10488b3e825" dependencies = [ "android-tzdata", "iana-time-zone", "num-traits", "serde", - "windows-targets 0.52.6", + "windows-targets", ] [[package]] name = "clap" -version = "4.5.20" +version = "4.5.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b97f376d85a664d5837dbae44bf546e6477a679ff6610010f17276f686d867e8" +checksum = "3135e7ec2ef7b10c6ed8950f0f792ed96ee093fa088608f1c76e569722700c84" dependencies = [ "clap_builder", "clap_derive", @@ -403,9 +376,9 @@ dependencies = [ [[package]] name = "clap_builder" -version = "4.5.20" +version = "4.5.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "19bc80abd44e4bed93ca373a0704ccbd1b710dc5749406201bb018272808dc54" +checksum = "30582fc632330df2bd26877bde0c1f4470d57c582bbc070376afcd04d8cb4838" dependencies = [ "anstream", "anstyle", @@ -422,41 +395,29 @@ dependencies = [ "heck", "proc-macro2", "quote", - "syn 2.0.82", + "syn", ] [[package]] name = "clap_lex" -version = "0.7.2" +version = "0.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1462739cb27611015575c0c11df5df7601141071f07518d56fcc1be504cbec97" +checksum = "f46ad14479a25103f283c0f10005961cf086d8dc42205bb44c46ac563475dca6" [[package]] name = "colorchoice" -version = "1.0.2" +version = "1.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d3fd119d74b830634cea2a0f58bbd0d54540518a14397557951e79340abc28c0" +checksum = "5b63caa9aa9397e2d9480a9b13673856c78d8ac123288526c37d7839f2a86990" [[package]] name = "combine" -version = "3.8.1" +version = "4.6.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"da3da6baa321ec19e1cc41d31bf599f00c783d0517095cdaf0332e3fe8d20680" +checksum = "ba5a308b75df32fe02788e748662718f03fde005016435c444eea572398219fd" dependencies = [ - "ascii", - "byteorder", - "either", + "bytes", "memchr", - "unreachable", -] - -[[package]] -name = "concurrent-queue" -version = "2.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4ca0197aee26d1ae37445ee532fefce43251d24cc7c166799f4d46817f1d3973" -dependencies = [ - "crossbeam-utils", ] [[package]] @@ -479,7 +440,7 @@ dependencies = [ "heck", "proc-macro2", "quote", - "syn 2.0.82", + "syn", ] [[package]] @@ -509,9 +470,9 @@ dependencies = [ [[package]] name = "core-foundation" -version = "0.9.4" +version = "0.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "91e195e091a93c46f7102ec7818a2aa394e1e1771c3ab4825963fa03e45afb8f" +checksum = "b55271e5c8c478ad3f38ad24ef34923091e0548492a266d19b3c0b4d82574c63" dependencies = [ "core-foundation-sys", "libc", @@ -525,9 +486,9 @@ checksum = "773648b94d0e5d620f64f280777445740e61fe701025087ec8b57f45c791888b" [[package]] name = "cpufeatures" -version = "0.2.14" +version = "0.2.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "608697df725056feaccfa42cffdaeeec3fccc4ffc38358ecd19b243e716a78e0" +checksum = "16b80225097f2e5ae4e7179dd2266824648f3e2f49d9134d584b76389d31c4c3" dependencies = [ "libc", ] @@ -541,12 +502,6 @@ dependencies = [ "cfg-if", ] -[[package]] -name = "crossbeam-utils" -version = "0.8.20" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "22ec99545bb0ed0ea7bb9b8e1e9122ea386ff8a48c0922e43f36d45ab09e0e80" - [[package]] name = "crypto-bigint" version = "0.5.5" @@ -569,44 +524,12 @@ dependencies = [ "typenum", ] -[[package]] -name = "curl" -version = "0.4.47" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d9fb4d13a1be2b58f14d60adba57c9834b78c62fd86c3e76a148f732686e9265" -dependencies = [ - "curl-sys", - "libc", - "openssl-probe", - "openssl-sys", - "schannel", - "socket2", - "windows-sys 0.52.0", -] - -[[package]] -name = "curl-sys" -version = "0.4.77+curl-8.10.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f469e8a5991f277a208224f6c7ad72ecb5f986e36d09ae1f2c1bb9259478a480" -dependencies = [ - "cc", - "libc", - "libnghttp2-sys", - "libz-sys", - "openssl-sys", - "pkg-config", - "vcpkg", - "windows-sys 0.52.0", -] - [[package]] name = "deadpool" -version = "0.10.0" +version = "0.12.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fb84100978c1c7b37f09ed3ce3e5f843af02c2a2c431bae5b19230dad2c1b490" +checksum = "6541a3916932fe57768d4be0b1ffb5ec7cbf74ca8c903fdfd5c0fe8aa958f0ed" dependencies = [ - "async-trait", "deadpool-runtime", "num_cpus", "tokio", @@ -614,11 +537,13 @@ dependencies = [ [[package]] name = "deadpool-postgres" -version = "0.12.1" +version = "0.14.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bda39fa1cfff190d8924d447ad04fd22772c250438ca5ce1dfb3c80621c05aaa" +checksum = "1ab8a4ea925ce79678034870834602a2980f4b88c09e97feb266496dbb4493d2" dependencies = [ + "async-trait", "deadpool", + "getrandom", "tokio", "tokio-postgres", "tracing", @@ -640,10 +565,23 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f55bf8e7b65898637379c1b74eb1551107c8294ed26d855ceb9fd1a09cfc9bc0" dependencies = [ "const-oid", + "der_derive", + "flagset", 
"pem-rfc7468", "zeroize", ] +[[package]] +name = "der_derive" +version = "0.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8034092389675178f570469e6c3b0465d3d30b4505c294a6550db47f3c17ad18" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + [[package]] name = "deranged" version = "0.3.11" @@ -656,13 +594,13 @@ dependencies = [ [[package]] name = "derive_utils" -version = "0.11.2" +version = "0.14.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "532b4c15dccee12c7044f1fcad956e98410860b22231e44a3b827464797ca7bf" +checksum = "65f152f4b8559c4da5d574bafc7af85454d706b4c5fe8b530d508cacbb6807ea" dependencies = [ "proc-macro2", "quote", - "syn 1.0.109", + "syn", ] [[package]] @@ -677,6 +615,17 @@ dependencies = [ "subtle", ] +[[package]] +name = "displaydoc" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + [[package]] name = "dtoa" version = "1.0.9" @@ -730,15 +679,6 @@ dependencies = [ "zeroize", ] -[[package]] -name = "encoding_rs" -version = "0.8.34" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b45de904aa0b010bce2ab45264d0631681847fa7b6f2eaa7dab7619943bc4f59" -dependencies = [ - "cfg-if", -] - [[package]] name = "equivalent" version = "1.0.1" @@ -747,20 +687,14 @@ checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5" [[package]] name = "errno" -version = "0.3.9" +version = "0.3.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "534c5cf6194dfab3db3242765c03bbe257cf92f22b38f6bc0c58d59108a820ba" +checksum = "33d852cb9b869c2a9b3df2f71a3074817f01e1844f839a144f5fcef059a4eb5d" dependencies = [ "libc", - "windows-sys 0.52.0", + "windows-sys 0.59.0", ] -[[package]] -name = "event-listener" -version = "2.5.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0206175f82b8d6bf6652ff7d71a1e27fd2e4efde587fd368662814d6ec1d9ce0" - [[package]] name = "fallible-iterator" version = "0.2.0" @@ -773,15 +707,6 @@ version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "dd2e7510819d6fbf51a5545c8f922716ecfb14df168a3242f7d33e0239efe6a1" -[[package]] -name = "fastrand" -version = "1.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e51093e27b0797c359783294ca4f0a911c270184cb10f85783b118614a1501be" -dependencies = [ - "instant", -] - [[package]] name = "ff" version = "0.13.0" @@ -792,11 +717,17 @@ dependencies = [ "subtle", ] +[[package]] +name = "flagset" +version = "0.4.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b3ea1ec5f8307826a5b71094dd91fc04d4ae75d5709b20ad351c7fb4815c86ec" + [[package]] name = "flate2" -version = "1.0.34" +version = "1.0.35" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a1b589b4dc103969ad3cf85c950899926ec64300a1a46d76c03a6072957036f0" +checksum = "c936bfdafb507ebbf50b8074c54fa31c5be9a1e7e5f467dd659697041407d07c" dependencies = [ "crc32fast", "miniz_oxide", @@ -848,17 +779,6 @@ version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "05f29059c0c2090612e8d742178b0580d2dc940c837851ad723096f87af6663e" -[[package]] -name = "futures-enum" -version = "0.1.17" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "3422d14de7903a52e9dbc10ae05a7e14445ec61890100e098754e120b2bd7b1e" -dependencies = [ - "derive_utils", - "quote", - "syn 1.0.109", -] - [[package]] name = "futures-executor" version = "0.3.31" @@ -876,21 +796,6 @@ version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9e5c1b78ca4aae1ac06c48a526a655760685149f0d465d21f37abfe57ce075c6" -[[package]] -name = "futures-lite" -version = "1.13.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "49a9d51ce47660b1e808d3c990b4709f2f415d928835a17dfd16991515c46bce" -dependencies = [ - "fastrand", - "futures-core", - "futures-io", - "memchr", - "parking", - "pin-project-lite", - "waker-fn", -] - [[package]] name = "futures-macro" version = "0.3.31" @@ -899,7 +804,7 @@ checksum = "162ee34ebcb7c64a8abebc059ce0fee27c2262618d7b60ed8faf72fef13c3650" dependencies = [ "proc-macro2", "quote", - "syn 2.0.82", + "syn", ] [[package]] @@ -950,8 +855,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c4567c8db10ae91089c99af84c68c38da3ec2f087c3f82960bcdbf3656b6f4d7" dependencies = [ "cfg-if", + "js-sys", "libc", "wasi", + "wasm-bindgen", ] [[package]] @@ -966,7 +873,7 @@ version = "0.19.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b903b73e45dc0c6c596f2d37eccece7c1c8bb6e4407b001096387c63d0d93724" dependencies = [ - "bitflags 2.6.0", + "bitflags", "libc", "libgit2-sys", "log", @@ -981,12 +888,12 @@ checksum = "d2fabcfbdc87f4758337ca535fb41a6d701b65693ce38287d856d1674551ec9b" [[package]] name = "graphql-parser" -version = "0.3.0" +version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d1abd4ce5247dfc04a03ccde70f87a048458c9356c7e41d21ad8c407b3dde6f2" +checksum = "7a818c0d883d7c0801df27be910917750932be279c7bc82dc541b8769425f409" dependencies = [ "combine", - "thiserror", + "thiserror 1.0.69", ] [[package]] @@ -1002,17 +909,17 @@ dependencies = [ [[package]] name = "h2" -version = "0.4.6" +version = "0.4.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "524e8ac6999421f49a846c2d4411f337e53497d8ec55d67753beffa43c5d9205" +checksum = "ccae279728d634d083c00f6099cb58f01cc99c145b84b8be2f6c74618d79922e" dependencies = [ "atomic-waker", "bytes", "fnv", "futures-core", "futures-sink", - "http 1.1.0", - "indexmap 2.6.0", + "http", + "indexmap", "slab", "tokio", "tokio-util", @@ -1021,15 +928,9 @@ dependencies = [ [[package]] name = "hashbrown" -version = "0.12.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888" - -[[package]] -name = "hashbrown" -version = "0.15.0" +version = "0.15.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e087f84d4f86bf4b218b927129862374b72199ae7d8657835f1e89000eea4fb" +checksum = "bf151400ff0baff5465007dd2f3e717f3fe502074ca563069ce3a6629d07b289" [[package]] name = "heck" @@ -1080,20 +981,9 @@ dependencies = [ [[package]] name = "http" -version = "0.2.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "601cbb57e577e2f5ef5be8e7b83f0f63994f25aa94d673e54a92d5c516d101f1" -dependencies = [ - "bytes", - "fnv", - "itoa", -] - -[[package]] -name = "http" -version = "1.1.0" +version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "21b9ddb458710bc376481b842f5da65cdf31522de232c1ca8146abce2a358258" +checksum = "f16ca2af56261c99fba8bac40a10251ce8188205a4c448fbb745a2e4daa76fea" dependencies = [ "bytes", "fnv", @@ -1107,7 +997,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1efedce1fb8e6913f23e0c92de8e62cd5b772a67e7b3946df930a62566c93184" dependencies = [ "bytes", - "http 1.1.0", + "http", ] [[package]] @@ -1118,7 +1008,7 @@ checksum = "793429d76616a256bcb62c2a2ec2bed781c8307e797e2598c50010f2bee2544f" dependencies = [ "bytes", "futures-util", - "http 1.1.0", + "http", "http-body", "pin-project-lite", ] @@ -1137,15 +1027,15 @@ checksum = "df3b46402a9d5adb4c86a0cf463f42e19994e3ee891101b1841f30a545cb49a9" [[package]] name = "hyper" -version = "1.5.0" +version = "1.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bbbff0a806a4728c99295b254c8838933b5b082d75e3cb70c8dab21fdfbcfa9a" +checksum = "97818827ef4f364230e16705d4706e2897df2bb60617d6ca15d598025a3c481f" dependencies = [ "bytes", "futures-channel", "futures-util", "h2", - "http 1.1.0", + "http", "http-body", "httparse", "httpdate", @@ -1158,12 +1048,12 @@ dependencies = [ [[package]] name = "hyper-rustls" -version = "0.26.0" +version = "0.27.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a0bea761b46ae2b24eb4aef630d8d1c398157b6fc29e6350ecf090a0b70c952c" +checksum = "08afdbb5c31130e3034af566421053ab03787c640246a446327f550d11bcb333" dependencies = [ "futures-util", - "http 1.1.0", + "http", "hyper", "hyper-util", "log", @@ -1173,18 +1063,19 @@ dependencies = [ "tokio", "tokio-rustls", "tower-service", + "webpki-roots", ] [[package]] name = "hyper-util" -version = "0.1.9" +version = "0.1.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "41296eb09f183ac68eec06e03cdbea2e759633d4067b2f6552fc2e009bcad08b" +checksum = "df2dcfbe0677734ab2f3ffa7fa7bfd4706bfdc1ef393f2ee30184aed67e631b4" dependencies = [ "bytes", "futures-channel", "futures-util", - "http 1.1.0", + "http", "http-body", "hyper", "pin-project-lite", @@ -1233,78 +1124,167 @@ dependencies = [ ] [[package]] -name = "idna" -version = "0.5.0" +name = "icu_collections" +version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "634d9b1461af396cad843f47fdba5597a4f9e6ddd4bfb6ff5d85028c25cb12f6" +checksum = "db2fa452206ebee18c4b5c2274dbf1de17008e874b4dc4f0aea9d01ca79e4526" dependencies = [ - "unicode-bidi", - "unicode-normalization", + "displaydoc", + "yoke", + "zerofrom", + "zerovec", ] [[package]] -name = "indexmap" -version = "1.9.3" +name = "icu_locid" +version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bd070e393353796e801d209ad339e89596eb4c8d430d18ede6a1cced8fafbd99" +checksum = "13acbb8371917fc971be86fc8057c41a64b521c184808a698c02acc242dbf637" dependencies = [ - "autocfg", - "hashbrown 0.12.3", - "serde", + "displaydoc", + "litemap", + "tinystr", + "writeable", + "zerovec", ] [[package]] -name = "indexmap" -version = "2.6.0" +name = "icu_locid_transform" +version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "707907fe3c25f5424cce2cb7e1cbcafee6bdbe735ca90ef77c29e84591e5b9da" +checksum = "01d11ac35de8e40fdeda00d9e1e9d92525f3f9d887cdd7aa81d727596788b54e" dependencies = [ - "equivalent", - "hashbrown 0.15.0", + "displaydoc", + "icu_locid", + "icu_locid_transform_data", + "icu_provider", + "tinystr", + "zerovec", ] 
[[package]] -name = "instant" -version = "0.1.13" +name = "icu_locid_transform_data" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fdc8ff3388f852bede6b579ad4e978ab004f139284d7b28715f773507b946f6e" + +[[package]] +name = "icu_normalizer" +version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e0242819d153cba4b4b05a5a8f2a7e9bbf97b6055b2a002b395c96b5ff3c0222" +checksum = "19ce3e0da2ec68599d193c93d088142efd7f9c5d6fc9b803774855747dc6a84f" dependencies = [ - "cfg-if", + "displaydoc", + "icu_collections", + "icu_normalizer_data", + "icu_properties", + "icu_provider", + "smallvec", + "utf16_iter", + "utf8_iter", + "write16", + "zerovec", ] [[package]] -name = "is_terminal_polyfill" -version = "1.70.1" +name = "icu_normalizer_data" +version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7943c866cc5cd64cbc25b2e01621d07fa8eb2a1a23160ee81ce38704e97b8ecf" +checksum = "f8cafbf7aa791e9b22bec55a167906f9e1215fd475cd22adfcf660e03e989516" [[package]] -name = "isahc" -version = "1.7.2" +name = "icu_properties" +version = "1.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "334e04b4d781f436dc315cb1e7515bd96826426345d498149e4bde36b67f8ee9" +checksum = "93d6020766cfc6302c15dbbc9c8778c37e62c14427cb7f6e601d849e092aeef5" dependencies = [ - "async-channel", - "castaway", - "crossbeam-utils", - "curl", - "curl-sys", - "encoding_rs", - "event-listener", - "futures-lite", - "http 0.2.12", - "log", - "mime", - "once_cell", - "polling", - "slab", - "sluice", - "tracing", - "tracing-futures", - "url", - "waker-fn", + "displaydoc", + "icu_collections", + "icu_locid_transform", + "icu_properties_data", + "icu_provider", + "tinystr", + "zerovec", +] + +[[package]] +name = "icu_properties_data" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "67a8effbc3dd3e4ba1afa8ad918d5684b8868b3b26500753effea8d2eed19569" + +[[package]] +name = "icu_provider" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6ed421c8a8ef78d3e2dbc98a973be2f3770cb42b606e3ab18d6237c4dfde68d9" +dependencies = [ + "displaydoc", + "icu_locid", + "icu_provider_macros", + "stable_deref_trait", + "tinystr", + "writeable", + "yoke", + "zerofrom", + "zerovec", +] + +[[package]] +name = "icu_provider_macros" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1ec89e9337638ecdc08744df490b221a7399bf8d164eb52a665454e60e075ad6" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "idna" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "686f825264d630750a544639377bae737628043f20d38bbc029e8f29ea968a7e" +dependencies = [ + "idna_adapter", + "smallvec", + "utf8_iter", +] + +[[package]] +name = "idna_adapter" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "daca1df1c957320b2cf139ac61e7bd64fed304c5040df000a745aa1de3b4ef71" +dependencies = [ + "icu_normalizer", + "icu_properties", +] + +[[package]] +name = "indexmap" +version = "2.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "62f822373a4fe84d4bb149bf54e584a7f4abec90e072ed49cda0edea5b95471f" +dependencies = [ + "equivalent", + "hashbrown", + "serde", ] +[[package]] +name = "ipnet" +version = 
"2.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ddc24109865250148c2e0f3d25d4f0f479571723792d3802153c60922a4fb708" + +[[package]] +name = "is_terminal_polyfill" +version = "1.70.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7943c866cc5cd64cbc25b2e01621d07fa8eb2a1a23160ee81ce38704e97b8ecf" + [[package]] name = "iso8601" version = "0.6.1" @@ -1316,9 +1296,9 @@ dependencies = [ [[package]] name = "itoa" -version = "1.0.11" +version = "1.0.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "49f1f14873335454500d59611f1cf4a4b0f786f9ac11f4312a78e4cf2566695b" +checksum = "d75a2a4b1b190afb6f5425f10f6a8f959d2ea0b9c2b1d79553551850539e4674" [[package]] name = "jobserver" @@ -1331,54 +1311,58 @@ dependencies = [ [[package]] name = "js-sys" -version = "0.3.72" +version = "0.3.76" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6a88f1bda2bd75b0452a14784937d796722fdebfe50df998aeb3f0b7603019a9" +checksum = "6717b6b5b077764fb5966237269cb3c64edddde4b14ce42647430a78ced9e7b7" dependencies = [ + "once_cell", "wasm-bindgen", ] [[package]] name = "jsonwebtoken" -version = "8.3.0" +version = "9.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6971da4d9c3aa03c3d8f3ff0f4155b534aad021292003895a469716b2a230378" +checksum = "b9ae10193d25051e74945f1ea2d0b42e03cc3b890f7e4cc5faa44997d808193f" dependencies = [ "base64 0.21.7", - "ring 0.16.20", + "js-sys", + "ring", "serde", "serde_json", ] [[package]] name = "juniper" -version = "0.15.12" +version = "0.16.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "875dca5a0c08b1521e1bb0ed940e9955a9f38971008aaa2a9f64a2ac6b59e1b5" +checksum = "943306315b1a7a03d27af9dfb0c288d9f4da8830c17df4bceb7d50a47da0982c" dependencies = [ + "anyhow", "async-trait", + "auto_enums", "chrono", "fnv", "futures", - "futures-enum", "graphql-parser", - "indexmap 1.9.3", + "indexmap", "juniper_codegen", "serde", "smartstring", "static_assertions", + "void", ] [[package]] name = "juniper_codegen" -version = "0.15.9" +version = "0.16.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aee97671061ad50301ba077d054d295e01d31a1868fbd07902db651f987e71db" +checksum = "760dbe46660494d469023d661e8d268f413b2cb68c999975dcc237407096a693" dependencies = [ - "proc-macro-error", "proc-macro2", "quote", - "syn 1.0.109", + "syn", + "url", ] [[package]] @@ -1389,9 +1373,9 @@ checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe" [[package]] name = "libc" -version = "0.2.161" +version = "0.2.168" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8e9489c2807c139ffd9c1794f4af0ebe86a828db53ecdc7fea2111d0fed085d1" +checksum = "5aaeb2981e0606ca11d79718f8bb01164f1d6ed75080182d3abf017e6d244b6d" [[package]] name = "libgit2-sys" @@ -1406,25 +1390,15 @@ dependencies = [ ] [[package]] -name = "libnghttp2-sys" -version = "0.1.10+1.61.0" +name = "libz-sys" +version = "1.1.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "959c25552127d2e1fa72f0e52548ec04fc386e827ba71a7bd01db46a447dc135" +checksum = "d2d16453e800a8cf6dd2fc3eb4bc99b786a9b90c663b8559a5b1a041bf89e472" dependencies = [ "cc", "libc", -] - -[[package]] -name = "libz-sys" -version = "1.1.20" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"d2d16453e800a8cf6dd2fc3eb4bc99b786a9b90c663b8559a5b1a041bf89e472" -dependencies = [ - "cc", - "libc", - "pkg-config", - "vcpkg", + "pkg-config", + "vcpkg", ] [[package]] @@ -1433,6 +1407,12 @@ version = "0.4.14" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "78b3ae25bc7c8c38cec158d1f2757ee79e9b3740fbc7ccf0e59e4b08d793fa89" +[[package]] +name = "litemap" +version = "0.7.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4ee93343901ab17bd981295f2cf0026d4ad018c7c31ba84549a4ddbb47a45104" + [[package]] name = "litrs" version = "0.2.3" @@ -1479,38 +1459,35 @@ dependencies = [ [[package]] name = "meilisearch-index-setting-macro" -version = "0.24.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b1f2124b55b9cb28e6a08b28854f4e834a51333cbdc2f72935f401efa686c13c" +version = "0.27.1" dependencies = [ "convert_case", "proc-macro2", "quote", - "syn 1.0.109", + "structmeta", + "syn", ] [[package]] name = "meilisearch-sdk" -version = "0.24.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2257ea8ed24b079c21570f473e58cccc3de23b46cee331fc513fccdc3f1ae5a1" +version = "0.27.1" dependencies = [ "async-trait", + "bytes", "either", "futures", "futures-io", - "isahc", "iso8601", - "js-sys", "jsonwebtoken", "log", "meilisearch-index-setting-macro", + "pin-project-lite", + "reqwest", "serde", "serde_json", - "thiserror", + "thiserror 1.0.69", "time", "uuid", - "wasm-bindgen", "wasm-bindgen-futures", "web-sys", "yaup", @@ -1555,11 +1532,10 @@ dependencies = [ [[package]] name = "mio" -version = "1.0.2" +version = "1.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "80e04d1dcff3aae0704555fe5fee3bcfaf3d1fdf8a7e521d5b9d2b42acb52cec" +checksum = "2886843bf800fba2e3377cff24abf6379b4c4d5c6681eaf9ea5b0d15090450bd" dependencies = [ - "hermit-abi", "libc", "wasi", "windows-sys 0.52.0", @@ -1660,28 +1636,6 @@ version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ff011a302c396a5197692431fc1948019154afc178baf7d8e37367442a4601cf" -[[package]] -name = "openssl-src" -version = "300.4.0+3.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a709e02f2b4aca747929cca5ed248880847c650233cf8b8cdc48f40aaf4898a6" -dependencies = [ - "cc", -] - -[[package]] -name = "openssl-sys" -version = "0.9.104" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "45abf306cbf99debc8195b66b7346498d7b10c210de50418b5ccd7ceba08c741" -dependencies = [ - "cc", - "libc", - "openssl-src", - "pkg-config", - "vcpkg", -] - [[package]] name = "overload" version = "0.1.1" @@ -1732,15 +1686,9 @@ dependencies = [ "by_address", "proc-macro2", "quote", - "syn 2.0.82", + "syn", ] -[[package]] -name = "parking" -version = "2.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f38d5652c16fde515bb1ecef450ab0f6a219d619a7274976324d5e377f7dceba" - [[package]] name = "parking_lot" version = "0.12.3" @@ -1761,7 +1709,7 @@ dependencies = [ "libc", "redox_syscall", "smallvec", - "windows-targets 0.52.6", + "windows-targets", ] [[package]] @@ -1771,15 +1719,32 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "57c0d7b74b563b49d38dae00a0c37d4d6de9b432382b2892f0574ddcae73fd0a" [[package]] -name = "pem" -version = "3.0.4" +name = "peg" +version = "0.8.4" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "8e459365e590736a54c3fa561947c84837534b8e9af6fc5bf781307e82658fae" +checksum = "295283b02df346d1ef66052a757869b2876ac29a6bb0ac3f5f7cd44aebe40e8f" dependencies = [ - "base64 0.22.1", - "serde", + "peg-macros", + "peg-runtime", ] +[[package]] +name = "peg-macros" +version = "0.8.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bdad6a1d9cf116a059582ce415d5f5566aabcd4008646779dab7fdc2a9a9d426" +dependencies = [ + "peg-runtime", + "proc-macro2", + "quote", +] + +[[package]] +name = "peg-runtime" +version = "0.8.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3aeb8f54c078314c2065ee649a7241f46b9d8e418e1a9581ba0546657d7aa3a" + [[package]] name = "pem-rfc7468" version = "0.7.0" @@ -1813,31 +1778,11 @@ dependencies = [ "siphasher", ] -[[package]] -name = "pin-project" -version = "1.1.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "baf123a161dde1e524adf36f90bc5d8d3462824a9c43553ad07a8183161189ec" -dependencies = [ - "pin-project-internal", -] - -[[package]] -name = "pin-project-internal" -version = "1.1.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a4502d8515ca9f32f1fb543d987f63d95a14934883db45bdb48060b6b69257f8" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.82", -] - [[package]] name = "pin-project-lite" -version = "0.2.14" +version = "0.2.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bda66fc9667c18cb2758a2ac84d1167245054bcf85d5d1aaa6923f45801bdd02" +checksum = "915a1e146535de9163f3987b8944ed8cf49a18bb0056bcebcdcece385cece4ff" [[package]] name = "pin-utils" @@ -1861,22 +1806,6 @@ version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "953ec861398dccce10c670dfeaf3ec4911ca479e9c02154b3a215178c5f566f2" -[[package]] -name = "polling" -version = "2.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4b2d323e8ca7996b3e23126511a523f7e62924d93ecd5ae73b333815b0eb3dce" -dependencies = [ - "autocfg", - "bitflags 1.3.2", - "cfg-if", - "concurrent-queue", - "libc", - "log", - "pin-project-lite", - "windows-sys 0.48.0", -] - [[package]] name = "postgres-derive" version = "0.4.6" @@ -1886,7 +1815,7 @@ dependencies = [ "heck", "proc-macro2", "quote", - "syn 2.0.82", + "syn", ] [[package]] @@ -1947,35 +1876,11 @@ dependencies = [ "elliptic-curve", ] -[[package]] -name = "proc-macro-error" -version = "1.0.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "da25490ff9892aab3fcf7c36f08cfb902dd3e71ca0f9f9517bea02a73a5ce38c" -dependencies = [ - "proc-macro-error-attr", - "proc-macro2", - "quote", - "syn 1.0.109", - "version_check", -] - -[[package]] -name = "proc-macro-error-attr" -version = "1.0.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a1be40180e52ecc98ad80b184934baf3d0d29f979574e439af5a55274b35f869" -dependencies = [ - "proc-macro2", - "quote", - "version_check", -] - [[package]] name = "proc-macro2" -version = "1.0.89" +version = "1.0.92" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f139b0662de085916d1fb67d2b4169d1addddda1919e696f3252b740b629986e" +checksum = "37d3544b3f2748c54e147655edb5025752e2303145b5aefb3c3ea2c78b973bb0" dependencies = [ "unicode-ident", ] @@ -1986,7 +1891,7 @@ version = "0.17.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "cc5b72d8145275d844d4b5f6d4e1eef00c8cd889edb6035c21675d1bb1f45c9f" dependencies = [ - "bitflags 2.6.0", + "bitflags", "chrono", "flate2", "hex", @@ -2000,7 +1905,7 @@ version = "0.17.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "239df02d8349b06fc07398a3a1697b06418223b1c7725085e801e7c0fc6a12ec" dependencies = [ - "bitflags 2.6.0", + "bitflags", "chrono", "hex", ] @@ -2025,7 +1930,59 @@ checksum = "440f724eba9f6996b75d63681b0a92b06947f1457076d503a4d2e2c8f56442b8" dependencies = [ "proc-macro2", "quote", - "syn 2.0.82", + "syn", +] + +[[package]] +name = "quinn" +version = "0.11.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "62e96808277ec6f97351a2380e6c25114bc9e67037775464979f3037c92d05ef" +dependencies = [ + "bytes", + "pin-project-lite", + "quinn-proto", + "quinn-udp", + "rustc-hash", + "rustls", + "socket2", + "thiserror 2.0.6", + "tokio", + "tracing", +] + +[[package]] +name = "quinn-proto" +version = "0.11.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a2fe5ef3495d7d2e377ff17b1a8ce2ee2ec2a18cde8b6ad6619d65d0701c135d" +dependencies = [ + "bytes", + "getrandom", + "rand", + "ring", + "rustc-hash", + "rustls", + "rustls-pki-types", + "slab", + "thiserror 2.0.6", + "tinyvec", + "tracing", + "web-time", +] + +[[package]] +name = "quinn-udp" +version = "0.5.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "52cd4b1eff68bf27940dd39811292c49e007f4d0b4c357358dc9b0197be6b527" +dependencies = [ + "cfg_aliases", + "libc", + "once_cell", + "socket2", + "tracing", + "windows-sys 0.59.0", ] [[package]] @@ -2069,18 +2026,18 @@ dependencies = [ [[package]] name = "redox_syscall" -version = "0.5.7" +version = "0.5.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9b6dfecf2c74bce2466cabf93f6664d6998a69eb21e39f4207930065b27b771f" +checksum = "03a862b389f93e68874fbf580b9de08dd02facb9a788ebadaf4a3fd33cf58834" dependencies = [ - "bitflags 2.6.0", + "bitflags", ] [[package]] name = "regex" -version = "1.11.0" +version = "1.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "38200e5ee88914975b69f657f0801b6f6dccafd44fd9326302a4aaeecfacb1d8" +checksum = "b544ef1b4eac5dc2db33ea63606ae9ffcfac26c1416a2806ae0bf5f56b201191" dependencies = [ "aho-corasick", "memchr", @@ -2090,9 +2047,9 @@ dependencies = [ [[package]] name = "regex-automata" -version = "0.4.8" +version = "0.4.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "368758f23274712b504848e9d5a6f010445cc8b87a7cdb4d7cbee666c1288da3" +checksum = "809e8dc61f6de73b46c85f4c96486310fe304c434cfa43669d7b40f711150908" dependencies = [ "aho-corasick", "memchr", @@ -2120,7 +2077,7 @@ dependencies = [ "glob", "reinda-macros", "sha2", - "thiserror", + "thiserror 1.0.69", "tokio", ] @@ -2139,28 +2096,58 @@ dependencies = [ ] [[package]] -name = "rfc6979" -version = "0.4.0" +name = "reqwest" +version = "0.12.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f8dd2a808d456c4a54e300a23e9f5a67e122c3024119acbfd73e3bf664491cb2" +checksum = "a77c62af46e79de0a562e1a9849205ffcb7fc1238876e9bd743357570e04046f" dependencies = [ - "hmac", - "subtle", + "base64 0.22.1", + "bytes", + "futures-core", + "futures-util", + "h2", + "http", + "http-body", + "http-body-util", + "hyper", + "hyper-rustls", + 
"hyper-util", + "ipnet", + "js-sys", + "log", + "mime", + "once_cell", + "percent-encoding", + "pin-project-lite", + "quinn", + "rustls", + "rustls-pemfile", + "rustls-pki-types", + "serde", + "serde_json", + "serde_urlencoded", + "sync_wrapper", + "tokio", + "tokio-rustls", + "tokio-util", + "tower-service", + "url", + "wasm-bindgen", + "wasm-bindgen-futures", + "wasm-streams", + "web-sys", + "webpki-roots", + "windows-registry", ] [[package]] -name = "ring" -version = "0.16.20" +name = "rfc6979" +version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3053cf52e236a3ed746dfc745aa9cacf1b791d846bdaf412f60a8d7d6e17c8fc" +checksum = "f8dd2a808d456c4a54e300a23e9f5a67e122c3024119acbfd73e3bf664491cb2" dependencies = [ - "cc", - "libc", - "once_cell", - "spin 0.5.2", - "untrusted 0.7.1", - "web-sys", - "winapi", + "hmac", + "subtle", ] [[package]] @@ -2173,8 +2160,8 @@ dependencies = [ "cfg-if", "getrandom", "libc", - "spin 0.9.8", - "untrusted 0.9.0", + "spin", + "untrusted", "windows-sys 0.52.0", ] @@ -2184,27 +2171,34 @@ version = "0.1.24" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "719b953e2095829ee67db738b3bfa9fa368c94900df327b3f07fe6e794d2fe1f" +[[package]] +name = "rustc-hash" +version = "2.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c7fb8039b3032c191086b10f11f319a6e99e1e82889c5cc6046f515c9db1d497" + [[package]] name = "rustix" -version = "0.38.37" +version = "0.38.42" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8acb788b847c24f28525660c4d7758620a7210875711f79e7f663cc152726811" +checksum = "f93dc38ecbab2eb790ff964bb77fa94faf256fd3e73285fd7ba0903b76bedb85" dependencies = [ - "bitflags 2.6.0", + "bitflags", "errno", "libc", "linux-raw-sys", - "windows-sys 0.52.0", + "windows-sys 0.59.0", ] [[package]] name = "rustls" -version = "0.22.4" +version = "0.23.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bf4ef73721ac7bcd79b2b315da7779d8fc09718c6b3d2d1b2d94850eb8c18432" +checksum = "5065c3f250cbd332cd894be57c40fa52387247659b14a2d6041d121547903b1b" dependencies = [ "log", - "ring 0.17.8", + "once_cell", + "ring", "rustls-pki-types", "rustls-webpki", "subtle", @@ -2213,12 +2207,11 @@ dependencies = [ [[package]] name = "rustls-native-certs" -version = "0.7.3" +version = "0.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e5bfb394eeed242e909609f56089eecfe5fda225042e8b171791b9c95f5931e5" +checksum = "7fcff2dd52b58a8d98a70243663a0d234c4e2b79235637849d15913394a247d3" dependencies = [ "openssl-probe", - "rustls-pemfile", "rustls-pki-types", "schannel", "security-framework", @@ -2238,6 +2231,9 @@ name = "rustls-pki-types" version = "1.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "16f1201b3c9a7ee8039bcadc17b7e605e2945b27eee7631788c1bd2b0643674b" +dependencies = [ + "web-time", +] [[package]] name = "rustls-webpki" @@ -2245,9 +2241,9 @@ version = "0.102.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "64ca1bc8749bd4cf37b5ce386cc146580777b4e8572c7b97baf22c83f444bee9" dependencies = [ - "ring 0.17.8", + "ring", "rustls-pki-types", - "untrusted 0.9.0", + "untrusted", ] [[package]] @@ -2258,18 +2254,18 @@ checksum = "f3cb5ba0dc43242ce17de99c180e96db90b235b8a9fdc9543c96d2209116bd9f" [[package]] name = "scc" -version = "2.2.2" +version = "2.2.5" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "f2c1f7fc6deb21665a9060dfc7d271be784669295a31babdcd4dd2c79ae8cbfb" +checksum = "66b202022bb57c049555430e11fc22fea12909276a80a4c3d368da36ac1d88ed" dependencies = [ "sdd", ] [[package]] name = "schannel" -version = "0.1.26" +version = "0.1.27" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "01227be5826fa0690321a2ba6c5cd57a19cf3f6a09e76973b58e61de6ab9d1c1" +checksum = "1f29ebaa345f945cec9fbbc532eb307f0fdad8161f281b6369539c8d84876b3d" dependencies = [ "windows-sys 0.59.0", ] @@ -2303,9 +2299,9 @@ dependencies = [ [[package]] name = "secrecy" -version = "0.8.0" +version = "0.10.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9bd1c54ea06cfd2f6b63219704de0b9b4f72dcc2b8fdef820be6cd799780e91e" +checksum = "e891af845473308773346dc847b2c23ee78fe442e0472ac50e22a18a93d3ae5a" dependencies = [ "serde", "zeroize", @@ -2313,11 +2309,11 @@ dependencies = [ [[package]] name = "security-framework" -version = "2.11.1" +version = "3.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "897b2245f0b511c87893af39b033e5ca9cce68824c4d7e7630b5a1d339658d02" +checksum = "e1415a607e92bec364ea2cf9264646dcce0f91e6d65281bd6f2819cca3bf39c8" dependencies = [ - "bitflags 2.6.0", + "bitflags", "core-foundation", "core-foundation-sys", "libc", @@ -2326,9 +2322,9 @@ dependencies = [ [[package]] name = "security-framework-sys" -version = "2.12.0" +version = "2.12.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ea4a292869320c0272d7bc55a5a6aafaff59b4f63404a003887b679a2e05b4b6" +checksum = "fa39c7303dc58b5543c94d22c1766b0d31f2ee58306363ea622b10bbc075eaa2" dependencies = [ "core-foundation-sys", "libc", @@ -2336,29 +2332,29 @@ dependencies = [ [[package]] name = "serde" -version = "1.0.213" +version = "1.0.216" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3ea7893ff5e2466df8d720bb615088341b295f849602c6956047f8f80f0e9bc1" +checksum = "0b9781016e935a97e8beecf0c933758c97a5520d32930e460142b4cd80c6338e" dependencies = [ "serde_derive", ] [[package]] name = "serde_derive" -version = "1.0.213" +version = "1.0.216" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7e85ad2009c50b58e87caa8cd6dac16bdf511bbfb7af6c33df902396aa480fa5" +checksum = "46f859dbbf73865c6627ed570e78961cd3ac92407a2d117204c49232485da55e" dependencies = [ "proc-macro2", "quote", - "syn 2.0.82", + "syn", ] [[package]] name = "serde_json" -version = "1.0.132" +version = "1.0.133" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d726bfaff4b320266d395898905d0eba0345aae23b54aee3a737e260fd46db03" +checksum = "c7fceb2473b9166b2294ef05efcb65a3db80803f0b03ef86a5fc88a2b85ee377" dependencies = [ "itoa", "memchr", @@ -2375,13 +2371,25 @@ dependencies = [ "serde", ] +[[package]] +name = "serde_urlencoded" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d3491c14715ca2294c4d6a88f15e84739788c1d030eed8c110436aafdaa2f3fd" +dependencies = [ + "form_urlencoded", + "itoa", + "ryu", + "serde", +] + [[package]] name = "serde_yaml" version = "0.9.34+deprecated" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6a8b1a1a2ebf674015cc02edccce75287f1a0130d394307b36743c2f5d504b47" dependencies = [ - "indexmap 2.6.0", + "indexmap", "itoa", "ryu", "serde", @@ -2398,6 +2406,17 @@ 
dependencies = [ "serde", ] +[[package]] +name = "sha1" +version = "0.10.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3bf829a2d51ab4a5ddf1352d8470c140cadc8301b2ae1789db023f01cedd6ba" +dependencies = [ + "cfg-if", + "cpufeatures", + "digest", +] + [[package]] name = "sha2" version = "0.10.8" @@ -2449,17 +2468,6 @@ dependencies = [ "autocfg", ] -[[package]] -name = "sluice" -version = "0.5.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6d7400c0eff44aa2fcb5e31a5f24ba9716ed90138769e4977a2ba6014ae63eb5" -dependencies = [ - "async-channel", - "futures-core", - "futures-io", -] - [[package]] name = "smallvec" version = "1.13.2" @@ -2479,20 +2487,14 @@ dependencies = [ [[package]] name = "socket2" -version = "0.5.7" +version = "0.5.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ce305eb0b4296696835b71df73eb912e0f1ffd2556a501fcede6e0c50349191c" +checksum = "c970269d99b64e60ec3bd6ad27270092a5394c4e309314b18ae3fe575695fbe8" dependencies = [ "libc", "windows-sys 0.52.0", ] -[[package]] -name = "spin" -version = "0.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6e63cff320ae2c57904679ba7cb63280a3dc4613885beafb148ee7bf9aa9042d" - [[package]] name = "spin" version = "0.9.8" @@ -2509,6 +2511,12 @@ dependencies = [ "der", ] +[[package]] +name = "stable_deref_trait" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3" + [[package]] name = "static_assertions" version = "1.1.0" @@ -2532,17 +2540,50 @@ version = "0.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f" +[[package]] +name = "structmeta" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2e1575d8d40908d70f6fd05537266b90ae71b15dbbe7a8b7dffa2b759306d329" +dependencies = [ + "proc-macro2", + "quote", + "structmeta-derive", + "syn", +] + +[[package]] +name = "structmeta-derive" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "152a0b65a590ff6c3da95cabe2353ee04e6167c896b28e3b14478c2636c922fc" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + [[package]] name = "subtle" version = "2.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292" +[[package]] +name = "subtp" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0047fd0d9ecdd63a24dfa175437888045dafaecfb3487c7c84f52ef7483d08a6" +dependencies = [ + "peg", + "thiserror 1.0.69", +] + [[package]] name = "syn" -version = "1.0.109" +version = "2.0.90" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237" +checksum = "919d3b74a5dd0ccd15aeb8f93e7006bd9e14c295087c9896a110f490752bcf31" dependencies = [ "proc-macro2", "quote", @@ -2550,14 +2591,23 @@ dependencies = [ ] [[package]] -name = "syn" -version = "2.0.82" +name = "sync_wrapper" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0bf256ce5efdfa370213c1dabab5935a12e49f2c58d15e9eac2870d3b4f27263" +dependencies = [ + "futures-core", +] + +[[package]] +name = 
"synstructure" +version = "0.13.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "83540f837a8afc019423a8edb95b52a8effe46957ee402287f4292fae35be021" +checksum = "c8af7666ab7b6390ab78131fb5b0fce11d6b7a6951602017c35fa82800708971" dependencies = [ "proc-macro2", "quote", - "unicode-ident", + "syn", ] [[package]] @@ -2577,22 +2627,42 @@ dependencies = [ [[package]] name = "thiserror" -version = "1.0.65" +version = "1.0.69" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6aaf5339b578ea85b50e080feb250a3e8ae8cfcdff9a461c9ec2904bc923f52" +dependencies = [ + "thiserror-impl 1.0.69", +] + +[[package]] +name = "thiserror" +version = "2.0.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8fec2a1820ebd077e2b90c4df007bebf344cd394098a13c563957d0afc83ea47" +dependencies = [ + "thiserror-impl 2.0.6", +] + +[[package]] +name = "thiserror-impl" +version = "1.0.69" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5d11abd9594d9b38965ef50805c5e469ca9cc6f197f883f717e0269a3057b3d5" +checksum = "4fee6c4efc90059e10f81e6d42c60a18f76588c3d74cb83a0b242a2b6c7504c1" dependencies = [ - "thiserror-impl", + "proc-macro2", + "quote", + "syn", ] [[package]] name = "thiserror-impl" -version = "1.0.65" +version = "2.0.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ae71770322cbd277e69d762a16c444af02aa0575ac0d174f0b9562d3b37f8602" +checksum = "d65750cab40f4ff1929fb1ba509e9914eb756131cef4210da8d5d700d26f6312" dependencies = [ "proc-macro2", "quote", - "syn 2.0.82", + "syn", ] [[package]] @@ -2627,9 +2697,9 @@ dependencies = [ [[package]] name = "time" -version = "0.3.36" +version = "0.3.37" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5dfd88e563464686c916c7e46e623e520ddc6d79fa6641390f2e3fa86e83e885" +checksum = "35e7868883861bd0e56d9ac6efcaaca0d6d5d82a2a7ec8209ff492c07cf37b21" dependencies = [ "deranged", "itoa", @@ -2648,14 +2718,24 @@ checksum = "ef927ca75afb808a4d64dd374f00a2adf8d0fcff8e7b184af886c3c87ec4a3f3" [[package]] name = "time-macros" -version = "0.2.18" +version = "0.2.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3f252a68540fde3a3877aeea552b832b40ab9a69e318efd078774a01ddee1ccf" +checksum = "2834e6017e3e5e4b9834939793b282bc03b37a3336245fa820e35e233e2a85de" dependencies = [ "num-conv", "time-core", ] +[[package]] +name = "tinystr" +version = "0.7.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9117f5d4db391c1cf6927e7bea3db74b9a1c1add8f7eda9ffd5364f40f57b82f" +dependencies = [ + "displaydoc", + "zerovec", +] + [[package]] name = "tinyvec" version = "1.8.0" @@ -2671,10 +2751,32 @@ version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" +[[package]] +name = "tls_codec" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b5e78c9c330f8c85b2bae7c8368f2739157db9991235123aa1b15ef9502bfb6a" +dependencies = [ + "tls_codec_derive", + "zeroize", +] + +[[package]] +name = "tls_codec_derive" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8d9ef545650e79f30233c0003bcc2504d7efac6dad25fca40744de773fe2049c" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + [[package]] name = "tobira" version = 
"2.13.0" dependencies = [ + "ahash", "anyhow", "base64 0.22.1", "bincode", @@ -2688,6 +2790,7 @@ dependencies = [ "cookie", "deadpool", "deadpool-postgres", + "either", "elliptic-curve", "fallible-iterator", "form_urlencoded", @@ -2699,9 +2802,8 @@ dependencies = [ "hyper-rustls", "hyper-util", "hyperlocal", - "isahc", + "iso8601", "juniper", - "libz-sys", "meilisearch-sdk", "mime_guess", "nu-ansi-term 0.50.1", @@ -2720,7 +2822,8 @@ dependencies = [ "rand", "regex", "reinda", - "ring 0.17.8", + "reqwest", + "ring", "rustls", "rustls-native-certs", "rustls-pemfile", @@ -2729,7 +2832,9 @@ dependencies = [ "serde", "serde_json", "serde_yaml", + "sha1", "static_assertions", + "subtp", "tap", "termcolor", "tikv-jemallocator", @@ -2741,13 +2846,14 @@ dependencies = [ "tracing-log", "tracing-subscriber", "url", + "xmlparser", ] [[package]] name = "tokio" -version = "1.41.0" +version = "1.42.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "145f3413504347a2be84393cc8a7d2fb4d863b375909ea59f2158261aa258bbb" +checksum = "5cec9b21b0450273377fc97bd4c33a8acffc8c996c987a7c5b319a0083707551" dependencies = [ "backtrace", "bytes", @@ -2767,7 +2873,7 @@ checksum = "693d596312e88961bc67d7f1f97af8a70227d9f90c31bba5806eec004978d752" dependencies = [ "proc-macro2", "quote", - "syn 2.0.82", + "syn", ] [[package]] @@ -2798,35 +2904,34 @@ dependencies = [ [[package]] name = "tokio-postgres-rustls" -version = "0.11.1" +version = "0.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0ea13f22eda7127c827983bdaf0d7fff9df21c8817bab02815ac277a21143677" +checksum = "27d684bad428a0f2481f42241f821db42c54e2dc81d8c00db8536c506b0a0144" dependencies = [ - "futures", - "ring 0.17.8", + "const-oid", + "ring", "rustls", "tokio", "tokio-postgres", "tokio-rustls", - "x509-certificate", + "x509-cert", ] [[package]] name = "tokio-rustls" -version = "0.25.0" +version = "0.26.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "775e0c0f0adb3a2f22a00c4745d728b479985fc15ee7ca6a2608388c5569860f" +checksum = "5f6d0975eaace0cf0fcadee4e4aaa5da15b5c079146f2cffb67c113be122bf37" dependencies = [ "rustls", - "rustls-pki-types", "tokio", ] [[package]] name = "tokio-util" -version = "0.7.12" +version = "0.7.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "61e7c3654c13bcd040d4a03abee2c75b1d14a37b423cf5a813ceae1cc903ec6a" +checksum = "d7fcaa8d55a2bdd6b83ace262b016eca0d79ee02818c5c1bcdf0305114081078" dependencies = [ "bytes", "futures-core", @@ -2862,7 +2967,7 @@ version = "0.22.22" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4ae48d6208a266e853d946088ed816055e556cc6028c5e8e2b84d9fa5dd7c7f5" dependencies = [ - "indexmap 2.6.0", + "indexmap", "serde", "serde_spanned", "toml_datetime", @@ -2877,9 +2982,9 @@ checksum = "8df9b6e13f2d32c91b9bd719c00d1958837bc7dec474d94952798cc8e69eeec3" [[package]] name = "tracing" -version = "0.1.40" +version = "0.1.41" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c3523ab5a71916ccf420eebdf5521fcef02141234bbc0b8a49f2fdc4544364ef" +checksum = "784e0ac535deb450455cbfa28a6f0df145ea1bb7ae51b821cf5e7927fdcfbdd0" dependencies = [ "log", "pin-project-lite", @@ -2889,35 +2994,25 @@ dependencies = [ [[package]] name = "tracing-attributes" -version = "0.1.27" +version = "0.1.28" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7" +checksum = "395ae124c09f9e6918a2310af6038fba074bcf474ac352496d5910dd59a2226d" dependencies = [ "proc-macro2", "quote", - "syn 2.0.82", + "syn", ] [[package]] name = "tracing-core" -version = "0.1.32" +version = "0.1.33" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c06d3da6113f116aaee68e4d601191614c9053067f9ab7f6edbcb161237daa54" +checksum = "e672c95779cf947c5311f83787af4fa8fffd12fb27e4993211a84bdfd9610f9c" dependencies = [ "once_cell", "valuable", ] -[[package]] -name = "tracing-futures" -version = "0.2.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "97d095ae15e245a057c8e8451bab9b3ee1e1f68e9ba2b4fbc18d0ac5237835f2" -dependencies = [ - "pin-project", - "tracing", -] - [[package]] name = "tracing-log" version = "0.2.0" @@ -2931,9 +3026,9 @@ dependencies = [ [[package]] name = "tracing-subscriber" -version = "0.3.18" +version = "0.3.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ad0f048c97dbd9faa9b7df56362b8ebcaa52adb06b498c050d2f4e32f90a7a8b" +checksum = "e8189decb5ac0fa7bc8b96b7cb9b2701d60d48805aca84a238004d665fcc4008" dependencies = [ "nu-ansi-term 0.46.0", "sharded-slab", @@ -2969,9 +3064,9 @@ checksum = "5ab17db44d7388991a428b2ee655ce0c212e862eff1768a455c58f9aad6e7893" [[package]] name = "unicode-ident" -version = "1.0.13" +version = "1.0.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e91b56cd4cadaeb79bbf1a5645f6b4f8dc5bde8834ad5894a8db35fda9efa1fe" +checksum = "adb9e6ca4f869e1180728b7950e35922a7fc6397f7b641499e8f3ef06e50dc83" [[package]] name = "unicode-normalization" @@ -3000,27 +3095,12 @@ version = "0.2.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ebc1c04c71510c7f702b52b7c350734c9ff1295c464a03335b00bb84fc54f853" -[[package]] -name = "unreachable" -version = "1.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "382810877fe448991dfc7f0dd6e3ae5d58088fd0ea5e35189655f84e6814fa56" -dependencies = [ - "void", -] - [[package]] name = "unsafe-libyaml" version = "0.2.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "673aac59facbab8a9007c7f6108d11f63b603f7cabff99fabf650fea5c32b861" -[[package]] -name = "untrusted" -version = "0.7.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a156c684c91ea7d62626509bce3cb4e1d9ed5c4d978f7b4352658f96a4c26b4a" - [[package]] name = "untrusted" version = "0.9.0" @@ -3029,15 +3109,27 @@ checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1" [[package]] name = "url" -version = "2.5.2" +version = "2.5.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "22784dbdf76fdde8af1aeda5622b546b422b6fc585325248a2bf9f5e41e94d6c" +checksum = "32f8b686cadd1473f4bd0117a5d28d36b1ade384ea9b5069a1c40aefed7fda60" dependencies = [ "form_urlencoded", "idna", "percent-encoding", ] +[[package]] +name = "utf16_iter" +version = "1.0.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c8232dd3cdaed5356e0f716d285e4b40b932ac434100fe9b7e0e8e935b9e6246" + +[[package]] +name = "utf8_iter" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6c140620e7ffbb22c2dee59cafe6084a59b5ffc27a8859a5f0d494b5d52b6be" + [[package]] name = "utf8parse" version = "0.2.2" @@ -3051,6 
+3143,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f8c5f0a0af699448548ad1a2fbf920fb4bee257eae39953ba95cb84891a0446a" dependencies = [ "getrandom", + "wasm-bindgen", ] [[package]] @@ -3077,12 +3170,6 @@ version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6a02e4885ed3bc0f2de90ea6dd45ebcbb66dacffe03547fadbb0eeae2770887d" -[[package]] -name = "waker-fn" -version = "1.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "317211a0dc0ceedd78fb2ca9a44aed3d7b9b26f81870d485c07122b4350673b7" - [[package]] name = "want" version = "0.3.1" @@ -3106,9 +3193,9 @@ checksum = "b8dad83b4f25e74f184f64c43b150b91efe7647395b42289f38e50566d82855b" [[package]] name = "wasm-bindgen" -version = "0.2.95" +version = "0.2.99" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "128d1e363af62632b8eb57219c8fd7877144af57558fb2ef0368d0087bddeb2e" +checksum = "a474f6281d1d70c17ae7aa6a613c87fce69a127e2624002df63dcb39d6cf6396" dependencies = [ "cfg-if", "once_cell", @@ -3117,36 +3204,36 @@ dependencies = [ [[package]] name = "wasm-bindgen-backend" -version = "0.2.95" +version = "0.2.99" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cb6dd4d3ca0ddffd1dd1c9c04f94b868c37ff5fac97c30b97cff2d74fce3a358" +checksum = "5f89bb38646b4f81674e8f5c3fb81b562be1fd936d84320f3264486418519c79" dependencies = [ "bumpalo", "log", - "once_cell", "proc-macro2", "quote", - "syn 2.0.82", + "syn", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-futures" -version = "0.4.45" +version = "0.4.49" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cc7ec4f8827a71586374db3e87abdb5a2bb3a15afed140221307c3ec06b1f63b" +checksum = "38176d9b44ea84e9184eff0bc34cc167ed044f816accfe5922e54d84cf48eca2" dependencies = [ "cfg-if", "js-sys", + "once_cell", "wasm-bindgen", "web-sys", ] [[package]] name = "wasm-bindgen-macro" -version = "0.2.95" +version = "0.2.99" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e79384be7f8f5a9dd5d7167216f022090cf1f9ec128e6e6a482a2cb5c5422c56" +checksum = "2cc6181fd9a7492eef6fef1f33961e3695e4579b9872a6f7c83aee556666d4fe" dependencies = [ "quote", "wasm-bindgen-macro-support", @@ -3154,33 +3241,65 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro-support" -version = "0.2.95" +version = "0.2.99" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "26c6ab57572f7a24a4985830b120de1594465e5d500f24afe89e16b4e833ef68" +checksum = "30d7a95b763d3c45903ed6c81f156801839e5ee968bb07e534c44df0fcd330c2" dependencies = [ "proc-macro2", "quote", - "syn 2.0.82", + "syn", "wasm-bindgen-backend", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-shared" -version = "0.2.95" +version = "0.2.99" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "65fc09f10666a9f147042251e0dda9c18f166ff7de300607007e96bdebc1068d" +checksum = "943aab3fdaaa029a6e0271b35ea10b72b943135afe9bffca82384098ad0e06a6" + +[[package]] +name = "wasm-streams" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "15053d8d85c7eccdbefef60f06769760a563c7f0a9d6902a13d35c7800b0ad65" +dependencies = [ + "futures-util", + "js-sys", + "wasm-bindgen", + "wasm-bindgen-futures", + "web-sys", +] [[package]] name = "web-sys" -version = "0.3.72" +version = "0.3.76" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "04dd7223427d52553d3702c004d3b2fe07c148165faa56313cb00211e31c12bc" +dependencies = [ + "js-sys", + "wasm-bindgen", +] + +[[package]] +name = "web-time" +version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f6488b90108c040df0fe62fa815cbdee25124641df01814dd7282749234c6112" +checksum = "5a6580f308b1fad9207618087a65c04e7a10bc77e02c8e84e9b00dd4b12fa0bb" dependencies = [ "js-sys", "wasm-bindgen", ] +[[package]] +name = "webpki-roots" +version = "0.26.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5d642ff16b7e79272ae451b7322067cdc17cadf68c23264be9d94a32319efe7e" +dependencies = [ + "rustls-pki-types", +] + [[package]] name = "whoami" version = "1.5.2" @@ -3230,7 +3349,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e48a53791691ab099e5e2ad123536d0fff50652600abaf43bbf952894110d0be" dependencies = [ "windows-core", - "windows-targets 0.52.6", + "windows-targets", ] [[package]] @@ -3239,49 +3358,55 @@ version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "33ab640c8d7e35bf8ba19b884ba838ceb4fba93a4e8c65a9059d08afcfc683d9" dependencies = [ - "windows-targets 0.52.6", + "windows-targets", ] [[package]] -name = "windows-sys" -version = "0.48.0" +name = "windows-registry" +version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9" +checksum = "e400001bb720a623c1c69032f8e3e4cf09984deec740f007dd2b03ec864804b0" dependencies = [ - "windows-targets 0.48.5", + "windows-result", + "windows-strings", + "windows-targets", ] [[package]] -name = "windows-sys" -version = "0.52.0" +name = "windows-result" +version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" +checksum = "1d1043d8214f791817bab27572aaa8af63732e11bf84aa21a45a78d6c317ae0e" +dependencies = [ + "windows-targets", +] + +[[package]] +name = "windows-strings" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4cd9b125c486025df0eabcb585e62173c6c9eddcec5d117d3b6e8c30e2ee4d10" dependencies = [ - "windows-targets 0.52.6", + "windows-result", + "windows-targets", ] [[package]] name = "windows-sys" -version = "0.59.0" +version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b" +checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" dependencies = [ - "windows-targets 0.52.6", + "windows-targets", ] [[package]] -name = "windows-targets" -version = "0.48.5" +name = "windows-sys" +version = "0.59.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9a2fa6e2155d7247be68c096456083145c183cbbbc2764150dda45a87197940c" +checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b" dependencies = [ - "windows_aarch64_gnullvm 0.48.5", - "windows_aarch64_msvc 0.48.5", - "windows_i686_gnu 0.48.5", - "windows_i686_msvc 0.48.5", - "windows_x86_64_gnu 0.48.5", - "windows_x86_64_gnullvm 0.48.5", - "windows_x86_64_msvc 0.48.5", + "windows-targets", ] [[package]] @@ -3290,46 +3415,28 @@ version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" 
checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973" dependencies = [ - "windows_aarch64_gnullvm 0.52.6", - "windows_aarch64_msvc 0.52.6", - "windows_i686_gnu 0.52.6", + "windows_aarch64_gnullvm", + "windows_aarch64_msvc", + "windows_i686_gnu", "windows_i686_gnullvm", - "windows_i686_msvc 0.52.6", - "windows_x86_64_gnu 0.52.6", - "windows_x86_64_gnullvm 0.52.6", - "windows_x86_64_msvc 0.52.6", + "windows_i686_msvc", + "windows_x86_64_gnu", + "windows_x86_64_gnullvm", + "windows_x86_64_msvc", ] -[[package]] -name = "windows_aarch64_gnullvm" -version = "0.48.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8" - [[package]] name = "windows_aarch64_gnullvm" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3" -[[package]] -name = "windows_aarch64_msvc" -version = "0.48.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc" - [[package]] name = "windows_aarch64_msvc" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469" -[[package]] -name = "windows_i686_gnu" -version = "0.48.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e" - [[package]] name = "windows_i686_gnu" version = "0.52.6" @@ -3342,48 +3449,24 @@ version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66" -[[package]] -name = "windows_i686_msvc" -version = "0.48.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406" - [[package]] name = "windows_i686_msvc" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66" -[[package]] -name = "windows_x86_64_gnu" -version = "0.48.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e" - [[package]] name = "windows_x86_64_gnu" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78" -[[package]] -name = "windows_x86_64_gnullvm" -version = "0.48.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc" - [[package]] name = "windows_x86_64_gnullvm" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d" -[[package]] -name = "windows_x86_64_msvc" -version = "0.48.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538" - [[package]] name = "windows_x86_64_msvc" version = "0.52.6" @@ -3400,32 +3483,68 @@ dependencies = [ ] [[package]] -name = "x509-certificate" -version = "0.23.1" +name = "write16" +version = "1.0.0" 
source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "66534846dec7a11d7c50a74b7cdb208b9a581cad890b7866430d438455847c85" +checksum = "d1890f4022759daae28ed4fe62859b1236caebfc61ede2f63ed4e695f3f6d936" + +[[package]] +name = "writeable" +version = "0.5.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e9df38ee2d2c3c5948ea468a8406ff0db0b29ae1ffde1bcf20ef305bcc95c51" + +[[package]] +name = "x509-cert" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1301e935010a701ae5f8655edc0ad17c44bad3ac5ce8c39185f75453b720ae94" dependencies = [ - "bcder", - "bytes", - "chrono", + "const-oid", "der", - "hex", - "pem", - "ring 0.17.8", - "signature", "spki", - "thiserror", - "zeroize", + "tls_codec", ] +[[package]] +name = "xmlparser" +version = "0.13.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "66fee0b777b0f5ac1c69bb06d361268faafa61cd4682ae064a171c16c433e9e4" + [[package]] name = "yaup" -version = "0.2.1" +version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a59e7d27bed43f7c37c25df5192ea9d435a8092a902e02203359ac9ce3e429d9" +checksum = "b0144f1a16a199846cb21024da74edd930b43443463292f536b7110b4855b5c6" dependencies = [ + "form_urlencoded", "serde", - "url", + "thiserror 1.0.69", +] + +[[package]] +name = "yoke" +version = "0.7.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "120e6aef9aa629e3d4f52dc8cc43a015c7724194c97dfaf45180d2daf2b77f40" +dependencies = [ + "serde", + "stable_deref_trait", + "yoke-derive", + "zerofrom", +] + +[[package]] +name = "yoke-derive" +version = "0.7.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2380878cad4ac9aac1e2435f3eb4020e8374b5f13c296cb75b4620ff8e229154" +dependencies = [ + "proc-macro2", + "quote", + "syn", + "synstructure", ] [[package]] @@ -3446,7 +3565,28 @@ checksum = "fa4f8080344d4671fb4e831a13ad1e68092748387dfc4f55e356242fae12ce3e" dependencies = [ "proc-macro2", "quote", - "syn 2.0.82", + "syn", +] + +[[package]] +name = "zerofrom" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cff3ee08c995dee1859d998dea82f7374f2826091dd9cd47def953cae446cd2e" +dependencies = [ + "zerofrom-derive", +] + +[[package]] +name = "zerofrom-derive" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "595eed982f7d355beb85837f651fa22e90b3c044842dc7f2c2842c086f295808" +dependencies = [ + "proc-macro2", + "quote", + "syn", + "synstructure", ] [[package]] @@ -3466,5 +3606,27 @@ checksum = "ce36e65b0d2999d2aafac989fb249189a141aee1f53c612c1f37d72631959f69" dependencies = [ "proc-macro2", "quote", - "syn 2.0.82", + "syn", +] + +[[package]] +name = "zerovec" +version = "0.10.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "aa2b893d79df23bfb12d5461018d408ea19dfafe76c2c7ef6d4eba614f8ff079" +dependencies = [ + "yoke", + "zerofrom", + "zerovec-derive", +] + +[[package]] +name = "zerovec-derive" +version = "0.10.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6eafa6dfb17584ea3e2bd6e76e0cc15ad7af12b09abdd1ca55961bed9b1063c6" +dependencies = [ + "proc-macro2", + "quote", + "syn", ] diff --git a/backend/Cargo.toml b/backend/Cargo.toml index 30e01b6b3..c308e986e 100644 --- a/backend/Cargo.toml +++ b/backend/Cargo.toml @@ -17,6 
+17,7 @@ embed-in-debug = ["reinda/always-prod"] [dependencies] +ahash = "0.8" anyhow = { version = "1.0.71", features = ["backtrace"] } base64 = "0.22.1" bincode = "1.3.3" @@ -27,8 +28,9 @@ chrono = { version = "0.4", default-features = false, features = ["serde", "std" clap = { version = "4.2.2", features = ["derive", "string"] } confique = { version = "0.3", features = ["toml"] } cookie = "0.18.0" -deadpool = { version = "0.10.0", default-features = false, features = ["managed", "rt_tokio_1"] } -deadpool-postgres = { version = "0.12.1", default-features = false, features = ["rt_tokio_1"] } +deadpool = { version = "0.12.1", default-features = false, features = ["managed", "rt_tokio_1"] } +deadpool-postgres = { version = "0.14.0", default-features = false, features = ["rt_tokio_1"] } +either = "1.13.0" elliptic-curve = { version = "0.13.4", features = ["jwk", "sec1"] } fallible-iterator = "0.2.0" form_urlencoded = "1.1.0" @@ -38,12 +40,11 @@ hostname = "0.4.0" hyper = { version = "1", features = ["client", "http1", "http2"] } hyperlocal = "0.9.1" http-body-util = "0.1" -hyper-rustls = { version = "0.26.0", features = ["http2"] } +hyper-rustls = { version = "0.27.3", default-features = false, features = ["http1", "http2", "native-tokio", "logging", "tls12"] } hyper-util = { version = "0.1.3", features = ["client", "server", "http1", "http2"] } -isahc = { version = "1", features = ["static-ssl"] } -juniper = { version = "0.15.10", default-features = false, features = ["chrono", "schema-language"] } -libz-sys = { version = "1", features = ["static"] } -meilisearch-sdk = "0.24.3" +iso8601 = "0.6.1" +juniper = { version = "0.16.1", default-features = false, features = ["chrono", "schema-language", "anyhow", "backtrace"] } +meilisearch-sdk = { path = "vendor/meilisearch-sdk" } mime_guess = { version = "2", default-features = false } nu-ansi-term = "0.50.1" ogrim = "0.1.1" @@ -61,11 +62,11 @@ rand = "0.8.4" regex = "1.7.1" reinda = "0.3" ring = "0.17.8" -rustls = "0.22.4" -rustls-native-certs = "0.7.0" +rustls = { version = "0.23.20", default-features = false, features = ["ring"] } +rustls-native-certs = "0.8.1" rustls-pemfile = "2.1.0" scc = "2.0.17" -secrecy = { version = "0.8", features = ["serde"] } +secrecy = { version = "0.10.3", features = ["serde"] } serde = { version = "1.0.192", features = ["derive"] } serde_json = "1" serde_yaml = "0.9.21" @@ -75,11 +76,15 @@ termcolor = "1.1.1" time = "0.3" tokio = { version = "1.36", features = ["fs", "rt-multi-thread", "macros", "time"] } tokio-postgres = { version = "0.7", features = ["with-chrono-0_4", "with-serde_json-1"] } -tokio-postgres-rustls = "0.11.1" +tokio-postgres-rustls = "0.13.0" url = "2.4.1" tracing = { version = "0.1.40", features = ["log"] } tracing-log = "0.2.0" tracing-subscriber = "0.3.18" +reqwest = { version = "0.12.4", default-features = false, features = ["rustls-tls"] } +subtp = "0.2.0" +xmlparser = "0.13.6" +sha1 = "0.10.6" [target.'cfg(target_os = "linux")'.dependencies] procfs = "0.17.0" diff --git a/backend/src/api/common.rs b/backend/src/api/common.rs index 7bc5443a3..570898ba9 100644 --- a/backend/src/api/common.rs +++ b/backend/src/api/common.rs @@ -1,4 +1,5 @@ use bincode::Options; +use juniper::{GraphQLScalar, InputValue, ScalarValue}; use serde::{Deserialize, Serialize}; use crate::{ @@ -7,18 +8,15 @@ use crate::{ Context, err::{self, ApiResult}, model::{ - event::AuthorizedEvent, - series::Series, - realm::Realm, + event::AuthorizedEvent, + series::Series, + realm::Realm, playlist::AuthorizedPlaylist, + 
search::{SearchEvent, SearchRealm, SearchSeries}, }, }, prelude::*, - search::Event as SearchEvent, - search::Realm as SearchRealm, - search::Series as SearchSeries, search::Playlist as SearchPlaylist, - db::types::ExtraMetadata, }; @@ -61,7 +59,11 @@ super::util::impl_object_with_dummy_field!(NotAllowed); /// serialization format from `bincode`, a compact binary serializer. Of course /// we could also have serialized it as JSON and base64 encoded it then, but /// that would be a waste of network bandwidth. -#[derive(Debug, Clone)] +#[derive(Debug, Clone, GraphQLScalar)] +#[graphql( + description = "An opaque cursor used for pagination", + parse_token(String), +)] pub(crate) struct Cursor(String); impl Cursor { @@ -87,69 +89,13 @@ impl Cursor { .deserialize_from(b64reader) .map_err(|e| err::invalid_input!("given cursor is invalid: {}", e)) } -} -#[juniper::graphql_scalar( - name = "Cursor", - description = "An opaque cursor used for pagination", -)] -impl GraphQLScalar for Cursor -where - S: juniper::ScalarValue, -{ - fn resolve(&self) -> juniper::Value { + fn to_output(&self) -> juniper::Value { juniper::Value::scalar(self.0.clone()) } - fn from_input_value(value: &juniper::InputValue) -> Option { - value.as_string_value().map(|s| Self(s.into())) - } - - fn from_str<'a>(value: juniper::ScalarToken<'a>) -> juniper::ParseScalarResult<'a, S> { - >::from_str(value) - } -} - - -#[juniper::graphql_scalar( - name = "ExtraMetadata", - description = "Arbitrary metadata for events/series. Serialized as JSON object.", -)] -impl GraphQLScalar for ExtraMetadata -where - S: juniper::ScalarValue -{ - fn resolve(&self) -> juniper::Value { - use juniper::Value; - - std::iter::once(("dcterms", &self.dcterms)) - .chain(self.extended.iter().map(|(k, v)| (&**k, v))) - .map(|(k, v)| { - let value = v.iter() - .map(|(k, v)| { - let elements = v.iter() - .map(|s| Value::Scalar(S::from(s.clone()))) - .collect(); - (k, Value::List(elements)) - }) - .collect::>(); - - (k, Value::Object(value)) - }) - .collect::>() - .pipe(Value::Object) - } - - fn from_input_value(value: &juniper::InputValue) -> Option { - // I did not want to waste time implementing this now, given that we - // likely never use it. - let _ = value; - todo!("ExtraMetadata cannot be used as input value yet") - } - - fn from_str<'a>(value: juniper::ScalarToken<'a>) -> juniper::ParseScalarResult<'a, S> { - // See `from_input_value` - let _ = value; - todo!() + fn from_input(input: &InputValue) -> Result { + let s = input.as_string_value().ok_or("expected string")?; + Ok(Self(s.into())) } } diff --git a/backend/src/api/err.rs b/backend/src/api/err.rs index bd387fea5..31616f811 100644 --- a/backend/src/api/err.rs +++ b/backend/src/api/err.rs @@ -33,6 +33,9 @@ pub(crate) enum ApiErrorKind { /// Communication error with Opencast. OpencastUnavailable, + + /// Generic Opencast error. + OpencastError, } impl ApiErrorKind { @@ -43,6 +46,7 @@ impl ApiErrorKind { Self::NotAuthorized => "NOT_AUTHORIZED", Self::InternalServerError => "INTERNAL_SERVER_ERROR", Self::OpencastUnavailable => "OPENCAST_UNAVAILABLE", + Self::OpencastError => "OPENCAST_ERROR", } } @@ -52,6 +56,7 @@ impl ApiErrorKind { Self::NotAuthorized => "Not authorized", Self::InternalServerError => "Internal server error", Self::OpencastUnavailable => "Opencast unavailable", + Self::OpencastError => "Opencast error", } } } @@ -139,10 +144,15 @@ macro_rules! opencast_unavailable { ($($t:tt)+) => { $crate::api::err::api_err!(OpencastUnavailable, $($t)*) }; } +macro_rules! 
opencast_error {
+    ($($t:tt)+) => { $crate::api::err::api_err!(OpencastError, $($t)*) };
+}
+
 pub(crate) use api_err;
 pub(crate) use invalid_input;
 pub(crate) use not_authorized;
 pub(crate) use opencast_unavailable;
+pub(crate) use opencast_error;


 // ===== Helper macro to inspect DbError ==================================================
diff --git a/backend/src/api/id.rs b/backend/src/api/id.rs
index 42c14eb87..7ac32c810 100644
--- a/backend/src/api/id.rs
+++ b/backend/src/api/id.rs
@@ -1,9 +1,10 @@
+use juniper::{GraphQLScalar, InputValue, ScalarValue};
 use paste::paste;
 use serde::{Deserialize, Serialize};
 use static_assertions::const_assert;
 use std::fmt;

-use crate::db::types::Key;
+use crate::model::Key;


 /// An opaque, globally-unique identifier for all "nodes" that the GraphQL API
@@ -14,7 +15,12 @@ use crate::db::types::Key;
 /// sure we can easily convert the ID to a database primary key.
 ///
 /// Each key is encoded as 12 byte ASCII string.
-#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)]
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize, GraphQLScalar)]
+#[graphql(
+    name = "ID",
+    description = "An opaque, globally-unique identifier",
+    parse_token(String),
+)]
 pub(crate) struct Id {
     /// The kind of node. Each different "thing" in our API has a different
     /// static prefix. For example, realms have the prefix `b"re"`. All IDs
@@ -121,49 +127,28 @@ impl Id {
     pub(crate) fn kind(&self) -> [u8; 2] {
         self.kind
     }
-}
-
-/// The URL-safe base64 alphabet.
-const BASE64_DIGITS: &[u8; 64] =
-    b"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-_";
-
-impl Key {
-    pub(crate) fn from_base64(s: &str) -> Option<Self> {
-        if s.len() != 11 {
-            return None;
-        }
-
-        decode_base64(s.as_bytes())
+    fn to_output<S: ScalarValue>(&self) -> juniper::Value<S> {
+        juniper::Value::scalar(self.to_string())
     }

-    pub(crate) fn to_base64<'a>(&self, out: &'a mut [u8; 11]) -> &'a str {
-        // Base64 encoding. After this loop, `n` is always 0, because `u64::MAX`
-        // divided by 64 eleven times is 0.
-        let mut n = self.0;
-        for i in (0..out.len()).rev() {
-            out[i] = BASE64_DIGITS[(n % 64) as usize];
-            n /= 64;
-        }
-        debug_assert!(n == 0);
-
-        std::str::from_utf8(out)
-            .expect("bug: base64 did produce non-ASCII character")
+    fn from_input<S: ScalarValue>(input: &InputValue<S>) -> Result<Self, String> {
+        let s = input.as_string_value().ok_or("expected string")?;
+        Ok(s.parse().unwrap_or(Self::invalid()))
    }
 }

 impl std::str::FromStr for Id {
-    // TODO: we might want to have more information about the error later, but
-    // the GraphQL API doesn't currently use it anyway.
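Aside: the scalar definitions in this file and in `common.rs` above follow juniper 0.16's `GraphQLScalar` derive, which picks up inherent `to_output`/`from_input` methods plus a `parse_token(...)` attribute, replacing the old `#[juniper::graphql_scalar]` blocks this diff removes. A minimal self-contained sketch of that pattern, assuming juniper 0.16 as a dependency; the `Opaque` type is illustrative, not part of this PR:

use juniper::{GraphQLScalar, InputValue, ScalarValue};

// Illustrative only: a string-backed scalar following the same derive
// pattern as `Cursor` and `Id` in the hunks above.
#[derive(Debug, Clone, GraphQLScalar)]
#[graphql(
    description = "An opaque string-backed scalar",
    parse_token(String),
)]
pub struct Opaque(String);

impl Opaque {
    // juniper calls this when writing the scalar into a response.
    fn to_output<S: ScalarValue>(&self) -> juniper::Value<S> {
        juniper::Value::scalar(self.0.clone())
    }

    // juniper calls this when reading the scalar from query input.
    fn from_input<S: ScalarValue>(input: &InputValue<S>) -> Result<Self, String> {
        input
            .as_string_value()
            .map(|s| Self(s.into()))
            .ok_or_else(|| "expected a string".to_string())
    }
}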
- type Err = (); + type Err = &'static str; fn from_str(s: &str) -> Result { if s.len() != 13 { - return Err(()); + return Err("invalid length"); } let bytes = s.as_bytes(); let kind = [bytes[0], bytes[1]]; - let key = Key::from_base64(&s[2..]).ok_or(())?; + let key = Key::from_base64(&s[2..]).ok_or("invalid base64")?; Ok(Self { kind, key }) } @@ -182,80 +167,10 @@ impl fmt::Display for Id { } } -#[juniper::graphql_scalar( - name = "ID", - description = "An opaque, globally-unique identifier", -)] -impl GraphQLScalar for Id -where - S: juniper::ScalarValue -{ - fn resolve(&self) -> juniper::Value { - juniper::Value::scalar(self.to_string()) - } - - fn from_input_value(value: &juniper::InputValue) -> Option { - let s = value.as_string_value()?; - Some(s.parse().unwrap_or(Self::invalid())) - } - - fn from_str<'a>(value: juniper::ScalarToken<'a>) -> juniper::ParseScalarResult<'a, S> { - >::from_str(value) - } -} - -fn decode_base64(src: &[u8]) -> Option { - /// The reverse lookup table to `BASE64_DIGITS`. If you index by an ASCII value, you - /// either get the corresponding digit value OR `0xFF`, signalling that the - /// character is not a valid base64 character. - const DECODE_TABLE: [u8; 256] = create_decode_table(); - - const fn create_decode_table() -> [u8; 256] { - let mut out = [0xFF; 256]; - - // If you wonder why we are using `while` instead of a more idiomatic loop: - // const fns are still somewhat limited and do not allow `for`. - let mut i = 0; - while i < BASE64_DIGITS.len() { - out[BASE64_DIGITS[i] as usize] = i as u8; - i += 1; - } - - out - } - - fn lookup(ascii: u8) -> Option { - let raw = DECODE_TABLE[ascii as usize]; - if raw == 0xFF { - return None; - } - - Some(raw as u64) - } - - let src: [u8; 11] = src.try_into().ok()?; - - // Make sure the string doesn't decode to a number > `u64::MAX`. Luckily, - // checking that is easy. `u64::MAX` encodes to `P__________`, so the next - // higher number would carry through and make the highest digit a `Q`. So we - // just make sure the first digit is between 'A' and 'P'. 
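Aside: the `decode_base64` implementation being removed here (this PR moves it and `BASE64_DIGITS` into `crate::util`, as the updated test below shows) is easiest to follow as an encode/decode round trip. A compact standalone sketch of the same scheme, directly mirroring the removed code; the free functions `encode`/`decode` are illustrative names, and the real table-based lookup is replaced by a simple linear scan:

/// The URL-safe base64 alphabet, as in `crate::util::BASE64_DIGITS`.
const BASE64_DIGITS: &[u8; 64] =
    b"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-_";

/// Encodes a key as exactly eleven base64 digits (64^11 > 2^64).
fn encode(mut n: u64) -> String {
    let mut out = [0u8; 11];
    for i in (0..out.len()).rev() {
        out[i] = BASE64_DIGITS[(n % 64) as usize];
        n /= 64;
    }
    // `u64::MAX` divided by 64 eleven times is 0, so nothing is left over.
    debug_assert_eq!(n, 0);
    String::from_utf8(out.to_vec()).expect("base64 digits are ASCII")
}

/// Decodes eleven base64 digits back into a `u64`, rejecting invalid
/// characters and values that would not fit into a `u64`.
fn decode(s: &str) -> Option<u64> {
    let src: &[u8; 11] = s.as_bytes().try_into().ok()?;
    // `u64::MAX` encodes as `P__________`, so any first digit above 'P'
    // (or any byte below 'A', which is no digit at all) cannot fit.
    if src[0] < b'A' || src[0] > b'P' {
        return None;
    }
    src.iter().try_fold(0u64, |acc, &d| {
        let digit = BASE64_DIGITS.iter().position(|&b| b == d)? as u64;
        Some(acc * 64 + digit)
    })
}

fn main() {
    assert_eq!(decode(&encode(123456789)), Some(123456789));
    assert_eq!(encode(u64::MAX), "P__________");
    assert_eq!(decode("Q__________"), None); // just past `u64::MAX`
}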
- if src[0] > b'P' || src[0] < b'A' { - return None; - } - - src.iter() - .rev() - .enumerate() - .map(|(i, &d)| lookup(d).map(|n| n * 64u64.pow(i as u32))) - .sum::>() - .map(Key) -} - - #[cfg(test)] mod tests { use std::str::FromStr; - use super::{Id, Key, BASE64_DIGITS}; + use super::{Id, Key}; #[test] fn simple() { @@ -280,18 +195,18 @@ mod tests { #[test] fn invalid_decode() { // Wrong length - assert_eq!(Id::from_str(""), Err(())); - assert_eq!(Id::from_str("re"), Err(())); - assert_eq!(Id::from_str("reAAAAAAAAAAAA"), Err(())); + assert_eq!(Id::from_str(""), Err("invalid length")); + assert_eq!(Id::from_str("re"), Err("invalid length")); + assert_eq!(Id::from_str("reAAAAAAAAAAAA"), Err("invalid length")); // Invalid characters - assert_eq!(Id::from_str("re0000000000*"), Err(())); - assert_eq!(Id::from_str("re0000000000?"), Err(())); - assert_eq!(Id::from_str("re0000000000/"), Err(())); + assert_eq!(Id::from_str("re0000000000*"), Err("invalid base64")); + assert_eq!(Id::from_str("re0000000000?"), Err("invalid base64")); + assert_eq!(Id::from_str("re0000000000/"), Err("invalid base64")); // Encoded value > u64::MAX - assert_eq!(Id::from_str("srQAAAAAAAAAA"), Err(())); - assert_eq!(Id::from_str("sr___________"), Err(())); + assert_eq!(Id::from_str("srQAAAAAAAAAA"), Err("invalid base64")); + assert_eq!(Id::from_str("sr___________"), Err("invalid base64")); } #[test] @@ -305,7 +220,7 @@ mod tests { let id = Id { kind: Id::REALM_KIND, key: Key((n as u64) << shift) }; let s = id.to_string(); assert_eq!(s[..2].as_bytes(), Id::REALM_KIND); - assert!(s[2..].bytes().all(|d| BASE64_DIGITS.contains(&d))); + assert!(s[2..].bytes().all(|d| crate::util::BASE64_DIGITS.contains(&d))); } } } diff --git a/backend/src/api/mod.rs b/backend/src/api/mod.rs index e80e81599..14ecdb25f 100644 --- a/backend/src/api/mod.rs +++ b/backend/src/api/mod.rs @@ -6,12 +6,12 @@ use self::{ subscription::Subscription, }; +pub(crate) mod err; pub(crate) mod util; pub(crate) mod model; mod common; mod context; -mod err; mod id; mod jwt; mod mutation; diff --git a/backend/src/api/model/acl.rs b/backend/src/api/model/acl.rs index 0c6607422..56bcd1394 100644 --- a/backend/src/api/model/acl.rs +++ b/backend/src/api/model/acl.rs @@ -1,7 +1,8 @@ -use juniper::GraphQLObject; +use juniper::{GraphQLInputObject, GraphQLObject}; use postgres_types::BorrowToSql; +use serde::Serialize; -use crate::{api::{util::TranslatedString, Context, err::ApiResult}, db::util::select}; +use crate::{api::{err::ApiResult, Context}, model::TranslatedString, db::util::select}; @@ -30,9 +31,9 @@ pub(crate) struct AclItem { #[graphql(context = Context)] pub(crate) struct RoleInfo { /// A user-facing label for this role (group or person). If the label does - /// not depend on the language (e.g. a name), `{ "_": "Peter" }` is + /// not depend on the language (e.g. a name), `{ "default": "Peter" }` is /// returned. - pub label: TranslatedString, + pub label: TranslatedString, /// For user roles this is `null`. For groups, it defines a list of other /// group roles that this role implies. I.e. 
a user with this role always @@ -65,7 +66,7 @@ where known_groups.label, case when users.display_name is null then null - else hstore('_', users.display_name) + else hstore('default', users.display_name) end )", ); @@ -94,3 +95,9 @@ where } }).await.map_err(Into::into) } + +#[derive(Debug, GraphQLInputObject, Serialize)] +pub(crate) struct AclInputEntry { + pub role: String, + pub actions: Vec, +} diff --git a/backend/src/api/model/block/mod.rs b/backend/src/api/model/block/mod.rs index 3041e8d55..b134d73bb 100644 --- a/backend/src/api/model/block/mod.rs +++ b/backend/src/api/model/block/mod.rs @@ -9,13 +9,14 @@ use crate::{ model::{ event::{AuthorizedEvent, Event}, playlist::Playlist, - realm::{Realm, RealmNameSourceBlockValue}, + realm::Realm, series::Series }, Context, Id, }, - db::{types::Key, util::impl_from_db}, + model::Key, + db::util::impl_from_db, prelude::*, }; @@ -43,26 +44,31 @@ pub(crate) use mutations::{ for = [TitleBlock, TextBlock, SeriesBlock, VideoBlock, PlaylistBlock] )] pub(crate) trait Block { - // To avoid code duplication, all the shared data is stored in `SharedData` - // and only a `shared` method is mandatory. All other method (in particular, - // all that are visible to GraphQL) are defined in the trait already. - #[graphql(skip)] - fn shared(&self) -> &SharedData; + fn id(&self) -> Id; + fn index(&self) -> i32; + async fn realm(&self, context: &Context) -> ApiResult; +} - fn id(&self) -> Id { - self.shared().id - } - fn index(&self) -> i32 { - self.shared().index - } - async fn realm(&self, context: &Context) -> ApiResult { - Realm::load_by_key(self.shared().realm_key, context) - .await - // Foreign key constraints guarantee the realm exists - .map(Option::unwrap) - } +macro_rules! impl_block { + ($ty:ty) => { + impl Block for $ty { + fn id(&self) -> Id { + self.shared.id + } + fn index(&self) -> i32 { + self.shared.index + } + async fn realm(&self, context: &Context) -> ApiResult { + Realm::load_by_key(self.shared.realm_key, context) + .await + // Foreign key constraints guarantee the realm exists + .map(Option::unwrap) + } + } + }; } + #[derive(Debug, Clone, Copy, FromSql)] #[postgres(name = "block_type")] pub(crate) enum BlockType { @@ -118,11 +124,7 @@ pub(crate) struct TitleBlock { pub(crate) content: String, } -impl Block for TitleBlock { - fn shared(&self) -> &SharedData { - &self.shared - } -} +impl_block!(TitleBlock); /// A block just showing some title. #[graphql_object(Context = Context, impl = BlockValue)] @@ -150,11 +152,7 @@ pub(crate) struct TextBlock { pub(crate) content: String, } -impl Block for TextBlock { - fn shared(&self) -> &SharedData { - &self.shared - } -} +impl_block!(TextBlock); /// A block just showing some text. 
#[graphql_object(Context = Context, impl = BlockValue)] @@ -186,14 +184,10 @@ pub(crate) struct SeriesBlock { pub(crate) layout: VideoListLayout, } -impl Block for SeriesBlock { - fn shared(&self) -> &SharedData { - &self.shared - } -} +impl_block!(SeriesBlock); /// A block just showing the list of videos in an Opencast series -#[graphql_object(Context = Context, impl = [BlockValue, RealmNameSourceBlockValue])] +#[graphql_object(Context = Context, impl = [BlockValue])] impl SeriesBlock { async fn series(&self, context: &Context) -> ApiResult> { match self.series { @@ -241,14 +235,10 @@ pub(crate) struct VideoBlock { pub(crate) show_link: bool, } -impl Block for VideoBlock { - fn shared(&self) -> &SharedData { - &self.shared - } -} +impl_block!(VideoBlock); /// A block for presenting a single Opencast event -#[graphql_object(Context = Context, impl = [BlockValue, RealmNameSourceBlockValue])] +#[graphql_object(Context = Context, impl = [BlockValue])] impl VideoBlock { async fn event(&self, context: &Context) -> ApiResult> { match self.event { @@ -289,14 +279,10 @@ pub(crate) struct PlaylistBlock { pub(crate) layout: VideoListLayout, } -impl Block for PlaylistBlock { - fn shared(&self) -> &SharedData { - &self.shared - } -} +impl_block!(PlaylistBlock); /// A block just showing the list of videos in an Opencast playlist -#[graphql_object(Context = Context, impl = [BlockValue, RealmNameSourceBlockValue])] +#[graphql_object(Context = Context, impl = [BlockValue])] impl PlaylistBlock { async fn playlist(&self, context: &Context) -> ApiResult> { match self.playlist { diff --git a/backend/src/api/model/block/mutations.rs b/backend/src/api/model/block/mutations.rs index 06e3c4f7b..f038ad699 100644 --- a/backend/src/api/model/block/mutations.rs +++ b/backend/src/api/model/block/mutations.rs @@ -2,7 +2,8 @@ use juniper::{GraphQLInputObject, GraphQLObject}; use crate::{ api::{Context, Id, err::{ApiResult, invalid_input}, model::realm::Realm}, - db::{types::Key, util::select}, + db::util::select, + model::Key, prelude::*, }; use super::{BlockValue, VideoListOrder, VideoListLayout}; diff --git a/backend/src/api/model/event.rs b/backend/src/api/model/event.rs index a69487d90..6e5e23ae6 100644 --- a/backend/src/api/model/event.rs +++ b/backend/src/api/model/event.rs @@ -1,31 +1,45 @@ +use std::collections::HashSet; + use chrono::{DateTime, Utc}; use hyper::StatusCode; use postgres_types::ToSql; use serde::{Serialize, Deserialize}; use tokio_postgres::Row; -use juniper::{GraphQLObject, graphql_object}; +use juniper::{graphql_object, Executor, GraphQLObject, ScalarValue}; +use sha1::{Sha1, Digest}; use crate::{ api::{ - Context, Cursor, Id, Node, NodeValue, + Context, + Cursor, + Id, + Node, + NodeValue, common::NotAllowed, err::{self, invalid_input, ApiResult}, - model::{acl::{self, Acl}, realm::Realm, series::Series}, + model::{ + acl::{self, Acl}, + realm::Realm, + series::Series, + }, }, db::{ - types::{EventCaption, EventSegment, EventState, EventTrack, ExtraMetadata, Key}, + types::{EventCaption, EventSegment, EventState, EventTrack, Credentials}, util::{impl_from_db, select}, }, + model::{Key, ExtraMetadata}, prelude::*, }; +use self::{acl::AclInputEntry, err::ApiError}; + use super::playlist::VideoListEntry; #[derive(Debug)] pub(crate) struct AuthorizedEvent { pub(crate) key: Key, - pub(crate) series: Option, + pub(crate) series: Option, pub(crate) opencast_id: String, pub(crate) is_live: bool, @@ -37,21 +51,35 @@ pub(crate) struct AuthorizedEvent { pub(crate) metadata: ExtraMetadata, pub(crate) 
read_roles: Vec, pub(crate) write_roles: Vec, + pub(crate) preview_roles: Vec, + pub(crate) credentials: Option, pub(crate) synced_data: Option, + pub(crate) authorized_data: Option, pub(crate) tobira_deletion_timestamp: Option>, } +#[derive(Debug)] +pub(crate) struct PreloadedSeries { + key: Key, + opencast_id: String, + title: String, +} + #[derive(Debug)] pub(crate) struct SyncedEventData { updated: DateTime, start_time: Option>, end_time: Option>, - + thumbnail: Option, /// Duration in milliseconds duration: i64, + audio_only: bool, +} + +#[derive(Debug)] +pub(crate) struct AuthorizedEventData { tracks: Vec, - thumbnail: Option, captions: Vec, segments: Vec, } @@ -64,14 +92,21 @@ impl_from_db!( title, description, duration, creators, thumbnail, metadata, created, updated, start_time, end_time, tracks, captions, segments, - read_roles, write_roles, + read_roles, write_roles, preview_roles, credentials, tobira_deletion_timestamp, }, + series.{ series_title: "title", series_oc_id: "opencast_id" }, }, |row| { + let tracks: Vec = row.tracks::>().into_iter().map(Track::from).collect(); + let series = row.series::>().map(|key| PreloadedSeries { + key, + opencast_id: row.series_oc_id(), + title: row.series_title(), + }); Self { key: row.id(), - series: row.series(), + series: series, opencast_id: row.opencast_id(), is_live: row.is_live(), title: row.title(), @@ -81,6 +116,8 @@ impl_from_db!( metadata: row.metadata(), read_roles: row.read_roles::>(), write_roles: row.write_roles::>(), + preview_roles: row.preview_roles::>(), + credentials: row.credentials(), tobira_deletion_timestamp: row.tobira_deletion_timestamp(), synced_data: match row.state::() { EventState::Ready => Some(SyncedEventData { @@ -89,7 +126,13 @@ impl_from_db!( end_time: row.end_time(), duration: row.duration(), thumbnail: row.thumbnail(), - tracks: row.tracks::>().into_iter().map(Track::from).collect(), + audio_only: tracks.iter().all(|t| t.resolution.is_none()), + }), + EventState::Waiting => None, + }, + authorized_data: match row.state::() { + EventState::Ready => Some(AuthorizedEventData { + tracks, captions: row.captions::>() .into_iter() .map(Caption::from) @@ -111,9 +154,7 @@ pub(crate) struct Track { uri: String, flavor: String, mimetype: Option, - // TODO: this should be `[i32; 2]` but the relevant patch is not released - // yet: https://github.com/graphql-rust/juniper/pull/966 - resolution: Option>, + resolution: Option<[i32; 2]>, is_master: Option, } @@ -135,7 +176,7 @@ impl Node for AuthorizedEvent { } } -#[graphql_object(Context = Context, impl = NodeValue)] +#[graphql_object(Context = Context)] impl SyncedEventData { fn updated(&self) -> DateTime { self.updated @@ -150,12 +191,21 @@ impl SyncedEventData { fn duration(&self) -> f64 { self.duration as f64 } - fn tracks(&self) -> &[Track] { - &self.tracks - } fn thumbnail(&self) -> Option<&str> { self.thumbnail.as_deref() } + fn audio_only(&self) -> bool { + self.audio_only + } +} + +/// Represents event data that is only accessible for users with read access +/// and event-specific authenticated users. +#[graphql_object(Context = Context)] +impl AuthorizedEventData { + fn tracks(&self) -> &[Track] { + &self.tracks + } fn captions(&self) -> &[Caption] { &self.captions } @@ -198,11 +248,42 @@ impl AuthorizedEvent { fn write_roles(&self) -> &[String] { &self.write_roles } + /// This doesn't contain `ROLE_ADMIN` as that is included implicitly. 
+ fn preview_roles(&self) -> &[String] { + &self.preview_roles + } fn synced_data(&self) -> &Option { &self.synced_data } + /// Returns the authorized event data if the user has read access or is authenticated for the event. + async fn authorized_data( + &self, + context: &Context, + user: Option, + password: Option, + ) -> Option<&AuthorizedEventData> { + let sha1_matches = |input: &str, encoded: &str| { + let (algo, hash) = encoded.split_once(':').expect("invalid credentials in DB"); + match algo { + "sha1" => hash == hex::encode_upper(Sha1::digest(input)), + _ => unreachable!("unsupported hash algo"), + } + }; + + let credentials_match = self.credentials.as_ref().map_or(false, |credentials| { + user.map_or(false, |u| sha1_matches(&u, &credentials.name)) + && password.map_or(false, |p| sha1_matches(&p, &credentials.password)) + }); + + if context.auth.overlaps_roles(&self.read_roles) || credentials_match { + self.authorized_data.as_ref() + } else { + None + } + } + /// Whether the current user has write access to this event. fn can_write(&self, context: &Context) -> bool { context.auth.overlaps_roles(&self.write_roles) @@ -212,9 +293,55 @@ impl AuthorizedEvent { &self.tobira_deletion_timestamp } - async fn series(&self, context: &Context) -> ApiResult> { - if let Some(series) = self.series { - Ok(Series::load_by_key(series, context).await?) + /// Whether the event has active workflows. + async fn has_active_workflows(&self, context: &Context) -> ApiResult { + if !context.auth.overlaps_roles(&self.write_roles) { + return Err(err::not_authorized!( + key = "event.workflow.not-allowed", + "you are not allowed to inquire about this event's workflow activity", + )); + } + + let response = context + .oc_client + .has_active_workflows(&self.opencast_id) + .await + .map_err(|e| { + error!("Failed to get workflow activity: {}", e); + err::opencast_error!("API returned unexpected response, event might be unknown") + })?; + + Ok(response) + } + + async fn series( + &self, + context: &Context, + executor: &Executor<'_, '_, Context, S>, + ) -> ApiResult> { + if let Some(series) = &self.series { + let preloaded_fields = ["id", "title", "opencastId"]; + + if executor.look_ahead().children().names().all(|n| preloaded_fields.contains(&n)) { + // All requested fields are already preloaded. It would be nicer + // to have a separate type here and return + // `Either` but in the case of the + // series, we can just use the normal type and pass `None` for + // other fields. We know those fields are never read. + Ok(Some(Series { + key: series.key, + opencast_id: series.opencast_id.clone(), + title: series.title.clone(), + synced_data: None, + created: None, + metadata: None, + read_roles: None, + write_roles: None, + })) + } else { + // We need to load the series as fields were requested that were not preloaded. + Ok(Series::load_by_key(series.key, context).await?) + } } else { Ok(None) } @@ -233,11 +360,17 @@ impl AuthorizedEvent { "); context.db.query_mapped( &query, - dbargs![&self.key, &self.series, &self.opencast_id], + dbargs![&self.key, &self.series_key(), &self.opencast_id], |row| Realm::from_row_start(&row) ).await?.pipe(Ok) } + + /// Whether this event is password protected. 
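Aside: the `authorized_data` resolver above grants access when SHA-1 digests of the supplied user name and password match the stored `"sha1:<uppercase hex>"` strings. A standalone sketch of just that matching step, using the `sha1` and `hex` crates exactly as the hunk does; the example secret is made up:

use sha1::{Digest, Sha1};

/// Checks a cleartext value against a stored `"<algo>:<UPPERCASE_HEX>"`
/// credential string, like the `sha1_matches` closure above (which instead
/// panics on a malformed stored value).
fn sha1_matches(input: &str, encoded: &str) -> bool {
    match encoded.split_once(':') {
        Some(("sha1", hash)) => hash == hex::encode_upper(Sha1::digest(input)),
        _ => false,
    }
}

fn main() {
    // Build a stored credential for a made-up secret and verify it.
    let stored = format!("sha1:{}", hex::encode_upper(Sha1::digest("s3cret-example")));
    assert!(sha1_matches("s3cret-example", &stored));
    assert!(!sha1_matches("wrong-password", &stored));
}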
+ async fn has_password(&self) -> bool { + self.credentials.is_some() + } + async fn acl(&self, context: &Context) -> ApiResult { let raw_roles_sql = "\ select unnest(read_roles) as role, 'read' as action from events where id = $1 @@ -256,7 +389,7 @@ impl AuthorizedEvent { join realms on blocks.realm = realms.id \ where realms.full_path = $1 and does_block_make_event_listed(blocks, $2, $3, $4) \ )"; - context.db.query_one(&query, &[&path.trim_end_matches('/'), &self.key, &self.series, &self.opencast_id]) + context.db.query_one(&query, &[&path.trim_end_matches('/'), &self.key, &self.series_key(), &self.opencast_id]) .await? .get::<_, bool>(0) .pipe(Ok) @@ -300,13 +433,15 @@ impl AuthorizedEvent { context: &Context, ) -> ApiResult> { let selection = Self::select(); - let query = format!("select {selection} from events where {col} = $1"); + let query = format!("select {selection} from events \ + left join series on series.id = events.series \ + where events.{col} = $1"); context.db .query_opt(&query, &[id]) .await? .map(|row| { let event = Self::from_row_start(&row); - if context.auth.overlaps_roles(&event.read_roles) { + if event.can_be_previewed(context) { Event::Event(event) } else { Event::NotAllowed(NotAllowed) @@ -321,13 +456,14 @@ impl AuthorizedEvent { ) -> ApiResult> { let selection = Self::select(); let query = format!( - "select {selection} from events \ - where series = $1", + "select {selection} from series \ + inner join events on events.series = series.id \ + where series.id = $1", ); context.db .query_mapped(&query, dbargs![&series_key], |row| { let event = Self::from_row_start(&row); - if !context.auth.overlaps_roles(&event.read_roles) { + if !event.can_be_previewed(context) { return VideoListEntry::NotAllowed(NotAllowed); } @@ -337,21 +473,46 @@ impl AuthorizedEvent { .pipe(Ok) } - pub(crate) async fn delete(id: Id, context: &Context) -> ApiResult { + fn can_be_previewed(&self, context: &Context) -> bool { + context.auth.overlaps_roles(&self.preview_roles) + || context.auth.overlaps_roles(&self.read_roles) + } + + fn series_key(&self) -> Option { + self.series.as_ref().map(|s| s.key) + } + + async fn load_for_api( + id: Id, + context: &Context, + not_found_error: ApiError, + not_authorized_error: ApiError, + ) -> ApiResult { let event = Self::load_by_id(id, context) - .await? - .ok_or_else(|| err::invalid_input!( - key = "event.delete.not-found", - "event not found", - ))? + .await? + .ok_or_else(|| not_found_error)? 
.into_result()?; if !context.auth.overlaps_roles(&event.write_roles) { - return Err(err::not_authorized!( + return Err(not_authorized_error); + } + + Ok(event) + } + + pub(crate) async fn delete(id: Id, context: &Context) -> ApiResult { + let event = Self::load_for_api( + id, + context, + err::invalid_input!( + key = "event.delete.not-found", + "event not found" + ), + err::not_authorized!( key = "event.delete.not-allowed", "you are not allowed to delete this event", - )); - } + ) + ).await?; let response = context .oc_client @@ -381,6 +542,100 @@ impl AuthorizedEvent { } } + pub(crate) async fn update_acl(id: Id, acl: Vec, context: &Context) -> ApiResult { + if !context.config.general.allow_acl_edit { + return Err(err::not_authorized!("editing ACLs is not allowed")); + } + + info!(event_id = %id, "Requesting ACL update of event"); + let event = Self::load_for_api( + id, + context, + err::invalid_input!( + key = "event.acl.not-found", + "event not found", + ), + err::not_authorized!( + key = "event.acl.not-allowed", + "you are not allowed to update this event's acl", + ) + ).await?; + + if Self::has_active_workflows(&event, context).await? { + return Err(err::opencast_error!( + key = "event.workflow.active", + "acl change blocked by another workflow", + )); + } + + let response = context + .oc_client + .update_event_acl(&event.opencast_id, &acl, context) + .await + .map_err(|e| { + error!("Failed to send acl update request: {}", e); + err::opencast_unavailable!("Failed to send acl update request") + })?; + + if response.status() == StatusCode::NO_CONTENT { + // 204: The access control list for the specified event is updated. + Self::start_workflow(&event.opencast_id, "republish-metadata", &context).await?; + let db_acl = convert_acl_input(acl); + + // Todo: also update custom and preview roles once frontend sends these + context.db.execute("\ + update all_events \ + set read_roles = $2, write_roles = $3 \ + where id = $1 \ + ", &[&event.key, &db_acl.read_roles, &db_acl.write_roles]).await?; + + Self::load_by_id(id, context) + .await? + .ok_or_else(|| err::invalid_input!( + key = "event.acl.not-found", + "event not found", + ))? + .into_result() + } else { + warn!( + event_id = %id, + "Failed to update event acl, OC returned status: {}", + response.status(), + ); + Err(err::opencast_error!("Opencast API error: {}", response.status())) + } + } + + /// Starts a workflow on the event. + async fn start_workflow(oc_id: &str, workflow_id: &str, context: &Context) -> ApiResult { + let response = context + .oc_client + .start_workflow(&oc_id, workflow_id) + .await + .map_err(|e| { + error!("Failed sending request to start workflow: {}", e); + err::opencast_unavailable!("Failed to communicate with Opencast") + })?; + + if response.status() == StatusCode::CREATED { + // 201: A new workflow is created. + info!(%workflow_id, event_id = %oc_id, "Requested creation of workflow"); + Ok(response.status()) + } else if response.status() == StatusCode::NOT_FOUND { + // 404: The specified workflow instance does not exist. 
+ warn!(%workflow_id, event_id = %oc_id, "The specified workflow instance does not exist."); + Err(err::opencast_error!("Opencast API error: {}", response.status())) + } else { + warn!( + %workflow_id, + event_id = %oc_id, + "Failed to create workflow, OC returned status: {}", + response.status(), + ); + Err(err::opencast_error!("Opencast API error: {}", response.status())) + } + } + pub(crate) async fn load_writable_for_user( context: &Context, order: EventSortOrder, @@ -423,11 +678,11 @@ impl AuthorizedEvent { (None, None) => String::new(), (Some(after), None) => { args.extend_from_slice(&[after.to_sql_arg(&order)?, &after.key]); - format!("where ({}, id) {} ($1, $2)", col, op_after) + format!("where (events.{}, events.id) {} ($1, $2)", col, op_after) } (None, Some(before)) => { args.extend_from_slice(&[before.to_sql_arg(&order)?, &before.key]); - format!("where ({}, id) {} ($1, $2)", col, op_before) + format!("where (events.{}, events.id) {} ($1, $2)", col, op_before) } (Some(after), Some(before)) => { args.extend_from_slice(&[ @@ -437,7 +692,7 @@ impl AuthorizedEvent { &before.key, ]); format!( - "where ({}, id) {} ($1, $2) and ({}, id) {} ($3, $4)", + "where (events.{}, events.id) {} ($1, $2) and (events.{}, events.id) {} ($3, $4)", col, op_after, col, op_before, ) }, @@ -457,24 +712,23 @@ impl AuthorizedEvent { format!("where write_roles && ${arg_index} and read_roles && ${arg_index}") }; let (selection, mapping) = select!( - event: AuthorizedEvent from - AuthorizedEvent::select().with_omitted_table_prefix("events"), + event: AuthorizedEvent, row_num, total_count, ); let query = format!( "select {selection} \ from (\ - select {event_cols}, \ + select events.*, \ row_number() over(order by ({sort_col}, id) {sort_order}) as row_num, \ count(*) over() as total_count \ from all_events as events \ {acl_filter} \ - order by ({sort_col}, id) {sort_order} \ - ) as tmp \ + ) as events \ + left join series on series.id = events.series \ {filter} \ + order by (events.{sort_col}, events.id) {sort_order} \ limit {limit}", - event_cols = Self::select(), sort_col = order.column.to_sql(), sort_order = sql_sort_order.to_sql(), limit = limit, @@ -566,7 +820,7 @@ impl From for Track { uri: src.uri, flavor: src.flavor, mimetype: src.mimetype, - resolution: src.resolution.map(Into::into), + resolution: src.resolution, is_master: src.is_master, } } @@ -713,8 +967,6 @@ pub(crate) struct EventPageInfo { pub(crate) has_next_page: bool, pub(crate) has_previous_page: bool, - // TODO: the spec says these shouldn't be optional, but that makes no sense. 
- // See: https://stackoverflow.com/q/70448483/2408867 pub(crate) start_cursor: Option, pub(crate) end_cursor: Option, @@ -729,3 +981,52 @@ pub(crate) struct EventPageInfo { pub(crate) struct RemovedEvent { id: Id, } + +#[derive(Debug)] +struct AclForDB { + // todo: add custom and preview roles when sent by frontend + // preview_roles: Vec, + read_roles: Vec, + write_roles: Vec, + // custom_action_roles: CustomActions, +} + +fn convert_acl_input(entries: Vec) -> AclForDB { + // let mut preview_roles = HashSet::new(); + let mut read_roles = HashSet::new(); + let mut write_roles = HashSet::new(); + // let mut custom_action_roles = CustomActions::default(); + + for entry in entries { + let role = entry.role; + for action in entry.actions { + match action.as_str() { + // "preview" => { + // preview_roles.insert(role.clone()); + // } + "read" => { + read_roles.insert(role.clone()); + } + "write" => { + write_roles.insert(role.clone()); + } + _ => { + // custom_action_roles + // .0 + // .entry(action) + // .or_insert_with(Vec::new) + // .push(role.clone()); + todo!(); + } + }; + } + } + + AclForDB { + // todo: add custom and preview roles when sent by frontend + // preview_roles: preview_roles.into_iter().collect(), + read_roles: read_roles.into_iter().collect(), + write_roles: write_roles.into_iter().collect(), + // custom_action_roles, + } +} diff --git a/backend/src/api/model/known_roles.rs b/backend/src/api/model/known_roles.rs index 50a706e2d..c207c3348 100644 --- a/backend/src/api/model/known_roles.rs +++ b/backend/src/api/model/known_roles.rs @@ -1,12 +1,13 @@ -use meilisearch_sdk::{Selectors, MatchingStrategies}; +use meilisearch_sdk::search::{Selectors, MatchingStrategies}; use serde::Deserialize; use crate::{ - api::{Context, err::ApiResult, util::TranslatedString}, - prelude::*, + api::{err::ApiResult, Context}, + model::TranslatedString, db::util::{impl_from_db, select}, + prelude::*, }; -use super::search::{SearchUnavailable, SearchResults, handle_search_result}; +use super::search::{handle_search_result, measure_search_duration, SearchResults, SearchUnavailable}; // ===== Groups =============================================================== @@ -16,7 +17,7 @@ use super::search::{SearchUnavailable, SearchResults, handle_search_result}; #[derive(juniper::GraphQLObject)] pub struct KnownGroup { pub(crate) role: String, - pub(crate) label: TranslatedString, + pub(crate) label: TranslatedString, pub(crate) implies: Vec, pub(crate) large: bool, } @@ -73,6 +74,7 @@ pub(crate) async fn search_known_users( query: String, context: &Context, ) -> ApiResult { + let elapsed_time = measure_search_duration(); if !context.auth.is_user() { return Err(context.not_logged_in_error()); } @@ -127,5 +129,5 @@ pub(crate) async fn search_known_users( items.extend(results.hits.into_iter().map(|h| h.result)); } - Ok(KnownUsersSearchOutcome::Results(SearchResults { items, total_hits })) + Ok(KnownUsersSearchOutcome::Results(SearchResults { items, total_hits, duration: elapsed_time() })) } diff --git a/backend/src/api/model/playlist/mod.rs b/backend/src/api/model/playlist/mod.rs index 45709fb6a..d1a9e3f57 100644 --- a/backend/src/api/model/playlist/mod.rs +++ b/backend/src/api/model/playlist/mod.rs @@ -3,9 +3,10 @@ use postgres_types::ToSql; use crate::{ api::{ - common::NotAllowed, err::ApiResult, Context, Id, Node + common::NotAllowed, err::ApiResult, Context, Id, Node, NodeValue }, - db::{types::Key, util::{impl_from_db, select}}, + db::util::{impl_from_db, select}, + model::Key, prelude::*, }; @@ -101,7 
+102,7 @@ impl Playlist { } /// Represents an Opencast playlist. -#[graphql_object(Context = Context)] +#[graphql_object(Context = Context, impl = NodeValue)] impl AuthorizedPlaylist { fn id(&self) -> Id { Node::id(self) @@ -136,7 +137,8 @@ impl AuthorizedPlaylist { where (entry).type = 'event'\ ) select {selection} from event_ids \ - left join events on events.opencast_id = event_ids.id\ + left join events on events.opencast_id = event_ids.id \ + left join series on series.id = events.series\ "); context.db .query_mapped(&query, dbargs![&self.key], |row| { diff --git a/backend/src/api/model/realm/mod.rs b/backend/src/api/model/realm/mod.rs index 1d3a3307d..8414d5793 100644 --- a/backend/src/api/model/realm/mod.rs +++ b/backend/src/api/model/realm/mod.rs @@ -1,4 +1,4 @@ -use juniper::{graphql_object, GraphQLEnum, GraphQLObject, GraphQLUnion, graphql_interface}; +use juniper::{graphql_object, GraphQLEnum, GraphQLObject, GraphQLUnion}; use postgres_types::{FromSql, ToSql}; use crate::{ @@ -11,16 +11,19 @@ use crate::{ NodeValue, }, auth::AuthContext, - db::{types::Key, util::{impl_from_db, select}}, + db::util::{impl_from_db, select}, + model::Key, prelude::*, }; -use super::block::{Block, BlockValue, PlaylistBlock, SeriesBlock, VideoBlock}; +use super::block::BlockValue; mod mutations; pub(crate) use mutations::{ - ChildIndex, NewRealm, RemovedRealm, UpdateRealm, UpdatedPermissions, UpdatedRealmName, RealmSpecifier, + ChildIndex, NewRealm, RemovedRealm, UpdateRealm, UpdatedPermissions, + UpdatedRealmName, RealmSpecifier, RealmLineageComponent, CreateRealmLineageOutcome, + RemoveMountedSeriesOutcome, }; @@ -57,54 +60,11 @@ pub(crate) struct RealmNameFromBlock { /// A realm name that is derived from a block of that realm. #[graphql_object(Context = Context)] impl RealmNameFromBlock { - async fn block(&self, context: &Context) -> ApiResult { - match BlockValue::load_by_key(self.block, context).await? { - BlockValue::VideoBlock(b) => Ok(RealmNameSourceBlockValue::VideoBlock(b)), - BlockValue::SeriesBlock(b) => Ok(RealmNameSourceBlockValue::SeriesBlock(b)), - BlockValue::PlaylistBlock(b) => Ok(RealmNameSourceBlockValue::PlaylistBlock(b)), - _ => unreachable!("block {:?} has invalid type for name source", self.block), - } - } -} - -#[graphql_interface(Context = Context, for = [SeriesBlock, VideoBlock, PlaylistBlock])] -pub(crate) trait RealmNameSourceBlock: Block { - // TODO: we repeat the `id` method here from the `Block` and `Node` trait. - // This should be done in a better way. Since the Octobor 2021 spec, - // interfaces can implement other interfaces. Juniper will support this in - // the future. 
- fn id(&self) -> Id; -} - -impl RealmNameSourceBlock for SeriesBlock { - fn id(&self) -> Id { - self.shared.id - } -} - -impl RealmNameSourceBlock for VideoBlock { - fn id(&self) -> Id { - self.shared.id - } -} - -impl RealmNameSourceBlock for PlaylistBlock { - fn id(&self) -> Id { - self.shared.id + async fn block(&self, context: &Context) -> ApiResult { + BlockValue::load_by_key(self.block, context).await } } -impl Block for RealmNameSourceBlockValue { - fn shared(&self) -> &super::block::SharedData { - match self { - Self::SeriesBlock(b) => b.shared(), - Self::VideoBlock(b) => b.shared(), - Self::PlaylistBlock(b) => b.shared(), - } - } -} - - pub(crate) struct Realm { pub(crate) key: Key, pub(crate) parent_key: Option, @@ -153,7 +113,14 @@ impl_from_db!( impl Realm { pub(crate) async fn root(context: &Context) -> ApiResult { - let (selection, mapping) = select!(child_order, moderator_roles, admin_roles); + let (selection, mapping) = select!( + child_order, + moderator_roles, + admin_roles, + name, + name_from_block, + resolved_name: "realms.resolved_name", + ); let row = context.db .query_one(&format!("select {selection} from realms where id = 0"), &[]) .await?; @@ -161,9 +128,9 @@ impl Realm { Ok(Self { key: Key(0), parent_key: None, - plain_name: None, - resolved_name: None, - name_from_block: None, + plain_name: mapping.name.of(&row), + resolved_name: mapping.resolved_name.of(&row), + name_from_block: mapping.name_from_block.of(&row), path_segment: String::new(), full_path: String::new(), index: 0, @@ -218,18 +185,6 @@ impl Realm { .pipe(Ok) } - pub(crate) fn is_main_root(&self) -> bool { - self.key.0 == 0 - } - - pub(crate) fn is_user_realm(&self) -> bool { - self.full_path.starts_with("/@") - } - - pub(crate) fn is_user_root(&self) -> bool { - self.is_user_realm() && self.parent_key.is_none() - } - /// Returns the username of the user owning this realm tree IF it is a user /// realm. Otherwise returns `None`. pub(crate) fn owning_user(&self) -> Option<&str> { @@ -243,18 +198,6 @@ impl Realm { }) } - fn is_current_user_page_admin(&self, context: &Context) -> bool { - context.auth.is_global_page_admin(&context.config.auth) - || self.is_current_user_owner(context) - || context.auth.overlaps_roles(&self.flattened_admin_roles) - } - - fn can_current_user_moderate(&self, context: &Context) -> bool { - context.auth.is_global_page_moderator(&context.config.auth) - || self.is_current_user_owner(context) - || context.auth.overlaps_roles(&self.flattened_moderator_roles) - } - pub(crate) fn require_moderator_rights(&self, context: &Context) -> ApiResult<()> { if !self.can_current_user_moderate(context) { return Err(context.access_error("realm.no-moderator-rights", |user| format!( @@ -298,8 +241,8 @@ impl Realm { } /// The raw information about the name of the realm, showing where the name - /// is coming from and if there is no name, why that is. Is `null` for the - /// root realm, non-null for all other realms. + /// is coming from and if there is no name, why that is. Can be `null` only for the + /// root realm, must be non-null for all other realms. fn name_source(&self) -> Option { if let Some(name) = &self.plain_name { Some(RealmNameSource::Plain(PlainRealmName { @@ -314,17 +257,17 @@ impl Realm { /// Returns `true` if this is the root of the public realm tree (with path = "/"). fn is_main_root(&self) -> bool { - self.is_main_root() + self.key.0 == 0 } /// Returns true if this is the root of a user realm tree. 
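/// That is: a realm whose path starts with `/@` and that has no parent.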
fn is_user_root(&self) -> bool { - self.is_user_root() + self.is_user_realm() && self.parent_key.is_none() } /// Returns `true` if this realm is managed by a user (path starting with `/@`). fn is_user_realm(&self) -> bool { - self.is_user_realm() + self.full_path.starts_with("/@") } fn index(&self) -> i32 { @@ -357,7 +300,7 @@ impl Realm { self.owner_display_name.as_deref() } - /// Returns the acl of this realm, combining moderator and admin roles and assigns + /// Returns the acl of this realm, combining moderator and admin roles and assigns /// the respective actions that are necessary for UI purposes. async fn own_acl(&self, context: &Context) -> ApiResult { let raw_roles_sql = " @@ -410,7 +353,7 @@ impl Realm { /// ordered by the internal index. If `childOrder` returns an ordering /// different from `BY_INDEX`, the frontend is supposed to sort the /// children. - async fn children(&self, context: &Context) -> ApiResult> { + pub(crate) async fn children(&self, context: &Context) -> ApiResult> { let selection = Self::select(); let query = format!( "select {selection} \ @@ -455,12 +398,16 @@ impl Realm { /// and edit settings including changing the realm path, deleting the realm and editing /// the realm's acl. fn is_current_user_page_admin(&self, context: &Context) -> bool { - self.is_current_user_page_admin(context) + context.auth.is_global_page_admin(&context.config.auth) + || self.is_current_user_owner(context) + || context.auth.overlaps_roles(&self.flattened_admin_roles) } /// Returns whether the current user has the rights to add sub-pages and edit realm content /// and non-critical settings. fn can_current_user_moderate(&self, context: &Context) -> bool { - self.can_current_user_moderate(context) + context.auth.is_global_page_moderator(&context.config.auth) + || self.is_current_user_owner(context) + || context.auth.overlaps_roles(&self.flattened_moderator_roles) } } diff --git a/backend/src/api/model/realm/mutations.rs b/backend/src/api/model/realm/mutations.rs index 39508106e..5b34144be 100644 --- a/backend/src/api/model/realm/mutations.rs +++ b/backend/src/api/model/realm/mutations.rs @@ -1,9 +1,15 @@ use std::collections::{HashMap, HashSet}; use crate::{ - api::{Context, Id, err::{ApiResult, invalid_input, map_db_err}}, - db::types::Key, - prelude::*, auth::AuthContext, + api::{ + Context, + err::{invalid_input, map_db_err, ApiResult}, + Id, + model::block::RemovedBlock, + }, + auth::AuthContext, + model::Key, + prelude::*, }; use super::{Realm, RealmOrder}; @@ -198,8 +204,11 @@ impl Realm { realm.require_moderator_rights(context)?; let db = &context.db; - if name.plain.is_some() == name.block.is_some() { - return Err(invalid_input!("exactly one of name.block and name.plain has to be set")); + if name.plain.is_some() && name.block.is_some() { + return Err(invalid_input!("both name.block and name.plain cannot be set")); + } + if !realm.is_main_root() && name.plain.is_none() && name.block.is_none() { + return Err(invalid_input!("exactly one of name.block and name.plain must be set for non-main-root realms")); } let block = name.block .map(|id| id.key_for(Id::BLOCK_KIND) @@ -245,12 +254,12 @@ impl Realm { admin_roles = coalesce($3, admin_roles) \ where id = $1", &[&realm.key, &permissions.moderator_roles, &permissions.admin_roles], - ) - .await?; + ).await?; Self::load_by_key(realm.key, context).await.map(Option::unwrap).inspect_(|new| { info!( - "Updated permissions of realm {:?} ({}) from moderators: '{:?}' to '{:?}' and from admins: '{:?}' to '{:?}'", + "Updated permissions of 
realm {:?} ({}) from moderators: '{:?}' to '{:?}' \ + and from admins: '{:?}' to '{:?}'", realm.key, realm.full_path, realm.moderator_roles, @@ -339,6 +348,41 @@ impl Realm { info!(%id, path = realm.full_path, "Removed realm"); Ok(RemovedRealm { parent }) } + + pub(crate) async fn create_lineage( + realms: Vec, + context: &Context, + ) -> ApiResult { + context.auth.required_trusted_external()?; + + if realms.len() == 0 { + return Ok(CreateRealmLineageOutcome { num_created: 0 }); + } + + if context.config.general.reserved_paths().any(|r| realms[0].path_segment == r) { + return Err(invalid_input!(key = "realm.path-is-reserved", "path is reserved and cannot be used")); + } + + let mut parent_path = String::new(); + let mut num_created = 0; + for realm in realms { + let sql = "\ + insert into realms (parent, name, path_segment) \ + values ((select id from realms where full_path = $1), $2, $3) \ + on conflict do nothing"; + let res = context.db.execute(sql, &[&parent_path, &realm.name, &realm.path_segment]) + .await; + let affected = map_db_err!(res, { + if constraint == "valid_path" => invalid_input!("path invalid"), + })?; + num_created += affected as i32; + + parent_path.push('/'); + parent_path.push_str(&realm.path_segment); + } + + Ok(CreateRealmLineageOutcome { num_created }) + } } /// Makes sure the ID refers to a realm and returns its key. @@ -379,6 +423,13 @@ impl UpdatedRealmName { block: Some(block), } } + + pub(crate) fn plain(name: String) -> Self { + Self { + plain: Some(name), + block: None, + } + } } #[derive(juniper::GraphQLInputObject)] @@ -394,8 +445,26 @@ pub(crate) struct RealmSpecifier { pub(crate) path_segment: String, } +#[derive(Clone, juniper::GraphQLInputObject)] +pub(crate) struct RealmLineageComponent { + pub(crate) name: String, + pub(crate) path_segment: String, +} + #[derive(juniper::GraphQLObject)] #[graphql(Context = Context)] pub(crate) struct RemovedRealm { parent: Option, } + +#[derive(juniper::GraphQLObject)] +pub struct CreateRealmLineageOutcome { + pub num_created: i32, +} + +#[derive(juniper::GraphQLUnion)] +#[graphql(Context = Context)] +pub(crate) enum RemoveMountedSeriesOutcome { + RemovedRealm(RemovedRealm), + RemovedBlock(RemovedBlock), +} diff --git a/backend/src/api/model/search/event.rs b/backend/src/api/model/search/event.rs index 536c95c28..3a0aa0639 100644 --- a/backend/src/api/model/search/event.rs +++ b/backend/src/api/model/search/event.rs @@ -1,72 +1,155 @@ +use std::collections::HashMap; + use chrono::{DateTime, Utc}; +use juniper::GraphQLObject; +use meilisearch_sdk::search::MatchRange; use crate::{ - api::{Context, Node, Id, NodeValue}, - search, + api::{Context, Id, Node, NodeValue}, + auth::HasRoles, + db::types::TextAssetType, + search::{self, util::decode_acl}, }; - - -impl Node for search::Event { - fn id(&self) -> Id { - Id::search_event(self.id.0) - } +use super::{field_matches_for, match_ranges_for, ByteSpan, SearchRealm}; + + +#[derive(Debug, GraphQLObject)] +#[graphql(Context = Context, impl = NodeValue)] +pub(crate) struct SearchEvent { + pub id: Id, + pub series_id: Option, + pub series_title: Option, + pub title: String, + pub description: Option, + pub creators: Vec, + pub thumbnail: Option, + pub duration: f64, + pub created: DateTime, + pub start_time: Option>, + pub end_time: Option>, + pub is_live: bool, + pub audio_only: bool, + pub host_realms: Vec, + pub text_matches: Vec, + pub matches: SearchEventMatches, + pub has_password: bool, + pub user_is_authorized: bool, } -#[juniper::graphql_object(Context = Context, impl 
= NodeValue, name = "SearchEvent")] -impl search::Event { - fn id(&self) -> Id { - Node::id(self) - } - - fn title(&self) -> &str { - &self.title - } - - fn series_id(&self) -> Option { - self.series_id.map(|id| Id::search_series(id.0)) - } - - fn series_title(&self) -> Option<&str> { - self.series_title.as_deref() - } +#[derive(Debug, GraphQLObject, Default)] +pub struct SearchEventMatches { + title: Vec, + description: Vec, + series_title: Vec, + creators: Vec, +} - fn description(&self) -> Option<&str> { - self.description.as_deref() - } +#[derive(Debug, GraphQLObject)] +pub struct ArrayMatch { + index: i32, + span: ByteSpan, +} - fn creators(&self) -> &[String] { - &self.creators - } +/// A match inside an event's texts while searching. +#[derive(Debug, GraphQLObject)] +pub struct TextMatch { + /// Start of this timespan in number of milliseconds from the beginning of + /// the video. + pub start: f64, - fn thumbnail(&self) -> Option<&str> { - self.thumbnail.as_deref() - } + /// Duration of this timespan in number of milliseconds. + pub duration: f64, - fn duration(&self) -> f64 { - self.duration as f64 - } + /// The text containing the match, with some context + pub text: String, - fn is_live(&self) -> bool { - self.is_live - } + /// Source of this text. + pub ty: TextAssetType, - fn audio_only(&self) -> bool { - self.audio_only - } + /// Parts of `text` that should be highlighted. + pub highlights: Vec, +} - fn created(&self) -> DateTime { - self.created +impl Node for SearchEvent { + fn id(&self) -> Id { + self.id } +} - fn start_time(&self) -> Option> { - self.start_time +impl SearchEvent { + pub(crate) fn without_matches(src: search::Event, context: &Context) -> Self { + let read_roles = decode_acl(&src.read_roles); + let user_can_read = context.auth.overlaps_roles(read_roles); + Self::new_inner(src, vec![], SearchEventMatches::default(), user_can_read) } - fn end_time(&self) -> Option> { - self.end_time + pub(crate) fn new( + src: search::Event, + match_positions: Option<&HashMap>>, + context: &Context, + ) -> Self { + let mut text_matches = Vec::new(); + let read_roles = decode_acl(&src.read_roles); + let user_can_read = context.auth.overlaps_roles(read_roles); + if user_can_read { + src.slide_texts.resolve_matches( + match_ranges_for(match_positions, "slide_texts.texts"), + &mut text_matches, + TextAssetType::SlideText, + ); + src.caption_texts.resolve_matches( + match_ranges_for(match_positions, "caption_texts.texts"), + &mut text_matches, + TextAssetType::Caption, + ); + } + + let matches = SearchEventMatches { + title: field_matches_for(match_positions, "title"), + description: field_matches_for(match_positions, "description"), + series_title: field_matches_for(match_positions, "series_title"), + creators: match_ranges_for(match_positions, "creators") + .iter() + .filter_map(|m| { + m.indices.as_ref().and_then(|v| v.get(0)).map(|index| ArrayMatch { + span: ByteSpan { start: m.start as u32, len: m.length as u32 }, + index: *index as i32, + }) + }) + .take(8) + .collect(), + }; + + Self::new_inner(src, text_matches, matches, user_can_read) } - fn host_realms(&self) -> &[search::Realm] { - &self.host_realms + fn new_inner( + src: search::Event, + text_matches: Vec, + matches: SearchEventMatches, + user_can_read: bool, + ) -> Self { + Self { + id: Id::search_event(src.id.0), + series_id: src.series_id.map(|id| Id::search_series(id.0)), + series_title: src.series_title, + title: src.title, + description: src.description, + creators: src.creators, + thumbnail: src.thumbnail, + 
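+ // The search index stores the duration as an integer; it is exposed as `f64` because GraphQL's `Int` is only 32 bits.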
duration: src.duration as f64, + created: src.created, + start_time: src.start_time, + end_time: src.end_time, + is_live: src.is_live, + audio_only: src.audio_only, + host_realms: src.host_realms.into_iter() + .map(|r| SearchRealm::without_matches(r)) + .collect(), + text_matches, + matches, + has_password: src.has_password, + user_is_authorized: user_can_read, + } } } diff --git a/backend/src/api/model/search/mod.rs b/backend/src/api/model/search/mod.rs index 2f9f93d00..9730fce19 100644 --- a/backend/src/api/model/search/mod.rs +++ b/backend/src/api/model/search/mod.rs @@ -1,8 +1,9 @@ use chrono::{DateTime, Utc}; -use juniper::GraphQLObject; +use juniper::{GraphQLObject, GraphQLScalar, InputValue, ScalarValue}; +use meilisearch_sdk::search::{FederationOptions, MatchRange, QueryFederationOptions}; use once_cell::sync::Lazy; use regex::Regex; -use std::{borrow::Cow, fmt}; +use std::{borrow::Cow, collections::HashMap, fmt, time::Instant}; use crate::{ api::{ @@ -22,6 +23,12 @@ mod realm; mod series; mod playlist; +pub(crate) use self::{ + event::{SearchEvent, TextMatch}, + realm::SearchRealm, + series::SearchSeries, +}; + /// Marker type to signal that the search functionality is unavailable for some /// reason. @@ -48,36 +55,47 @@ pub(crate) enum SearchOutcome { pub(crate) struct SearchResults { pub(crate) items: Vec, pub(crate) total_hits: usize, + pub(crate) duration: i32, } -#[juniper::graphql_object(Context = Context)] -impl SearchResults { - fn items(&self) -> &[NodeValue] { - &self.items - } - fn total_hits(&self) -> i32 { - self.total_hits as i32 - } +macro_rules! make_search_results_object { + ($name:literal, $ty:ty) => { + #[juniper::graphql_object(Context = Context, name = $name)] + impl SearchResults<$ty> { + fn items(&self) -> &[$ty] { + &self.items + } + fn total_hits(&self) -> i32 { + self.total_hits as i32 + } + /// How long searching took in ms. + fn duration(&self) -> i32 { + self.duration + } + } + }; } -#[juniper::graphql_object(Context = Context, name = "EventSearchResults")] -impl SearchResults { - fn items(&self) -> &[search::Event] { - &self.items - } +make_search_results_object!("SearchResults", NodeValue); +make_search_results_object!("EventSearchResults", SearchEvent); +make_search_results_object!("SeriesSearchResults", SearchSeries); +make_search_results_object!("PlaylistSearchResults", search::Playlist); + +/// A byte range, encoded as two hex numbers separated by `-`. +#[derive(Debug, Clone, Copy, GraphQLScalar)] +#[graphql(parse_token(String))] +pub struct ByteSpan { + pub start: u32, + pub len: u32, } -#[juniper::graphql_object(Context = Context, name = "SeriesSearchResults")] -impl SearchResults { - fn items(&self) -> &[search::Series] { - &self.items +impl ByteSpan { + fn to_output(&self) -> juniper::Value { + juniper::Value::scalar(format!("{:x}-{:x}", self.start, self.len)) } -} -#[juniper::graphql_object(Context = Context, name = "PlaylistSearchResults")] -impl SearchResults { - fn items(&self) -> &[search::Playlist] { - &self.items + fn from_input(_input: &InputValue) -> Result { + unimplemented!("not used right now") } } @@ -118,7 +136,7 @@ macro_rules! handle_search_result { // to happen. In those cases, we just say that the search is currently // unavailable, instead of the general error. Err(e @ MsError::Meilisearch(MsRespError { error_code: MsErrorCode::IndexNotFound, .. 
})) - | Err(e @ MsError::UnreachableServer) + | Err(e @ MsError::HttpError(_)) | Err(e @ MsError::Timeout) => { error!("Meili search failed: {e} (=> replying 'search unavailable')"); return Ok(<$return_type>::SearchUnavailable(SearchUnavailable)); @@ -148,6 +166,7 @@ pub(crate) async fn perform( filters: Filters, context: &Context, ) -> ApiResult { + let elapsed_time = measure_search_duration(); if user_query.is_empty() { return Ok(SearchOutcome::EmptyQuery(EmptyQuery)); } @@ -158,116 +177,112 @@ pub(crate) async fn perform( let selection = search::Event::select(); let query = format!("select {selection} from search_events \ where id = (select id from events where opencast_id = $1) \ - and (read_roles || 'ROLE_ADMIN'::text) && $2"); + and (preview_roles || read_roles || 'ROLE_ADMIN'::text) && $2"); let items: Vec = context.db .query_opt(&query, &[&uuid_query, &context.auth.roles_vec()]) .await? - .map(|row| search::Event::from_row_start(&row).into()) + .map(|row| { + let e = search::Event::from_row_start(&row); + SearchEvent::without_matches(e, &context).into() + }) .into_iter() .collect(); let total_hits = items.len(); - return Ok(SearchOutcome::Results(SearchResults { items, total_hits })); + return Ok(SearchOutcome::Results(SearchResults { + items, + total_hits, + duration: elapsed_time(), + })); } // Prepare the event search - let filter = Filter::And( - std::iter::once(Filter::Leaf("listed = true".into())) - .chain(acl_filter("read_roles", context)) - // Filter out live events that are already over. - .chain([Filter::Or([ - Filter::Leaf("is_live = false ".into()), - Filter::Leaf(format!("end_time_timestamp >= {}", Utc::now().timestamp()).into()), - ].into())]) - .chain(filters.start.map(|start| Filter::Leaf(format!("created_timestamp >= {}", start.timestamp()).into()))) - .chain(filters.end.map(|end| Filter::Leaf(format!("created_timestamp <= {}", end.timestamp()).into()))) - .collect() - ).to_string(); + let filter = Filter::and([ + Filter::listed(), + Filter::preview_or_read_access(context), + // Filter out live events that already ended + Filter::or([ + Filter::Leaf("is_live = false ".into()), + Filter::Leaf(format!("end_time_timestamp >= {}", Utc::now().timestamp()).into()), + ]), + // Apply user selected date filters + filters.start + .map(|start| Filter::Leaf(format!("created_timestamp >= {}", start.timestamp()).into())) + .unwrap_or(Filter::True), + filters.end + .map(|end| Filter::Leaf(format!("created_timestamp <= {}", end.timestamp()).into())) + .unwrap_or(Filter::True), + ]).to_string(); let event_query = context.search.event_index.search() .with_query(user_query) - .with_limit(15) .with_show_matches_position(true) .with_filter(&filter) - .with_show_ranking_score(true) .build(); - // Prepare the series search let series_query = context.search.series_index.search() .with_query(user_query) .with_show_matches_position(true) .with_filter("listed = true") - .with_limit(15) - .with_show_ranking_score(true) + .with_federation_options(QueryFederationOptions { + weight: Some(1.1), + }) .build(); - // Prepare the realm search let realm_query = context.search.realm_index.search() .with_query(user_query) - .with_limit(10) .with_filter("is_user_realm = false") .with_show_matches_position(true) - .with_show_ranking_score(true) .build(); - - // Perform the searches - let res = tokio::try_join!( - event_query.execute::(), - series_query.execute::(), - realm_query.execute::(), - ); - let (event_results, series_results, realm_results) = handle_search_result!(res, SearchOutcome); - - // Merge 
results according to Meilis score. - // - // TODO: Comparing scores of different indices is not well defined right now. - // We can either use score details or adding dummy searchable fields to the - // realm index. See this discussion for more info: - // https://github.com/orgs/meilisearch/discussions/489#discussioncomment-6160361 - let events = event_results.hits.into_iter() - .map(|result| (NodeValue::from(result.result), result.ranking_score)); - let series = series_results.hits.into_iter() - .map(|result| (NodeValue::from(result.result), result.ranking_score)); - let realms = realm_results.hits.into_iter() - .map(|result| (NodeValue::from(result.result), result.ranking_score)); - - let mut merged: Vec<(NodeValue, Option)> = Vec::new(); - let total_hits: usize; - - match filters.item_type { - Some(ItemType::Event) => { - merged.extend(events); - total_hits = event_results.estimated_total_hits.unwrap_or(0); - }, - Some(ItemType::Series) => { - merged.extend(series); - total_hits = series_results.estimated_total_hits.unwrap_or(0); - }, - Some(ItemType::Realm) => { - merged.extend(realms); - total_hits = realm_results.estimated_total_hits.unwrap_or(0); - }, - None => { - merged.extend(events); - merged.extend(series); - merged.extend(realms); - total_hits = [ - event_results.estimated_total_hits, - series_results.estimated_total_hits, - realm_results.estimated_total_hits, - ] - .iter() - .filter_map(|&x| x) - .sum(); - }, + let mut multi_search = context.search.client.multi_search(); + if matches!(filters.item_type, None | Some(ItemType::Event)) { + multi_search.with_search_query(event_query); + } + if matches!(filters.item_type, None | Some(ItemType::Series)) { + multi_search.with_search_query(series_query); + } + if matches!(filters.item_type, None | Some(ItemType::Realm)) { + multi_search.with_search_query(realm_query); + } + let multi_search = multi_search.with_federation(FederationOptions { + limit: Some(30), + offset: Some(0), // TODO: pagination + ..Default::default() + }); + + + #[derive(serde::Deserialize)] + #[serde(untagged)] + enum MultiSearchItem { + Event(search::Event), + Series(search::Series), + Realm(search::Realm), } - merged.sort_unstable_by(|(_, score0), (_, score1)| score1.unwrap().total_cmp(&score0.unwrap())); - - let items = merged.into_iter().map(|(node, _)| node).collect(); - Ok(SearchOutcome::Results(SearchResults { items, total_hits })) + // TODO: Check if sort order makes sense. That's because comparing scores of + // different indices is not well defined right now. We can either use score + // details or adding dummy searchable fields to the realm index. 
See this + // discussion for more info: + // https://github.com/orgs/meilisearch/discussions/489#discussioncomment-6160361 + let res = handle_search_result!(multi_search.execute::().await, SearchOutcome); + + let items = res.hits.into_iter() + .map(|res| { + let mp = res.matches_position.as_ref(); + match res.result { + MultiSearchItem::Event(event) => NodeValue::from(SearchEvent::new(event, mp, &context)), + MultiSearchItem::Series(series) => NodeValue::from(SearchSeries::new(series, mp, context)), + MultiSearchItem::Realm(realm) => NodeValue::from(SearchRealm::new(realm, mp)), + } + }) + .collect(); + Ok(SearchOutcome::Results(SearchResults { + items, + total_hits: res.estimated_total_hits, + duration: elapsed_time(), + })) } fn looks_like_opencast_uuid(query: &str) -> bool { @@ -286,7 +301,7 @@ fn looks_like_opencast_uuid(query: &str) -> bool { #[graphql(Context = Context)] pub(crate) enum EventSearchOutcome { SearchUnavailable(SearchUnavailable), - Results(SearchResults), + Results(SearchResults), } pub(crate) async fn all_events( @@ -294,19 +309,23 @@ pub(crate) async fn all_events( writable_only: bool, context: &Context, ) -> ApiResult { + let elapsed_time = measure_search_duration(); if !context.auth.is_user() { return Err(context.not_logged_in_error()); } - let filter = Filter::make_or_none_for_admins(context, || { + let filter = Filter::make_or_true_for_admins(context, || { // All users can always find all events they have write access to. If // `writable_only` is false, this API also returns events that are // listed and that the user can read. - let writable = Filter::acl_access("write_roles", context); + let writable = Filter::write_access(context); if writable_only { writable } else { - Filter::or([Filter::listed_and_readable(context), writable]) + Filter::or([ + Filter::preview_or_read_access(context).and_listed(context), + writable, + ]) } }).to_string(); @@ -314,16 +333,20 @@ pub(crate) async fn all_events( query.with_query(user_query); query.with_limit(50); query.with_show_matches_position(true); + // We don't want to search through, nor retrieve the event texts. + query.with_attributes_to_search_on(&["title", "creators", "series_title"]); query.with_filter(&filter); if user_query.is_empty() { query.with_sort(&["updated_timestamp:desc"]); } let res = query.execute::().await; let results = handle_search_result!(res, EventSearchOutcome); - let items = results.hits.into_iter().map(|h| h.result).collect(); + let items = results.hits.into_iter() + .map(|h| SearchEvent::new(h.result, h.matches_position.as_ref(), &context)) + .collect(); let total_hits = results.estimated_total_hits.unwrap_or(0); - Ok(EventSearchOutcome::Results(SearchResults { items, total_hits })) + Ok(EventSearchOutcome::Results(SearchResults { items, total_hits, duration: elapsed_time() })) } // See `EventSearchOutcome` for additional information. 
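Note on the filter strings built in `all_events` above (and in `all_series`/`all_playlists` below): they come from the reworked `Filter` combinators at the end of this file, where the new `True` variant (the admin short-circuit) swallows a surrounding `or` and is silently dropped from an `and`. A minimal, self-contained sketch of just that collapsing behavior — it is not the patch's actual type, which additionally uses `Cow`-based leaves and the hex-encoded ACL checks:

use std::fmt;

enum Filter {
    And(Vec<Filter>),
    Or(Vec<Filter>),
    Leaf(String),
    True,
}

impl Filter {
    /// Any `True` operand makes the whole disjunction true.
    fn or(ops: impl IntoIterator<Item = Filter>) -> Self {
        let mut v = Vec::new();
        for op in ops {
            if matches!(op, Filter::True) {
                return Filter::True;
            }
            v.push(op);
        }
        Filter::Or(v)
    }

    /// `True` is the neutral element of a conjunction and is simply dropped.
    fn and(ops: impl IntoIterator<Item = Filter>) -> Self {
        Filter::And(ops.into_iter().filter(|op| !matches!(op, Filter::True)).collect())
    }
}

impl fmt::Display for Filter {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        fn join(f: &mut fmt::Formatter, ops: &[Filter], sep: &str) -> fmt::Result {
            write!(f, "(")?;
            for (i, op) in ops.iter().enumerate() {
                if i > 0 {
                    write!(f, " {sep} ")?;
                }
                write!(f, "{op}")?;
            }
            write!(f, ")")
        }
        match self {
            Filter::And(ops) => join(f, ops, "AND"),
            Filter::Or(ops) => join(f, ops, "OR"),
            Filter::Leaf(s) => write!(f, "{s}"),
            Filter::True => Ok(()), // `True` on its own formats to the empty string
        }
    }
}

fn main() {
    // Admin case: `or` collapses to `True`, i.e. no filtering at all.
    let admin = Filter::or([Filter::Leaf("listed = true".into()), Filter::True]);
    assert_eq!(admin.to_string(), "");

    // `True` operands vanish from `and` without affecting the rest.
    let user = Filter::and([
        Filter::Leaf("listed = true".into()),
        Filter::True,
        Filter::Leaf("is_live = false".into()),
    ]);
    assert_eq!(user.to_string(), "(listed = true AND is_live = false)");
}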
@@ -331,7 +354,7 @@ pub(crate) async fn all_events( #[graphql(Context = Context)] pub(crate) enum SeriesSearchOutcome { SearchUnavailable(SearchUnavailable), - Results(SearchResults), + Results(SearchResults), } pub(crate) async fn all_series( @@ -339,12 +362,13 @@ pub(crate) async fn all_series( writable_only: bool, context: &Context, ) -> ApiResult { + let elapsed_time = measure_search_duration(); if !context.auth.is_user() { return Err(context.not_logged_in_error()); } - let filter = Filter::make_or_none_for_admins(context, || { - let writable = Filter::acl_access("write_roles", context); + let filter = Filter::make_or_true_for_admins(context, || { + let writable = Filter::write_access(context); // All users can always find all items they have write access to, // regardless whether they are listed or not. @@ -355,7 +379,7 @@ pub(crate) async fn all_series( // Since series read_roles are not used for access control, we only need // to check whether we can return unlisted videos. if context.auth.can_find_unlisted_items(&context.config.auth) { - Filter::None + Filter::True } else { Filter::or([writable, Filter::listed()]) } @@ -371,10 +395,12 @@ } let res = query.execute::().await; let results = handle_search_result!(res, SeriesSearchOutcome); - let items = results.hits.into_iter().map(|h| h.result).collect(); + let items = results.hits.into_iter() + .map(|h| SearchSeries::new(h.result, h.matches_position.as_ref(), context)) + .collect(); let total_hits = results.estimated_total_hits.unwrap_or(0); - Ok(SeriesSearchOutcome::Results(SearchResults { items, total_hits })) + Ok(SeriesSearchOutcome::Results(SearchResults { items, total_hits, duration: elapsed_time() })) } #[derive(juniper::GraphQLUnion)] @@ -389,19 +415,23 @@ pub(crate) async fn all_playlists( writable_only: bool, context: &Context, ) -> ApiResult { + let elapsed_time = measure_search_duration(); if !context.auth.is_user() { return Err(context.not_logged_in_error()); } - let filter = Filter::make_or_none_for_admins(context, || { + let filter = Filter::make_or_true_for_admins(context, || { // All users can always find all playlists they have write access to. If // `writable_only` is false, this API also returns playlists that are // listed and that the user can read. - let writable = Filter::acl_access("write_roles", context); + let writable = Filter::write_access(context); if writable_only { writable } else { - Filter::or([Filter::listed_and_readable(context), writable]) + Filter::or([ + Filter::read_access(context).and_listed(context), + writable, + ]) } }).to_string(); @@ -418,42 +448,49 @@ let items = results.hits.into_iter().map(|h| h.result).collect(); let total_hits = results.estimated_total_hits.unwrap_or(0); - Ok(PlaylistSearchOutcome::Results(SearchResults { items, total_hits })) + Ok(PlaylistSearchOutcome::Results(SearchResults { items, total_hits, duration: elapsed_time() })) } -// TODO: replace usages of this and remove this. -fn acl_filter(action: &str, context: &Context) -> Option { - // If the user is admin, we just skip the filter alltogether as the admin - // can see anything anyway. - if context.auth.is_admin() { - return None; - } - - Some(Filter::acl_access(action, context)) -} enum Filter { // TODO: try to avoid Vec if not necessary. Oftentimes there are only two operands. + + /// Must not contain `Filter::True`, which is handled by `Filter::and`. And(Vec), + + /// Must not contain `Filter::True`, which is handled by `Filter::or`.
Or(Vec), Leaf(Cow<'static, str>), - /// No filter. Formats to empty string and is filtered out if inside the - /// `And` or `Or` operands. - None, + /// A constant `true`. Inside `Or`, results in the whole `Or` expression + /// being replaced by `True`. Inside `And`, this is just filtered out and + /// the remaining operands are evaluated. If formatted on its own, an empty + /// string is emitted. + True, } impl Filter { - fn make_or_none_for_admins(context: &Context, f: impl FnOnce() -> Self) -> Self { - if context.auth.is_admin() { Self::None } else { f() } + fn make_or_true_for_admins(context: &Context, f: impl FnOnce() -> Self) -> Self { + if context.auth.is_admin() { Self::True } else { f() } } fn or(operands: impl IntoIterator) -> Self { - Self::Or(operands.into_iter().collect()) + let mut v = Vec::new(); + for op in operands { + if matches!(op, Self::True) { + return Self::True; + } + v.push(op); + } + Self::Or(v) } fn and(operands: impl IntoIterator) -> Self { - Self::And(operands.into_iter().collect()) + Self::And( + operands.into_iter() + .filter(|op| !matches!(op, Self::True)) + .collect(), + ) } /// Returns the filter "listed = true". @@ -461,22 +498,35 @@ impl Filter { Self::Leaf("listed = true".into()) } - /// Returns a filter checking that the current user has read access and that - /// the item is listed. If the user has the privilege to find unlisted - /// item, the second check is not performed. - fn listed_and_readable(context: &Context) -> Self { - let readable = Self::acl_access("read_roles", context); + /// If the user can find unlisted items, just returns `self`. Otherwise, + /// `self` is ANDed with `Self::listed()`. + fn and_listed(self, context: &Context) -> Self { if context.auth.can_find_unlisted_items(&context.config.auth) { - readable + self } else { - Self::and([readable, Self::listed()]) + Self::and([self, Self::listed()]) } } + fn read_access(context: &Context) -> Self { + Self::make_or_true_for_admins(context, || Self::acl_access_raw("read_roles", context)) + } + + fn write_access(context: &Context) -> Self { + Self::make_or_true_for_admins(context, || Self::acl_access_raw("write_roles", context)) + } + + fn preview_or_read_access(context: &Context) -> Self { + Self::make_or_true_for_admins(context, || Self::or([ + Self::acl_access_raw("read_roles", context), + Self::acl_access_raw("preview_roles", context), + ])) + } + /// Returns a filter checking if `roles_field` has any overlap with the /// current user roles. Encodes all roles as hex to work around Meili's - /// lack of case-sensitive comparison. - fn acl_access(roles_field: &str, context: &Context) -> Self { + /// lack of case-sensitive comparison. Does not handle the ROLE_ADMIN case. + fn acl_access_raw(roles_field: &str, context: &Context) -> Self { use std::io::Write; const HEX_DIGITS: &[u8; 16] = b"0123456789abcdef"; @@ -503,10 +553,8 @@ impl Filter { impl fmt::Display for Filter { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { fn join(f: &mut fmt::Formatter, operands: &[Filter], sep: &str) -> fmt::Result { - if operands.iter().all(|op| matches!(op, Filter::None)) { - return Ok(()); - } - + // We are guaranteed by `and` and `or` methods that there are no + // `Self::True`s in here.
write!(f, "(")?; for (i, operand) in operands.iter().enumerate() { if i > 0 { @@ -521,7 +569,33 @@ impl fmt::Display for Filter { Self::And(operands) => join(f, operands, "AND"), Self::Or(operands) => join(f, operands, "OR"), Self::Leaf(s) => write!(f, "{s}"), - Self::None => Ok(()), + Self::True => Ok(()), } } } + + +fn match_ranges_for<'a>( + match_positions: Option<&'a HashMap>>, + field: &str, +) -> &'a [MatchRange] { + match_positions + .and_then(|m| m.get(field)) + .map(|v| v.as_slice()) + .unwrap_or_default() +} + +fn field_matches_for( + match_positions: Option<&HashMap>>, + field: &str, +) -> Vec { + match_ranges_for(match_positions, field).iter() + .map(|m| ByteSpan { start: m.start as u32, len: m.length as u32 }) + .take(8) // The frontend can only show a limited number anyway + .collect() +} + +pub(crate) fn measure_search_duration() -> impl FnOnce() -> i32 { + let start = Instant::now(); + move || start.elapsed().as_millis() as i32 +} diff --git a/backend/src/api/model/search/playlist.rs b/backend/src/api/model/search/playlist.rs index e3819e164..4970e5651 100644 --- a/backend/src/api/model/search/playlist.rs +++ b/backend/src/api/model/search/playlist.rs @@ -1,5 +1,5 @@ use crate::{ - api::{Context, Node, Id, NodeValue}, + api::{model::search::SearchRealm, Context, Id, Node, NodeValue}, search, }; @@ -28,7 +28,9 @@ impl search::Playlist { self.description.as_deref() } - fn host_realms(&self) -> &[search::Realm] { - &self.host_realms + fn host_realms(&self) -> Vec { + self.host_realms.iter() + .map(|r| SearchRealm::without_matches(r.clone())) + .collect() } } diff --git a/backend/src/api/model/search/realm.rs b/backend/src/api/model/search/realm.rs index d6e9d75c0..ebecc33a9 100644 --- a/backend/src/api/model/search/realm.rs +++ b/backend/src/api/model/search/realm.rs @@ -1,30 +1,60 @@ +use std::collections::HashMap; + +use juniper::GraphQLObject; +use meilisearch_sdk::search::MatchRange; + use crate::{ api::{Context, Node, Id, NodeValue}, search, }; +use super::{field_matches_for, ByteSpan}; -impl Node for search::Realm { - fn id(&self) -> Id { - Id::search_realm(self.id.0) - } +#[derive(Debug, GraphQLObject)] +#[graphql(Context = Context, impl = NodeValue)] +pub(crate) struct SearchRealm { + id: Id, + name: Option, + path: String, + ancestor_names: Vec>, + matches: SearchRealmMatches, } -#[juniper::graphql_object(Context = Context, impl = NodeValue, name = "SearchRealm")] -impl search::Realm { + +#[derive(Debug, GraphQLObject, Default)] +pub struct SearchRealmMatches { + name: Vec, +} + +impl Node for SearchRealm { fn id(&self) -> Id { - Node::id(self) + self.id } +} + - fn name(&self) -> Option<&str> { - self.name.as_deref() +impl SearchRealm { + pub(crate) fn without_matches(src: search::Realm) -> Self { + Self::new_inner(src, SearchRealmMatches::default()) } - fn path(&self) -> &str { - if self.full_path.is_empty() { "/" } else { &self.full_path } + pub(crate) fn new( + src: search::Realm, + match_positions: Option<&HashMap>>, + ) -> Self { + let matches = SearchRealmMatches { + name: field_matches_for(match_positions, "name"), + }; + Self::new_inner(src, matches) } - fn ancestor_names(&self) -> &[Option] { - &self.ancestor_names + fn new_inner(src: search::Realm, matches: SearchRealmMatches) -> Self { + Self { + id: Id::search_realm(src.id.0), + name: src.name, + path: if src.full_path.is_empty() { "/".into() } else { src.full_path }, + ancestor_names: src.ancestor_names, + matches, + } } } diff --git a/backend/src/api/model/search/series.rs 
b/backend/src/api/model/search/series.rs index d7fa54d57..1fed007e0 100644 --- a/backend/src/api/model/search/series.rs +++ b/backend/src/api/model/search/series.rs @@ -1,48 +1,70 @@ +use std::collections::HashMap; + +use juniper::GraphQLObject; +use meilisearch_sdk::search::MatchRange; + use crate::{ api::{Context, Id, Node, NodeValue}, search, HasRoles, }; -use super::ThumbnailInfo; +use super::{field_matches_for, ByteSpan, SearchRealm, ThumbnailInfo}; -impl Node for search::Series { - fn id(&self) -> Id { - Id::search_series(self.id.0) - } +#[derive(Debug, GraphQLObject)] +#[graphql(Context = Context, impl = NodeValue)] +pub(crate) struct SearchSeries { + id: Id, + opencast_id: String, + title: String, + description: Option, + host_realms: Vec, + thumbnails: Vec, + matches: SearchSeriesMatches, } -#[juniper::graphql_object(Context = Context, impl = NodeValue, name = "SearchSeries")] -impl search::Series { - fn id(&self) -> Id { - Node::id(self) - } - fn opencast_id(&self) -> &str { - &self.opencast_id - } - - fn title(&self) -> &str { - &self.title - } +#[derive(Debug, GraphQLObject, Default)] +pub struct SearchSeriesMatches { + title: Vec, + description: Vec, +} - fn description(&self) -> Option<&str> { - self.description.as_deref() +impl Node for SearchSeries { + fn id(&self) -> Id { + self.id } +} - fn host_realms(&self) -> &[search::Realm] { - &self.host_realms - } +impl SearchSeries { + pub(crate) fn new( + src: search::Series, + match_positions: Option<&HashMap>>, + context: &Context, + ) -> Self { + let matches = SearchSeriesMatches { + title: field_matches_for(match_positions, "title"), + description: field_matches_for(match_positions, "description"), + }; - fn thumbnails(&self, context: &Context) -> Vec { - self.thumbnails.iter() - .filter(|info| context.auth.overlaps_roles(&info.read_roles)) - .map(|info| ThumbnailInfo { - thumbnail: info.url.clone(), - audio_only: info.audio_only, - is_live: info.live, - }) - .take(3) - .collect() + Self { + id: Id::search_series(src.id.0), + opencast_id: src.opencast_id, + title: src.title, + description: src.description, + host_realms: src.host_realms.into_iter() + .map(|r| SearchRealm::without_matches(r)) + .collect(), + thumbnails: src.thumbnails.iter() + .filter(|info| context.auth.overlaps_roles(&info.read_roles)) + .map(|info| ThumbnailInfo { + thumbnail: info.url.clone(), + audio_only: info.audio_only, + is_live: info.live, + }) + .take(3) + .collect(), + matches, + } } } diff --git a/backend/src/api/model/series.rs b/backend/src/api/model/series.rs index 645a4290d..d003ad297 100644 --- a/backend/src/api/model/series.rs +++ b/backend/src/api/model/series.rs @@ -4,40 +4,50 @@ use postgres_types::ToSql; use crate::{ api::{ - Context, - err::ApiResult, - Id, + Context, Id, Node, NodeValue, + err::{invalid_input, ApiResult}, model::{ - realm::Realm, event::AuthorizedEvent, + realm::Realm, + acl::{self, Acl}, }, - Node, }, - db::{types::{ExtraMetadata, Key, SeriesState as State}, util::impl_from_db}, + db::{types::SeriesState as State, util::impl_from_db}, + model::{Key, ExtraMetadata}, prelude::*, }; -use super::playlist::VideoListEntry; +use super::{ + block::{BlockValue, NewSeriesBlock, VideoListLayout, VideoListOrder}, + playlist::VideoListEntry, + realm::{NewRealm, RealmSpecifier, RemoveMountedSeriesOutcome, UpdatedRealmName}, +}; pub(crate) struct Series { pub(crate) key: Key, - opencast_id: String, - synced_data: Option, - title: String, - created: Option>, - metadata: Option, + pub(crate) opencast_id: String, + pub(crate) 
synced_data: Option, + pub(crate) title: String, + pub(crate) created: Option>, + pub(crate) metadata: Option, + pub(crate) read_roles: Option>, + pub(crate) write_roles: Option>, } #[derive(GraphQLObject)] -struct SyncedSeriesData { +pub(crate) struct SyncedSeriesData { description: Option, } impl_from_db!( Series, select: { - series.{ id, opencast_id, state, title, description, created, metadata }, + series.{ + id, opencast_id, state, + title, description, created, + metadata, read_roles, write_roles, + }, }, |row| { Series { @@ -46,6 +56,8 @@ impl_from_db!( title: row.title(), created: row.created(), metadata: row.metadata(), + read_roles: row.read_roles(), + write_roles: row.write_roles(), synced_data: (State::Ready == row.state()).then( || SyncedSeriesData { description: row.description(), @@ -86,6 +98,23 @@ impl Series { .pipe(Ok) } + async fn load_acl(&self, context: &Context) -> ApiResult> { + match (self.read_roles.as_ref(), self.write_roles.as_ref()) { + (None, None) => Ok(None), + (read_roles, write_roles) => { + let raw_roles_sql = "\ + select unnest($1::text[]) as role, 'read' as action + union + select unnest($2::text[]) as role, 'write' as action + "; + + acl::load_for(context, raw_roles_sql, dbargs![&read_roles, &write_roles]) + .await + .map(Some) + } + } + } + pub(crate) async fn create(series: NewSeries, context: &Context) -> ApiResult { let selection = Self::select(); let query = format!( @@ -99,10 +128,152 @@ impl Series { .pipe(|row| Self::from_row_start(&row)) .pipe(Ok) } + + pub(crate) async fn announce(series: NewSeries, context: &Context) -> ApiResult { + context.auth.required_trusted_external()?; + Self::create(series, context).await + } + + pub(crate) async fn add_mount_point( + series_oc_id: String, + target_path: String, + context: &Context, + ) -> ApiResult { + context.auth.required_trusted_external()?; + + let series = Self::load_by_opencast_id(series_oc_id, context) + .await? + .ok_or_else(|| invalid_input!("`seriesId` does not refer to a valid series"))?; + + let target_realm = Realm::load_by_path(target_path, context) + .await? + .ok_or_else(|| invalid_input!("`targetPath` does not refer to a valid realm"))?; + + let blocks = BlockValue::load_for_realm(target_realm.key, context).await?; + if !blocks.is_empty() { + return Err(invalid_input!("series can only be mounted in empty realms")); + } + + BlockValue::add_series( + Id::realm(target_realm.key), + 0, + NewSeriesBlock { + series: series.id(), + show_title: false, + show_metadata: true, + order: VideoListOrder::NewToOld, + layout: VideoListLayout::Gallery, + }, + context, + ).await?; + + let block = &BlockValue::load_for_realm(target_realm.key, context).await?[0]; + + Realm::rename( + target_realm.id(), + UpdatedRealmName::from_block(block.id()), + context, + ).await + } + + pub(crate) async fn remove_mount_point( + series_oc_id: String, + path: String, + context: &Context, + ) -> ApiResult { + context.auth.required_trusted_external()?; + + let series = Self::load_by_opencast_id(series_oc_id, context) + .await? + .ok_or_else(|| invalid_input!("`seriesId` does not refer to a valid series"))?; + + let old_realm = Realm::load_by_path(path, context) + .await? 
+ .ok_or_else(|| invalid_input!("`path` does not refer to a valid realm"))?; + + let blocks = BlockValue::load_for_realm(old_realm.key, context).await?; + + if blocks.len() != 1 { + return Err(invalid_input!("series can only be removed if it is the realm's only block")); + } + + if !matches!(&blocks[0], BlockValue::SeriesBlock(b) if b.series == Some(series.id())) { + return Err(invalid_input!("the series is not mounted on the specified realm")); + } + + if old_realm.children(context).await?.len() == 0 { + // The realm has no children, so it can be removed. + let removed_realm = Realm::remove(old_realm.id(), context).await?; + + return Ok(RemoveMountedSeriesOutcome::RemovedRealm(removed_realm)); + } + + if old_realm.name_from_block.map(Id::block) == Some(blocks[0].id()) { + // The realm has its name derived from the series block that is being removed - so the name + // shouldn't be used anymore. Ideally this would restore the previous title, + // but that isn't stored anywhere. Instead the realm is given the name of its path segment. + Realm::rename( + old_realm.id(), + UpdatedRealmName::plain(old_realm.path_segment), + context, + ).await?; + } + + let removed_block = BlockValue::remove(blocks[0].id(), context).await?; + + Ok(RemoveMountedSeriesOutcome::RemovedBlock(removed_block)) + } + + pub(crate) async fn mount( + series: NewSeries, + parent_realm_path: String, + new_realms: Vec, + context: &Context, + ) -> ApiResult { + context.auth.required_trusted_external()?; + + // Check parameters + if new_realms.iter().rev().skip(1).any(|r| r.name.is_none()) { + return Err(invalid_input!("all new realms except the last need to have a name")); + } + + let parent_realm = Realm::load_by_path(parent_realm_path, context) + .await? + .ok_or_else(|| invalid_input!("`parentRealmPath` does not refer to a valid realm"))?; + + if new_realms.is_empty() { + let blocks = BlockValue::load_for_realm(parent_realm.key, context).await?; + if !blocks.is_empty() { + return Err(invalid_input!("series can only be mounted in empty realms")); + } + } + + // Create series + let series = Series::create(series, context).await?; + + // Create realms + let target_realm = { + let mut target_realm = parent_realm; + for RealmSpecifier { name, path_segment } in new_realms { + target_realm = Realm::add(NewRealm { + // The `unwrap_or` case is only potentially used for the + // last realm, which is renamed below anyway. See the check + // above. + name: name.unwrap_or_else(|| "temporary-dummy-name".into()), + path_segment, + parent: Id::realm(target_realm.key), + }, context).await? + } + target_realm + }; + + // Create mount point + Self::add_mount_point(series.opencast_id, target_realm.full_path, context).await + } } /// Represents an Opencast series. -#[graphql_object(Context = Context)] +#[graphql_object(Context = Context, impl = NodeValue)] impl Series { fn id(&self) -> Id { Node::id(self) @@ -128,6 +299,10 @@ impl Series { &self.synced_data } + async fn acl(&self, context: &Context) -> ApiResult> { + self.load_acl(context).await + } + async fn host_realms(&self, context: &Context) -> ApiResult> { let selection = Realm::select(); let query = format!("\ @@ -176,7 +351,7 @@ impl Node for Series { #[derive(GraphQLInputObject)] pub(crate) struct NewSeries { - opencast_id: String, + pub(crate) opencast_id: String, title: String, // TODO In the future this `struct` can be extended with additional // (potentially optional) fields. For now we only need these. 
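All of the mount machinery above (`announce`, `add_mount_point`, `remove_mount_point`, `mount`) is gated behind `required_trusted_external()`, so these mutations are meant for external tooling rather than Tobira's own frontend. A hypothetical client-side sketch of driving `mountSeries` over GraphQL — the `/graphql` path, the `{ id }` selection, and presenting the trusted key as a bearer token are illustration-only assumptions, not something this patch prescribes:

// Assumes reqwest (with the "json" feature) and serde_json as dependencies.
use serde_json::json;

async fn mount_series(client: &reqwest::Client, base: &str, key: &str) -> reqwest::Result<()> {
    let query = r#"mutation($series: NewSeries!, $parent: String!, $realms: [RealmSpecifier!]!) {
        mountSeries(series: $series, parentRealmPath: $parent, newRealms: $realms) { id }
    }"#;
    let variables = json!({
        // Hypothetical values; `opencastId` must refer to a real series.
        "series": { "opencastId": "some-series-uuid", "title": "Example Series" },
        "parent": "/lectures",
        "realms": [{ "name": "Example Series", "pathSegment": "example-series" }]
    });
    client.post(format!("{base}/graphql"))
        .bearer_auth(key) // assumption: how the trusted external key is presented
        .json(&json!({ "query": query, "variables": variables }))
        .send()
        .await?
        .error_for_status()?;
    Ok(())
}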
diff --git a/backend/src/api/model/user.rs b/backend/src/api/model/user.rs index 6cc93b398..d38e5affb 100644 --- a/backend/src/api/model/user.rs +++ b/backend/src/api/model/user.rs @@ -42,21 +42,21 @@ impl User { /// `True` if the user has the permission to upload videos. fn can_upload(&self, context: &Context) -> bool { - self.can_upload(&context.config.auth) + HasRoles::can_upload(self, &context.config.auth) } /// `True` if the user has the permission to use Opencast Studio. fn can_use_studio(&self, context: &Context) -> bool { - self.can_use_studio(&context.config.auth) + HasRoles::can_use_studio(self, &context.config.auth) } /// `True` if the user has the permission to use Opencast Studio. fn can_use_editor(&self, context: &Context) -> bool { - self.can_use_editor(&context.config.auth) + HasRoles::can_use_editor(self, &context.config.auth) } fn can_create_user_realm(&self, context: &Context) -> bool { - self.can_create_user_realm(&context.config.auth) + HasRoles::can_create_user_realm(self, &context.config.auth) } /// `True` if the user is allowed to find unlisted items when editing page content. @@ -70,9 +70,9 @@ impl User { /// are not yet removed from Tobira's database). /// /// Exactly one of `first` and `last` must be set! - #[graphql(arguments(order(default = Default::default())))] async fn my_videos( &self, + #[graphql(default)] order: EventSortOrder, first: Option, after: Option, diff --git a/backend/src/api/mutation.rs b/backend/src/api/mutation.rs index 33bf2abac..b96f622e4 100644 --- a/backend/src/api/mutation.rs +++ b/backend/src/api/mutation.rs @@ -1,15 +1,12 @@ use juniper::graphql_object; -use crate::{ - api::model::event::RemovedEvent, - auth::AuthContext, -}; +use crate::api::model::event::RemovedEvent; use super::{ Context, - err::{ApiResult, invalid_input, not_authorized}, + err::ApiResult, id::Id, - Node, model::{ + acl::AclInputEntry, series::{Series, NewSeries}, realm::{ ChildIndex, @@ -21,6 +18,9 @@ use super::{ UpdatedRealmName, UpdateRealm, RealmSpecifier, + RealmLineageComponent, + CreateRealmLineageOutcome, + RemoveMountedSeriesOutcome, }, block::{ BlockValue, @@ -35,8 +35,6 @@ use super::{ UpdatePlaylistBlock, UpdateVideoBlock, RemovedBlock, - VideoListOrder, - VideoListLayout, }, event::AuthorizedEvent, }, @@ -61,7 +59,7 @@ impl Mutation { /// Deletes the given event. Meaning: a deletion request is sent to Opencast, the event /// is marked as "deletion pending" in Tobira, and fully removed once Opencast /// finished deleting the event. - /// + /// /// Returns the deletion timestamp in case of success and errors otherwise. /// Note that "success" in this case only means the request was successfully sent /// and accepted, not that the deletion itself succeeded, which is instead checked @@ -70,18 +68,25 @@ impl Mutation { AuthorizedEvent::delete(id, context).await } + /// Updates the ACL of a given event by sending the changes to Opencast. + /// The `acl` parameter can include `read` and `write` roles, as these are the + /// only roles that can be assigned in the frontend for now. `preview` and + /// `custom_actions` will be added in the future. + /// If successful, the updated ACL is stored in Tobira without waiting for an upcoming sync; however,
+ async fn update_event_acl(id: Id, acl: Vec, context: &Context) -> ApiResult { + AuthorizedEvent::update_acl(id, acl, context).await + } + /// Sets the order of all children of a specific realm. /// /// `childIndices` must contain at least one element, i.e. do not call this /// for realms without children. - #[graphql( - arguments( - child_indices(default = None), - ) - )] async fn set_child_order( parent: Id, child_order: RealmOrder, + #[graphql(default = None)] child_indices: Option>, context: &Context, ) -> ApiResult { @@ -232,6 +237,45 @@ impl Mutation { BlockValue::remove(id, context).await } + /// Basically `mkdir -p` for realms: makes sure the given realm lineage + /// exists, creating the missing realms. Existing realms are *not* updated. + /// Each realm in the given list is the sub-realm of the previous item in + /// the list. The first item is sub-realm of the root realm. + async fn create_realm_lineage( + realms: Vec, + context: &Context, + ) -> ApiResult { + Realm::create_lineage(realms, context).await + } + + /// Stores series information in Tobira's DB, so it can be mounted without having to be harvested first. + async fn announce_series(series: NewSeries, context: &Context) -> ApiResult { + Series::announce(series, context).await + } + + /// Adds a series block to an empty realm and makes that realm derive its name from said series. + async fn add_series_mount_point( + series_oc_id: String, + target_path: String, + context: &Context, + ) -> ApiResult { + Series::add_mount_point(series_oc_id, target_path, context).await + } + + /// Removes the series block of given series from the given realm. + /// If the realm has sub-realms and used to derive its name from the block, + /// it is renamed to its path segment. If the realm has no sub-realms, + /// it is removed completely. + /// Errors if the given realm does not have exactly one series block referring to the + /// specified series. + async fn remove_series_mount_point( + series_oc_id: String, + path: String, + context: &Context, + ) -> ApiResult { + Series::remove_mount_point(series_oc_id, path, context).await + } + /// Atomically mount a series into an (empty) realm. /// Creates all the necessary realms on the path to the target /// and adds a block with the given series at the leaf. @@ -242,68 +286,6 @@ impl Mutation { new_realms: Vec, context: &Context, ) -> ApiResult { - // Note: This is a rather ad hoc, use-case specific compound mutation. - // So for the sake of simplicity and being able to change it fast - // we just reuse all the mutations we already have. - // Once this code stabilizes, we might want to change that, - // because doing it like this duplicates some work - // like checking moderator rights, input validity, etc. - - if context.auth != AuthContext::TrustedExternal { - return Err(not_authorized!("only trusted external applications can use this mutation")); - } - - if new_realms.iter().rev().skip(1).any(|r| r.name.is_none()) { - return Err(invalid_input!("all new realms except the last need to have a name")); - } - - let parent_realm = Realm::load_by_path(parent_realm_path, context) - .await? 
-            .ok_or_else(|| invalid_input!("`parentRealmPath` does not refer to a valid realm"))?;
-
-        if new_realms.is_empty() {
-            let blocks = BlockValue::load_for_realm(parent_realm.key, context).await?;
-            if !blocks.is_empty() {
-                return Err(invalid_input!("series can only be mounted in empty realms"));
-            }
-        }
-
-        let series = Series::create(series, context).await?;
-
-        let target_realm = {
-            let mut target_realm = parent_realm;
-            for RealmSpecifier { name, path_segment } in new_realms {
-                target_realm = Realm::add(NewRealm {
-                    // The `unwrap_or` case is only potentially used for the
-                    // last realm, which is renamed below anyway. See the check
-                    // above.
-                    name: name.unwrap_or_else(|| "temporary-dummy-name".into()),
-                    path_segment,
-                    parent: Id::realm(target_realm.key),
-                }, context).await?
-            }
-            target_realm
-        };
-
-        BlockValue::add_series(
-            Id::realm(target_realm.key),
-            0,
-            NewSeriesBlock {
-                series: series.id(),
-                show_title: false,
-                show_metadata: true,
-                order: VideoListOrder::NewToOld,
-                layout: VideoListLayout::Gallery,
-            },
-            context,
-        ).await?;
-
-        let block = &BlockValue::load_for_realm(target_realm.key, context).await?[0];
-
-        Realm::rename(
-            target_realm.id(),
-            UpdatedRealmName::from_block(block.id()),
-            context,
-        ).await
+        Series::mount(series, parent_realm_path, new_realms, context).await
     }
 }
diff --git a/backend/src/api/query.rs b/backend/src/api/query.rs
index af3b8952d..127f57e17 100644
--- a/backend/src/api/query.rs
+++ b/backend/src/api/query.rs
@@ -118,7 +118,7 @@ impl Query {
     ///
     /// - Events that the user has write access to (listed & unlisted).
     /// - If `writable_only` is false, this also searches through videos that
-    ///   the user has read access to. However, unless the user has the
+    ///   the user has preview access to. However, unless the user has the
     ///   privilege to find unlisted events, only listed ones are searched.
     async fn search_all_events(
         query: String,
diff --git a/backend/src/api/util.rs b/backend/src/api/util.rs
index ed2b16898..3e945a1ba 100644
--- a/backend/src/api/util.rs
+++ b/backend/src/api/util.rs
@@ -1,12 +1,3 @@
-use std::{collections::HashMap, fmt};
-
-use bytes::BytesMut;
-use fallible_iterator::FallibleIterator;
-use postgres_types::{FromSql, ToSql};
-
-use crate::prelude::*;
-
-
 macro_rules! impl_object_with_dummy_field {
     ($ty:ident) => {
@@ -22,78 +13,3 @@ macro_rules! impl_object_with_dummy_field {
     }
 }

 pub(crate) use impl_object_with_dummy_field;
-
-
-
-
-#[derive(Debug)]
-pub struct TranslatedString<T>(pub(crate) HashMap<T, String>);
-
-impl<T: AsRef<str> + fmt::Debug> ToSql for TranslatedString<T> {
-    fn to_sql(
-        &self,
-        _: &postgres_types::Type,
-        out: &mut BytesMut,
-    ) -> Result<postgres_types::IsNull, Box<dyn std::error::Error + Sync + Send>> {
-        let values = self.0.iter().map(|(k, v)| (k.as_ref(), Some(&**v)));
-        postgres_protocol::types::hstore_to_sql(values, out)?;
-        Ok(postgres_types::IsNull::No)
-    }
-
-    fn accepts(ty: &postgres_types::Type) -> bool {
-        ty.name() == "hstore"
-    }
-
-    postgres_types::to_sql_checked!();
-}
-
-impl<'a> FromSql<'a> for TranslatedString<String> {
-    fn from_sql(
-        _: &postgres_types::Type,
-        raw: &'a [u8],
-    ) -> Result<Self, Box<dyn std::error::Error + Sync + Send>> {
-        postgres_protocol::types::hstore_from_sql(raw)?
-            .map(|(k, v)| {
-                v.map(|v| (k.to_owned(), v.to_owned()))
-                    .ok_or("translated label contained null value in hstore".into())
-            })
-            .collect()
-            .map(Self)
-    }
-
-    fn accepts(ty: &postgres_types::Type) -> bool {
-        ty.name() == "hstore"
-    }
-}
-
-
-#[juniper::graphql_scalar(
-    name = "TranslatedString",
-    description = "A string in different languages",
-)]
-impl<S> GraphQLScalar for TranslatedString<String>
-where
-    S: juniper::ScalarValue + From<&str>
-{
-    fn resolve(&self) -> juniper::Value<S> {
-        use juniper::Value;
-
-        self.0.iter()
-            .map(|(k, v)| (k, juniper::Value::scalar(v.clone())))
-            .collect::<juniper::Object<S>>()
-            .pipe(Value::Object)
-    }
-
-    fn from_input_value(value: &juniper::InputValue) -> Option<Self> {
-        // I did not want to waste time implementing this now, given that we
-        // likely never use it.
-        let _ = value;
-        todo!("TranslatedString cannot be used as input value yet")
-    }
-
-    fn from_str<'a>(value: juniper::ScalarToken<'a>) -> juniper::ParseScalarResult<'a, S> {
-        // See `from_input_value`
-        let _ = value;
-        todo!()
-    }
-}
diff --git a/backend/src/args.rs b/backend/src/args.rs
index f90def7da..f622dfb4c 100644
--- a/backend/src/args.rs
+++ b/backend/src/args.rs
@@ -1,6 +1,6 @@
 //! This module defines the command line arguments Tobira accepts.

-use std::{path::PathBuf, io::IsTerminal};
+use std::path::PathBuf;

 use termcolor::ColorChoice;

@@ -146,10 +146,10 @@ fn parse_color_choice(s: &str) -> Result<ColorChoice, String> {

 impl Args {
     pub(crate) fn stdout_color(&self) -> ColorChoice {
-        if std::io::stdout().is_terminal() { self.color } else { ColorChoice::Never }
+        self.color
     }

     pub(crate) fn stderr_color(&self) -> ColorChoice {
-        if std::io::stderr().is_terminal() { self.color } else { ColorChoice::Never }
+        self.color
     }
 }
diff --git a/backend/src/auth/config.rs b/backend/src/auth/config.rs
index 81de638c7..d6c6b2423 100644
--- a/backend/src/auth/config.rs
+++ b/backend/src/auth/config.rs
@@ -1,10 +1,10 @@
 use std::time::Duration;

 use hyper::{http::HeaderName, Uri};
-use secrecy::Secret;
+use secrecy::SecretString;
 use serde::{Deserialize, Deserializer, de::Error};

-use crate::{config::{parse_normal_http_uri, TranslatedString}, prelude::*};
+use crate::{config::parse_normal_http_uri, model::TranslatedString, prelude::*};

 use super::JwtConfig;

@@ -29,7 +29,7 @@ pub(crate) struct AuthConfig {
     /// without having to invent a user. Note that this should be hard to
     /// guess, and kept secret. Specifically, you are going to want to encrypt
     /// every channel this is sent over.
-    pub(crate) trusted_external_key: Option<Secret<String>>,
+    pub(crate) trusted_external_key: Option<SecretString>,

     /// Determines whether or not Tobira users are getting pre-authenticated against
     /// Opencast when they visit external links like the ones to Opencast Studio
diff --git a/backend/src/auth/mod.rs b/backend/src/auth/mod.rs
index c55442bf4..709c1840d 100644
--- a/backend/src/auth/mod.rs
+++ b/backend/src/auth/mod.rs
@@ -5,11 +5,14 @@ use cookie::Cookie;
 use deadpool_postgres::Client;
 use hyper::{http::HeaderValue, HeaderMap, Request, StatusCode};
 use once_cell::sync::Lazy;
+use regex::Regex;
 use secrecy::ExposeSecret;
 use serde::Deserialize;
 use tokio_postgres::Error as PgError;

 use crate::{
+    api::err::{not_authorized, ApiError},
+    config::Config,
     db::util::select,
     http::{response, Context, Response},
     prelude::*,
@@ -37,7 +40,28 @@ pub(crate) use self::{
 /// administrator.
 pub(crate) const ROLE_ADMIN: &str = "ROLE_ADMIN";

-const ROLE_ANONYMOUS: &str = "ROLE_ANONYMOUS";
+/// (**ETH SPECIAL FEATURE**)
+/// Role used to define username and password for series (used in events).
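+/// A matching role is `ROLE_GROUP_` followed by two 40-character hex strings,
+/// as captured by the regex below.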
+/// In Tobira, these are stored separately during sync and the role isn't used
+/// afterwards. Therefore it should be filtered out.
+pub(crate) static ETH_ROLE_CREDENTIALS_RE: Lazy<Regex> = Lazy::new(|| Regex::new(
+    r"^ROLE_GROUP_([a-fA-F0-9]{40})_([a-fA-F0-9]{40})$"
+).unwrap());
+
+/// (**ETH SPECIAL FEATURE**)
+/// Role used in Admin UI to show the above credentials for some series.
+/// This is not used in Tobira and should be filtered out.
+pub(crate) static ETH_ROLE_PASSWORD_RE: Lazy<Regex> = Lazy::new(|| Regex::new(
+    r"^ROLE_PWD_[a-zA-Z0-9+/]*={0,2}$"
+).unwrap());
+
+pub(crate) fn is_special_eth_role(role: &String, config: &Config) -> bool {
+    config.sync.interpret_eth_passwords && (
+        ETH_ROLE_CREDENTIALS_RE.is_match(role) || ETH_ROLE_PASSWORD_RE.is_match(role)
+    )
+}
+
+pub(crate) const ROLE_ANONYMOUS: &str = "ROLE_ANONYMOUS";
 const ROLE_USER: &str = "ROLE_USER";

 const SESSION_COOKIE: &str = "tobira-session";
@@ -103,6 +127,13 @@ impl AuthContext {
             Self::User(user) => format!("'{}'", user.username).into(),
         }
     }
+
+    pub fn required_trusted_external(&self) -> Result<(), ApiError> {
+        if *self != Self::TrustedExternal {
+            return Err(not_authorized!("only trusted external applications can use this mutation"));
+        }
+        Ok(())
+    }
 }

 impl User {
@@ -124,6 +155,8 @@ impl User {

         if let Some(user) = &mut out {
             user.add_default_roles();
+            // TODO: consider not awaiting here. The result is not important and
+            // we can finish the rest of the API in the meantime.
             ctx.auth_caches.user.upsert_user_info(user, db).await;
         }

diff --git a/backend/src/auth/session_id.rs b/backend/src/auth/session_id.rs
index aee013d18..87a0ae324 100644
--- a/backend/src/auth/session_id.rs
+++ b/backend/src/auth/session_id.rs
@@ -5,7 +5,7 @@ use bstr::ByteSlice;
 use cookie::Cookie;
 use hyper::{HeaderMap, header};
 use postgres_types::ToSql;
-use secrecy::{ExposeSecret, Secret};
+use secrecy::{ExposeSecret, SecretBox};
 use std::time::Duration;
 use tokio_postgres::Error as PgError;

@@ -19,7 +19,7 @@ use super::{SESSION_COOKIE, base64encode};
 const LENGTH: usize = 18;

 /// A session ID (random bytes).
-pub(crate) struct SessionId(pub(crate) Secret<[u8; LENGTH]>);
+pub(crate) struct SessionId(pub(crate) SecretBox<[u8; LENGTH]>);

 impl SessionId {
     /// Creates a new, random session ID.
@@ -49,7 +49,7 @@ impl SessionId {
             let mut bytes = [0; LENGTH];
             base64::engine::general_purpose::URL_SAFE.decode_slice(v, &mut bytes).ok()?;

-            Some(Self(Secret::new(bytes)))
+            Some(Self(SecretBox::new(Box::new(bytes))))
         })
     }

diff --git a/backend/src/cmd/check.rs b/backend/src/cmd/check.rs
index d2676e10e..86a5afd65 100644
--- a/backend/src/cmd/check.rs
+++ b/backend/src/cmd/check.rs
@@ -103,13 +103,8 @@ fn print_outcome<T>(any_errors: &mut bool, label: &str, result: &Result<T>) {

 async fn check_referenced_files(config: &Config) -> Result<()> {
     // TODO: log file & unix socket?
-    let mut files = vec![
-        &config.theme.favicon,
-        &config.theme.logo.large.path,
-    ];
-    files.extend(config.theme.logo.small.as_ref().map(|l| &l.path));
-    files.extend(config.theme.logo.large_dark.as_ref().map(|l| &l.path));
-    files.extend(config.theme.logo.small_dark.as_ref().map(|l| &l.path));
+    let mut files = vec![&config.theme.favicon];
+    files.extend(config.theme.logos.iter().map(|logo| &logo.path));
     files.extend(&config.theme.font.files);
     files.extend(&config.theme.font.extra_css);
     files.extend(config.auth.jwt.secret_key.as_ref());
diff --git a/backend/src/cmd/export_api_schema.rs b/backend/src/cmd/export_api_schema.rs
index 79c83eaa5..fa4f61fc5 100644
--- a/backend/src/cmd/export_api_schema.rs
+++ b/backend/src/cmd/export_api_schema.rs
@@ -21,7 +21,7 @@ const PRELUDE: &str = "\
 ";

 pub(crate) fn run(args: &Args) -> Result<()> {
-    let schema = format!("{}\n{}", PRELUDE, crate::api::root_node().as_schema_language());
+    let schema = format!("{}\n{}", PRELUDE, crate::api::root_node().as_sdl());

     if let Some(target) = &args.output {
         if let Some(parent) = Path::new(&target).parent() {
diff --git a/backend/src/cmd/known_groups.rs b/backend/src/cmd/known_groups.rs
index c4e350528..ba54a2d02 100644
--- a/backend/src/cmd/known_groups.rs
+++ b/backend/src/cmd/known_groups.rs
@@ -5,10 +5,11 @@ use postgres_types::ToSql;
 use serde_json::json;

 use crate::{
-    prelude::*,
-    db,
-    api::{util::TranslatedString, model::known_roles::KnownGroup},
+    api::model::known_roles::KnownGroup,
     config::Config,
+    model::TranslatedString,
+    db,
+    prelude::*,
 };
 use super::prompt_for_yes;

@@ -25,7 +26,7 @@ pub(crate) enum Args {
     ///
     ///     {
     ///         "ROLE_LECTURER": {
-    ///             "label": { "en": "Lecturer", "de": "Vortragende" },
+    ///             "label": { "default": "Lecturer", "de": "Vortragende" },
     ///             "implies": ["ROLE_STAFF"],
     ///             "large": true
     ///         }
@@ -70,7 +71,7 @@ fn print_group(group: &KnownGroup) {
     print!(r#"  {}: {{ "label": {{"#, json!(group.role));

     // Sort by key to get consistent ordering (hashmap order is random).
-    let mut labels = group.label.0.iter().collect::<Vec<_>>();
+    let mut labels = group.label.iter().collect::<Vec<_>>();
     labels.sort();
     for (lang, label) in labels {
         print!(" {}: {}", json!(lang), json!(label));
@@ -112,10 +113,7 @@ async fn upsert(file: &str, config: &Config, tx: Transaction<'_>) -> Result<()>
         .context("failed to deserialize")?;

     // Validate
-    for (role, info) in &groups {
-        if info.label.is_empty() {
-            bail!("No label given for {}", role.0);
-        }
+    for role in groups.keys() {
         if config.auth.is_user_role(&role.0) {
             bail!("Role '{}' is a user role according to 'auth.user_role_prefixes'. \
                 This should be added as a user, not as a group.", role.0);
         }
@@ -131,7 +129,7 @@ async fn upsert(file: &str, config: &Config, tx: Transaction<'_>) -> Result<()>
                 label = excluded.label, \
                 implies = excluded.implies, \
                 large = excluded.large";
-        tx.execute(sql, &[&role, &TranslatedString(info.label), &info.implies, &info.large]).await?;
+        tx.execute(sql, &[&role, &info.label, &info.implies, &info.large]).await?;
     }

     tx.commit().await?;
@@ -185,7 +183,7 @@ async fn clear(tx: Transaction<'_>) -> Result<()> {

 #[derive(serde::Deserialize)]
 struct GroupData {
-    label: HashMap<LangCode, String>,
+    label: TranslatedString,

     #[serde(default)]
     implies: Vec,
@@ -193,29 +191,6 @@ struct GroupData {
     large: bool,
 }

-#[derive(Debug, serde::Deserialize, PartialEq, Eq, Hash)]
-#[serde(try_from = "&str")]
-struct LangCode([u8; 2]);
-
-impl<'a> TryFrom<&'a str> for LangCode {
-    type Error = &'static str;
-
-    fn try_from(v: &'a str) -> std::result::Result<Self, Self::Error> {
-        if !(v.len() == 2 && v.chars().all(|c| c.is_ascii_alphabetic())) {
-            return Err("invalid language code: two ASCII letters expected");
-        }
-
-        let bytes = v.as_bytes();
-        Ok(Self([bytes[0], bytes[1]]))
-    }
-}
-
-impl AsRef<str> for LangCode {
-    fn as_ref(&self) -> &str {
-        std::str::from_utf8(&self.0).unwrap()
-    }
-}
-
 #[derive(Debug, serde::Deserialize, PartialEq, Eq, Hash, ToSql)]
 #[serde(try_from = "String")]
 #[postgres(transparent)]
diff --git a/backend/src/cmd/known_users.rs b/backend/src/cmd/known_users.rs
index 38618724b..38f5b3a3a 100644
--- a/backend/src/cmd/known_users.rs
+++ b/backend/src/cmd/known_users.rs
@@ -2,7 +2,6 @@ use std::collections::{HashMap, HashSet};

 use deadpool_postgres::Transaction;
 use futures::pin_mut;
-use tokio_postgres::binary_copy::BinaryCopyInWriter;

 use crate::{
     prelude::*,
@@ -97,16 +96,11 @@ async fn upsert(file: &str, config: &Config, tx: Transaction<'_>) -> Result<()>
             on commit drop");
     tx.execute(&sql, &[]).await?;

-    let col_list = "username, display_name, email, user_role";
-    let sql = format!("insert into users ({col_list}) values ($1, $2, $3, $4)");
-    let col_types = tx.prepare_cached(&sql).await?;
-    debug!("Prepared DB insertion");
-
-    let sink = tx.copy_in(&format!("copy {tmp_table} ({col_list}) from stdin binary")).await?;
-    let writer = BinaryCopyInWriter::new(sink, col_types.params());
+    let columns = ["username", "display_name", "email", "user_role"];
+    let col_list = columns.join(", ");
+    let writer = db::util::bulk_insert(&tmp_table, &columns, &tx).await?;
     pin_mut!(writer);
-
     for (role, info) in users {
         writer.as_mut().write_raw(dbargs![
             &info.username,
diff --git a/backend/src/config/general.rs b/backend/src/config/general.rs
index b10fa2613..91c4f8272 100644
--- a/backend/src/config/general.rs
+++ b/backend/src/config/general.rs
@@ -1,6 +1,7 @@
 use std::collections::HashMap;

-use super::{HttpHost, TranslatedString};
+use crate::model::TranslatedString;
+use super::HttpHost;


 #[derive(Debug, confique::Config)]
@@ -12,7 +13,7 @@ pub(crate) struct GeneralConfig {

     /// Public URL to Tobira (without path).
     /// Used for RSS feeds, as those require specifying absolute URLs to resources.
-    /// 
+    ///
     /// Example: "https://tobira.my-uni.edu".
     pub(crate) tobira_url: HttpHost,

@@ -22,16 +23,16 @@ pub(crate) struct GeneralConfig {
     /// These can be specified in multiple languages.
     /// Consent is prompted upon first use and only if this is configured. It is
     /// re-prompted when any of these values change.
-    /// 
+    ///
     /// We recommend not to configure this unless absolutely necessary,
     /// in order to not degrade the user experience needlessly.
-    /// 
+    ///
     /// Example:
-    /// 
+    ///
     /// ```
-    /// initial_consent.title.en = "Terms & Conditions"
-    /// initial_consent.button.en = "Agree"
-    /// initial_consent.text.en = """
+    /// initial_consent.title.default = "Terms & Conditions"
+    /// initial_consent.button.default = "Agree"
+    /// initial_consent.text.default = """
     /// To use Tobira, you need to agree to our terms and conditions:
     /// - [Terms](https://www.our-terms.de)
     /// - [Conditions](https://www.our-conditions.de)
@@ -49,13 +50,13 @@ pub(crate) struct GeneralConfig {
     /// add custom ones. Note that these two default links are special and can
     /// be specified with only the shown string. To add custom ones, you need
     /// to define a label and a link. The link is either the same for every language
-    /// or can be specified for each language in the same manner as the label. 
+    /// or can be specified for each language in the same manner as the label.
     /// Example:
     ///
     /// ```
     /// footer_links = [
-    ///     { label = { en = "Example 1" }, link = "https://example.com" },
-    ///     { label = { en = "Example 2" }, link = { en = "https://example.com/en" } },
+    ///     { label = { default = "Example 1" }, link = "https://example.com" },
+    ///     { label = { default = "Example 2" }, link = { default = "https://example.com/en" } },
     ///     "about",
     /// ]
     /// ```
@@ -65,8 +66,8 @@ pub(crate) struct GeneralConfig {
     /// Additional metadata that is shown below a video. Example:
     ///
     ///     [general.metadata]
-    ///     dcterms.spatial = { en = "Location", de = "Ort" }
-    ///     "http://my.domain/xml/namespace".courseLink = { en = "Course", de = "Kurs"}
+    ///     dcterms.spatial = { default = "Location", de = "Ort" }
+    ///     "http://my.domain/xml/namespace".courseLink = { default = "Course", de = "Kurs"}
     ///
     /// As you can see, this is a mapping of a metadata location (the XML
     /// namespace and the name) to a translated label. For the XML namespace
@@ -110,6 +111,23 @@ pub(crate) struct GeneralConfig {
     /// (partial) name.
     #[config(default = false)]
     pub users_searchable: bool,
+
+    /// This allows users to edit the ACL of events for which they have write
+    /// access. Doing so will update the ACL in Opencast and start the
+    /// `republish-metadata` workflow to propagate the changes to other
+    /// publications as well. Instead of waiting for the workflow, however,
+    /// Tobira will also immediately store the updated ACL in its database.
+    ///
+    /// Note that this might lead to situations where the event ACL in Tobira
+    /// is different from that in other publications, mainly if the
+    /// aforementioned workflow fails or takes an unusually long time to complete.
+    #[config(default = true)]
+    pub allow_acl_edit: bool,
+
+    /// Activating this will disable ACL editing for events that are part of a
+    /// series. For the uploader, this means that the ACL of the series will be used.
+    #[config(default = false)]
+    pub lock_acl_to_series: bool,
 }

 const INTERNAL_RESERVED_PATHS: &[&str] = &["favicon.ico", "robots.txt", ".well-known"];
diff --git a/backend/src/config/mod.rs b/backend/src/config/mod.rs
index 4ecc56e94..bc3c08688 100644
--- a/backend/src/config/mod.rs
+++ b/backend/src/config/mod.rs
@@ -12,14 +12,12 @@ use crate::prelude::*;
 mod color;
 mod general;
 mod theme;
-mod translated_string;
 mod matomo;
 mod opencast;
 mod player;
 mod upload;

 pub(crate) use self::{
-    translated_string::TranslatedString,
     theme::{ThemeConfig, LogoDef},
     matomo::MatomoConfig,
     opencast::OpencastConfig,
@@ -50,11 +48,12 @@ const TOBIRA_CONFIG_PATH_ENV: &str = "TOBIRA_CONFIG_PATH";
 /// units: 'ms', 's', 'min', 'h' and 'd'.
 ///
 /// All user-facing texts you can configure here have to be specified per
-/// language, with two letter language key. Only English ('en') is required.
-/// Take `general.site_title` for example:
+/// language, with a two-letter language key. The special key 'default' is
+/// required and used as a fallback for languages that are not specified
+/// explicitly. Take `general.site_title` for example:
 ///
 ///     [general]
-///     site_title.en = "My university"
+///     site_title.default = "My university"
 ///     site_title.de = "Meine Universität"
 ///
 #[derive(Debug, confique::Config)]
@@ -161,14 +160,7 @@ impl Config {
             fix_path(&base, p);
         }

-        fix_path(&base, &mut self.theme.logo.large.path);
-        if let Some(logo) = &mut self.theme.logo.small {
-            fix_path(&base, &mut logo.path);
-        }
-        if let Some(logo) = &mut self.theme.logo.large_dark {
-            fix_path(&base, &mut logo.path);
-        }
-        if let Some(logo) = &mut self.theme.logo.small_dark {
+        for logo in &mut self.theme.logos {
             fix_path(&base, &mut logo.path);
         }
         fix_path(&base, &mut self.theme.favicon);
diff --git a/backend/src/config/opencast.rs b/backend/src/config/opencast.rs
index d859ba817..30e50f248 100644
--- a/backend/src/config/opencast.rs
+++ b/backend/src/config/opencast.rs
@@ -1,6 +1,8 @@
 use std::{str::FromStr, fmt};

 use hyper::Uri;
+use base64::Engine as _;
+use secrecy::{ExposeSecret as _, SecretString};
 use serde::Deserialize;

 use crate::{
@@ -45,6 +47,20 @@ pub(crate) struct OpencastConfig {
     ///
     /// Example: "https://admin.oc.my-uni.edu/editor-ui/index.html".
     pub(crate) editor_url: Option,
+
+    /// Extra Opencast hosts not listed in any other value above that can
+    /// also be trusted.
+    #[config(default = [])]
+    pub(crate) other_hosts: Vec<HttpHost>,
+
+    /// Username of the user used to communicate with Opencast for data syncing
+    /// and external API authentication.
+    /// This user has to have access to all events and series. Currently, that
+    /// user has to be admin.
+    pub user: String,
+
+    /// Password of the user used to communicate with Opencast.
+    password: SecretString,
 }

 impl OpencastConfig {
@@ -107,6 +123,13 @@ impl OpencastConfig {
         })
     }

+    pub(crate) fn basic_auth_header(&self) -> SecretString {
+        let credentials = format!("{}:{}", self.user, self.password.expose_secret());
+        let encoded_credentials = base64::engine::general_purpose::STANDARD.encode(credentials);
+        let auth_header = format!("Basic {}", encoded_credentials);
+        SecretString::new(auth_header.into())
+    }
+
     fn unwrap_host(&self) -> &HttpHost {
         self.host.as_ref().expect("Neither 'opencast.host' nor override host set!")
     }
diff --git a/backend/src/config/theme.rs b/backend/src/config/theme.rs
index 456ee8cfe..2d1dbebb4 100644
--- a/backend/src/config/theme.rs
+++ b/backend/src/config/theme.rs
@@ -1,6 +1,8 @@
-use std::{path::PathBuf, fmt};
+use std::{collections::HashMap, fmt, path::PathBuf};
+use serde::{Deserialize, Serialize};

-use super::color::ColorConfig;
+use crate::model::LangKey;
+use super::{color::ColorConfig};


 #[derive(Debug, confique::Config)]
@@ -10,14 +12,25 @@ pub(crate) struct ThemeConfig {
     #[config(default = 85)]
     pub(crate) header_height: u32,

-    /// Logo used in the top left corner of the page. Using SVG logos is recommended.
-    /// See the documentation on theming/logos for more info!
-    #[config(nested)]
-    pub(crate) logo: LogoConfig,
-
     /// Path to an SVG file that is used as favicon.
     pub(crate) favicon: PathBuf,

+    /// Logo used in the top left corner of the page. Using SVG logos is recommended.
+    /// You can configure specific logos for small and large screens, dark and light mode,
+    /// and any number of languages. Example:
+    ///
+    /// ```
+    /// logos = [
+    ///     { path = "logo-wide-light.svg", mode = "light", size = "wide", resolution = [425, 182] },
+    ///     { path = "logo-wide-dark.svg", mode = "dark", size = "wide", resolution = [425, 182] },
+    ///     { path = "logo-small.svg", size = "narrow", resolution = [212, 182] },
+    /// ]
+    /// ```
+    ///
+    /// See the documentation on theming/logos for more info and additional examples!
+    #[config(validate = validate_logos)]
+    pub(crate) logos: Vec<LogoDef>,
+
     /// Colors used in the UI. Specified in sRGB.
     #[config(nested)]
     pub(crate) color: ColorConfig,
@@ -26,36 +39,42 @@ pub(crate) struct ThemeConfig {
     pub(crate) font: FontConfig,
 }

-
-#[derive(Debug, confique::Config)]
-pub(crate) struct LogoConfig {
-    /// The normal, usually wide logo that is shown on desktop screens. The
-    /// value is a map with a `path` and `resolution` key:
-    ///
-    ///     large = { path = "logo.svg", resolution = [20, 8] }
-    ///
-    /// The resolution is only an aspect ratio. It is used to avoid layout
-    /// shifts in the frontend by allocating the correct size for the logo
-    /// before the browser loaded the file.
-    pub(crate) large: LogoDef,
-
-    /// A less wide logo used for narrow screens.
-    pub(crate) small: Option<LogoDef>,
-
-    /// Large logo for dark mode usage.
-    pub(crate) large_dark: Option<LogoDef>,
-
-    /// Small logo for dark mode usage.
-    pub(crate) small_dark: Option<LogoDef>,
-}
-
 #[derive(Debug, Clone, serde::Deserialize)]
 pub(crate) struct LogoDef {
+    pub(crate) size: Option<LogoSize>,
+    pub(crate) mode: Option<LogoMode>,
+    pub(crate) lang: Option<LangKey>,
     pub(crate) path: PathBuf,
     pub(crate) resolution: LogoResolution,
 }

-#[derive(Clone, serde::Serialize, serde::Deserialize)]
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize, Hash)]
+#[serde(rename_all = "lowercase")]
+pub(crate) enum LogoSize {
+    Wide,
+    Narrow,
+}
+
+impl fmt::Display for LogoSize {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        self.serialize(f)
+    }
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize, Hash)]
+#[serde(rename_all = "lowercase")]
+pub(crate) enum LogoMode {
+    Light,
+    Dark,
+}
+
+impl fmt::Display for LogoMode {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        self.serialize(f)
+    }
+}
+
+#[derive(Clone, Serialize, Deserialize)]
 pub(crate) struct LogoResolution(pub(crate) [u32; 2]);

 impl fmt::Debug for LogoResolution {
@@ -128,3 +147,74 @@ impl ThemeConfig {
         out
     }
 }
+
+fn validate_logos(logos: &Vec<LogoDef>) -> Result<(), String> {
+    enum LangLogo {
+        Universal(usize),
+        LangSpecific(HashMap<LangKey, usize>),
+    }
+
+    let all_modes = [LogoMode::Light, LogoMode::Dark];
+    let all_sizes = [LogoSize::Wide, LogoSize::Narrow];
+
+    let mut cases = HashMap::new();
+    for (i, logo) in logos.iter().enumerate() {
+        let modes = logo.mode.map(|m| vec![m]).unwrap_or(all_modes.to_vec());
+        let sizes = logo.size.map(|s| vec![s]).unwrap_or(all_sizes.to_vec());
+
+        for &mode in &modes {
+            for &size in &sizes {
+                let key = (mode, size);
+
+                if let Some(entry) = cases.get_mut(&key) {
+                    let conflicting = match (entry, &logo.lang) {
+                        (LangLogo::LangSpecific(m), Some(lang)) => m.insert(lang.clone(), i),
+                        (LangLogo::LangSpecific(m), None) => m.values().next().copied(),
+                        (LangLogo::Universal(c), _) => Some(*c),
+                    };
+
+                    if let Some(conflicting) = conflicting {
+                        return Err(format!(
+                            "ambiguous logo definition: \
+                                entry {i} (path: '{curr_path}') conflicts with \
+                                entry {prev_index} (path: '{prev_path}'). \
+                                Both define a {mode} {size} logo, which is only allowed \
+                                if both have different 'lang' keys! Consider adding 'mode' \
+                                or 'size' fields to make entries more specific.",
+                            i = i + 1,
+                            prev_index = conflicting + 1,
+                            curr_path = logo.path.display(),
+                            prev_path = logos[conflicting].path.display(),
+                        ));
+                    }
+                } else {
+                    cases.insert(key, match &logo.lang {
+                        Some(lang) => LangLogo::LangSpecific(HashMap::from([(lang.clone(), i)])),
+                        None => LangLogo::Universal(i),
+                    });
+                }
+            }
+        }
+    }
+
+    // Check that all cases are defined
+    for mode in all_modes {
+        for size in all_sizes {
+            match cases.get(&(mode, size)) {
+                None => return Err(format!(
+                    "incomplete logo configuration: no {mode} {size} logo defined",
+                )),
+                Some(LangLogo::LangSpecific(m)) if !m.contains_key(&LangKey::Default) => {
+                    return Err(format!(
+                        "incomplete logo configuration: {mode} {size} logo is \
+                            missing `lang = '*'` entry",
+                    ));
+                }
+                _ => {}
+            }
+        }
+    }
+
+    Ok(())
+}
diff --git a/backend/src/config/translated_string.rs b/backend/src/config/translated_string.rs
deleted file mode 100644
index a1306e066..000000000
--- a/backend/src/config/translated_string.rs
+++ /dev/null
@@ -1,56 +0,0 @@
-use std::{collections::HashMap, fmt};
-use serde::Deserialize;
-
-
-/// A configurable string specified in different languages. Language 'en' always
-/// has to be specified.
-#[derive(serde::Serialize, Clone)]
-pub(crate) struct TranslatedString(HashMap<String, String>);
-
-impl TranslatedString {
-    pub(crate) const LANGUAGES: &'static [&'static str] = &["en", "de"];
-
-    pub(crate) fn en(&self) -> &str {
-        &self.0["en"]
-    }
-}
-
-impl<'de> Deserialize<'de> for TranslatedString {
-    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
-    where
-        D: serde::Deserializer<'de>,
-    {
-        use serde::de::Error;
-
-        let map = <HashMap<String, String>>::deserialize(deserializer).map_err(|e| {
-            D::Error::custom(format!(
-                "invalid translated string, expected object with keys 'en', 'de', ... ({})",
-                e,
-            ))
-        })?;
-
-        // Make sure only valid languages are specified
-        if let Some(invalid) = map.keys().find(|key| !Self::LANGUAGES.contains(&key.as_str())) {
-            return Err(D::Error::custom(format!(
-                "'{}' is not a valid language key for translated string (valid keys: {:?})",
-                invalid,
-                Self::LANGUAGES,
-            )));
-        }
-
-        if !map.contains_key("en") {
-            return Err(D::Error::custom(
-                "translated string not specified for language 'en', but it has to be"
-            ));
-        }
-
-        Ok(Self(map))
-    }
-}
-
-impl fmt::Debug for TranslatedString {
-    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        f.write_str("TranslatedString ")?;
-        f.debug_map().entries(self.0.iter()).finish()
-    }
-}
diff --git a/backend/src/db/migrations.rs b/backend/src/db/migrations.rs
index 0a4f70abc..48c94271f 100644
--- a/backend/src/db/migrations.rs
+++ b/backend/src/db/migrations.rs
@@ -370,4 +370,7 @@ static MIGRATIONS: Lazy<BTreeMap<u64, Migration>> = include_migrations![
     35: "playlists",
     36: "playlist-blocks",
     37: "redo-search-triggers-and-listed",
+    38: "event-texts",
+    39: "preview-roles-and-credentials",
+    40: "realm-names-constraint-revision",
 ];
diff --git a/backend/src/db/migrations/38-event-texts.sql b/backend/src/db/migrations/38-event-texts.sql
new file mode 100644
index 000000000..8073ca1ff
--- /dev/null
+++ b/backend/src/db/migrations/38-event-texts.sql
@@ -0,0 +1,161 @@
+-- Adds tables, types and triggers to manage texts detected inside videos, e.g.
+-- OCR'ed slide texts or subtitles.
+
+
+-- A text paired with a timespan in a video. Both `start` and `end` are
+-- specified in ms from the start of the video.
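+-- For example, a caption line shown from 0:05 to 0:08 could be stored as
+-- `(5000, 8000, 'Welcome to this lecture')` (illustrative values).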
+create type timespan_text as (
+    span_start bigint,
+    span_end bigint,
+    t text
+);
+
+create type text_asset_type as enum ('caption', 'slide-text');
+
+
+-- This table stores the parsed texts we fetched from Opencast.
+create table event_texts (
+    uri text not null,
+    event_id bigint not null
+        references all_events on delete cascade,
+    ty text_asset_type not null,
+    texts timespan_text[] not null,
+
+    -- When the file was fetched & parsed.
+    fetch_time timestamp with time zone not null,
+
+    -- In theory the uri should be unique already, but just in case Opencast
+    -- returns the same asset URL for two different events...
+    primary key (uri, event_id)
+);
+
+create index idx_event_texts_event on event_texts (event_id);
+
+
+-- This table acts as a queue where events are pushed whenever they need
+-- (re)fetching of text-based resources.
+create table event_texts_queue (
+    event_id bigint primary key
+        references all_events on delete cascade,
+
+    -- Timestamp before which this queue entry should not be processed.
+    fetch_after timestamp with time zone not null,
+
+    -- How many times Tobira has already unsuccessfully tried to fetch
+    -- attachments for this event.
+    retry_count int not null
+);
+
+create index idx_event_texts_queue_fetch_after on event_texts_queue (fetch_after);
+
+-- Insert all events into the queue
+insert into event_texts_queue (event_id, fetch_after, retry_count)
+select id, updated, 0
+    from events
+    where array_length(captions, 1) > 0 or slide_text is not null;
+
+-- Create triggers to automatically enqueue events when they are inserted or
+-- updated. In the latter case, it's not sufficient to only act when the
+-- captions or slide_text field has changed: the URL could stay the same but
+-- the contents can change. Whenever the `updated` field is changed, the files
+-- could have changed as well.
+create function queue_event_for_text_extract()
+    returns trigger
+    language plpgsql
+as $$
+begin
+    insert into event_texts_queue (event_id, fetch_after, retry_count)
+    values (new.id, new.updated, 0)
+    on conflict(event_id) do update set
+        retry_count = 0,
+        fetch_after = new.updated;
+    return null;
+end;
+$$;
+
+create trigger queue_event_for_text_extract_on_insert
+after insert
+on all_events for each row
+when (array_length(new.captions, 1) > 0 or new.slide_text is not null)
+execute procedure queue_event_for_text_extract();
+
+create trigger queue_event_for_text_extract_on_update
+after update of updated, slide_text, captions
+on all_events for each row
+execute procedure queue_event_for_text_extract();
+
+
+-- This is almost the same definition as in migration `37`, only the `texts`
+-- selected column was added.
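+-- (`create or replace view` works here because the new columns are appended
+-- at the end; Postgres only allows replacing a view when the existing output
+-- columns keep their names, order and types.)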
+create or replace view search_events as
+    select
+        events.id, events.opencast_id, events.state,
+        events.series, series.title as series_title,
+        events.title, events.description, events.creators,
+        events.thumbnail, events.duration,
+        events.is_live, events.updated, events.created, events.start_time, events.end_time,
+        events.read_roles, events.write_roles,
+        coalesce(
+            array_agg(
+                distinct
+                row(search_realms.*)::search_realms
+            ) filter(where search_realms.id is not null),
+            '{}'
+        ) as host_realms,
+        is_audio_only(events.tracks) as audio_only,
+        coalesce(
+            array_agg(playlists.id)
+                filter(where playlists.id is not null),
+            '{}'
+        ) as containing_playlists,
+        (
+            select array_agg(t)
+            from (
+                select unnest(texts) as t
+                from event_texts
+                where event_id = events.id and ty = 'slide-text'
+            ) as subquery
+        ) as slide_texts,
+        (
+            select array_agg(t)
+            from (
+                select unnest(texts) as t
+                from event_texts
+                where event_id = events.id and ty = 'caption'
+            ) as subquery
+        ) as caption_texts
+    from all_events as events
+    left join series on events.series = series.id
+    -- This syntax instead of `foo = any(...)` to use the index, which is not
+    -- otherwise used.
+    left join playlists on array[events.opencast_id] <@ event_entry_ids(entries)
+    left join blocks on (
+        type = 'series' and blocks.series = events.series
+        or type = 'video' and blocks.video = events.id
+        or type = 'playlist' and blocks.playlist = playlists.id
+    )
+    left join search_realms on search_realms.id = blocks.realm
+    group by events.id, series.id;
+
+
+-- Add triggers to queue events for search indexing when their texts change.
+
+create function queue_event_for_search_after_text_update()
+    returns trigger
+    language plpgsql
+as $$
+begin
+    insert into search_index_queue (item_id, kind)
+    select old.event_id, 'event'::search_index_item_kind where tg_op <> 'INSERT'
+    union all
+    select new.event_id, 'event'::search_index_item_kind where tg_op <> 'DELETE'
+    on conflict do nothing;
+    return null;
+end;
+$$;
+
+create trigger queue_event_for_search_after_text_update
+after insert or delete or update
+on event_texts
+for each row
+execute procedure queue_event_for_search_after_text_update();
diff --git a/backend/src/db/migrations/39-preview-roles-and-credentials.sql b/backend/src/db/migrations/39-preview-roles-and-credentials.sql
new file mode 100644
index 000000000..565f53b39
--- /dev/null
+++ b/backend/src/db/migrations/39-preview-roles-and-credentials.sql
@@ -0,0 +1,75 @@
+-- Adds columns `preview_roles` and `credentials`.
+
+-- Users with a preview role may only view text metadata of an event.
+-- Any other action will require read or write roles (these imply preview rights).
+
+-- `credentials` is an optional column which holds a user/group name and a corresponding
+-- password. If set, users need these credentials to gain read access to an event.
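+--
+-- In other words: a user holding only a preview role sees that the event
+-- exists (its title and other text metadata) but cannot play it; entering
+-- the correct credentials then grants read access.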
+create type credentials as (
+    name text,     -- as `:`
+    password text  -- as `:`
+);
+
+alter table all_events
+    add column credentials credentials,
+    add column preview_roles text[] not null default '{}';
+
+alter table series
+    add column credentials credentials;
+
+
+-- replace outdated view to include new columns
+create or replace view events as select * from all_events where tobira_deletion_timestamp is null;
+
+-- add `preview_roles` and `has_password` to `search_events` view
+drop view search_events;
+create view search_events as
+    select
+        events.id, events.opencast_id, events.state,
+        events.series, series.title as series_title,
+        events.title, events.description, events.creators,
+        events.thumbnail, events.duration,
+        events.is_live, events.updated, events.created, events.start_time, events.end_time,
+        events.read_roles, events.write_roles, events.preview_roles,
+        coalesce(
+            array_agg(
+                distinct
+                row(search_realms.*)::search_realms
+            ) filter(where search_realms.id is not null),
+            '{}'
+        ) as host_realms,
+        is_audio_only(events.tracks) as audio_only,
+        coalesce(
+            array_agg(playlists.id)
+                filter(where playlists.id is not null),
+            '{}'
+        ) as containing_playlists,
+        (
+            select array_agg(t)
+            from (
+                select unnest(texts) as t
+                from event_texts
+                where event_id = events.id and ty = 'slide-text'
+            ) as subquery
+        ) as slide_texts,
+        (
+            select array_agg(t)
+            from (
+                select unnest(texts) as t
+                from event_texts
+                where event_id = events.id and ty = 'caption'
+            ) as subquery
+        ) as caption_texts,
+        (events.credentials is not null) as has_password
+    from all_events as events
+    left join series on events.series = series.id
+    -- This syntax instead of `foo = any(...)` to use the index, which is not
+    -- otherwise used.
+    left join playlists on array[events.opencast_id] <@ event_entry_ids(entries)
+    left join blocks on (
+        type = 'series' and blocks.series = events.series
+        or type = 'video' and blocks.video = events.id
+        or type = 'playlist' and blocks.playlist = playlists.id
+    )
+    left join search_realms on search_realms.id = blocks.realm
+    group by events.id, series.id;
diff --git a/backend/src/db/migrations/40-realm-names-constraint-revision.sql b/backend/src/db/migrations/40-realm-names-constraint-revision.sql
new file mode 100644
index 000000000..96d6b65c2
--- /dev/null
+++ b/backend/src/db/migrations/40-realm-names-constraint-revision.sql
@@ -0,0 +1,10 @@
+-- Adjusts name_source constraint to allow custom names for root, including null.
+
+alter table realms
+    drop constraint valid_name_source,
+    add constraint valid_name_source check (
+        -- Root is allowed to have no name.
+        (id = 0 and name is null or name_from_block is null)
+        -- All other realms have either a plain or derived name.
+        or (id <> 0 and (name is null) != (name_from_block is null))
+    );
diff --git a/backend/src/db/mod.rs b/backend/src/db/mod.rs
index b6fad9e4b..18562e97f 100644
--- a/backend/src/db/mod.rs
+++ b/backend/src/db/mod.rs
@@ -1,7 +1,7 @@
 //! Database related things.

 use deadpool_postgres::{Config as PoolConfig, Pool, Runtime};
-use secrecy::{ExposeSecret, Secret};
+use secrecy::{ExposeSecret, SecretString};
 use rustls::{
     Error, DigitallySignedStruct,
     client::danger::{ServerCertVerifier, ServerCertVerified, HandshakeSignatureValid},
@@ -42,7 +42,7 @@ pub(crate) struct DbConfig {
     user: String,

     /// The password of the database user.
-    password: Secret<String>,
+    password: SecretString,

     /// The host the database server is running on.
     #[config(default = "127.0.0.1")]
@@ -116,7 +116,7 @@ pub(crate) type DbConnection = deadpool::managed::Object<Manager>;
 pub(crate) async fn create_pool(config: &DbConfig) -> Result<Pool> {
     let pool_config = PoolConfig {
         user: Some(config.user.clone()),
-        password: Some(config.password.expose_secret().clone()),
+        password: Some(config.password.expose_secret().to_owned()),
         host: Some(config.host.clone()),
         port: Some(config.port),
         dbname: Some(config.database.clone()),
@@ -142,9 +142,12 @@ pub(crate) async fn create_pool(config: &DbConfig) -> Result<Pool> {
     // just empty. Otherwise we load system-wide root CAs.
     let mut root_certs = rustls::RootCertStore::empty();
     if config.tls_mode == TlsMode::On {
-        let system_certs = rustls_native_certs::load_native_certs()
-            .context("failed to load all system-wide certificates")?;
+        let system_cert_res = rustls_native_certs::load_native_certs();
+        for e in &system_cert_res.errors {
+            warn!("Error while loading system certificates: {e}");
+        }
+        let system_certs = system_cert_res.certs;
         let system_count = system_certs.len();
         for cert in system_certs {
             root_certs.add(cert)
diff --git a/backend/src/db/tests/mod.rs b/backend/src/db/tests/mod.rs
index 70d2f184a..b576f86a3 100644
--- a/backend/src/db/tests/mod.rs
+++ b/backend/src/db/tests/mod.rs
@@ -1,4 +1,4 @@
-use crate::{prelude::*, db::types::Key};
+use crate::{prelude::*, model::Key};
 use super::DbConfig;

 use self::util::TestDb;
diff --git a/backend/src/db/tests/search_queue.rs b/backend/src/db/tests/search_queue.rs
index 805351ff7..16a0627d8 100644
--- a/backend/src/db/tests/search_queue.rs
+++ b/backend/src/db/tests/search_queue.rs
@@ -1,4 +1,4 @@
-use crate::{prelude::*, db::types::Key, search::IndexItemKind};
+use crate::{prelude::*, model::Key, search::IndexItemKind};

 use super::util::TestDb;
diff --git a/backend/src/db/tests/util.rs b/backend/src/db/tests/util.rs
index eb1f9a616..a4531deb4 100644
--- a/backend/src/db/tests/util.rs
+++ b/backend/src/db/tests/util.rs
@@ -2,7 +2,7 @@ use std::{ops::Deref, collections::HashSet};
 use secrecy::ExposeSecret;
 use tokio_postgres::{Client, NoTls};

-use crate::{prelude::*, db::types::Key, search::IndexItemKind};
+use crate::{prelude::*, model::Key, search::IndexItemKind};
 use super::DbConfig;
diff --git a/backend/src/db/types.rs b/backend/src/db/types.rs
index 7af234d3c..961c3a70d 100644
--- a/backend/src/db/types.rs
+++ b/backend/src/db/types.rs
@@ -1,52 +1,12 @@
-use std::{fmt, collections::HashMap};
+use std::collections::HashMap;

 use bytes::BytesMut;
+use chrono::{DateTime, Utc};
 use juniper::GraphQLEnum;
 use postgres_types::{FromSql, ToSql};
 use serde::{Deserialize, Serialize};

-
-/// Our primary database ID type, which we call "key". In the database, it's a
-/// `bigint` (`i64`), but we have a separate Rust type for it for several
-/// reasons. Implements `ToSql` and `FromSql` by casting to/from `i64`.
-#[derive(Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)]
-pub(crate) struct Key(pub(crate) u64);
-
-impl ToSql for Key {
-    fn to_sql(
-        &self,
-        ty: &postgres_types::Type,
-        out: &mut BytesMut,
-    ) -> Result<postgres_types::IsNull, Box<dyn std::error::Error + Sync + Send>> {
-        (self.0 as i64).to_sql(ty, out)
-    }
-
-    fn accepts(ty: &postgres_types::Type) -> bool {
-        <i64 as ToSql>::accepts(ty)
-    }
-
-    postgres_types::to_sql_checked!();
-}
-
-impl<'a> FromSql<'a> for Key {
-    fn from_sql(
-        ty: &postgres_types::Type,
-        raw: &'a [u8],
-    ) -> Result<Self, Box<dyn std::error::Error + Sync + Send>> {
-        i64::from_sql(ty, raw).map(|i| Key(i as u64))
-    }
-
-    fn accepts(ty: &postgres_types::Type) -> bool {
-        <i64 as FromSql>::accepts(ty)
-    }
-}
-
-impl fmt::Debug for Key {
-    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        let mut buf = [0; 11];
-        write!(f, "Key({} :: {})", self.0 as i64, self.to_base64(&mut buf))
-    }
-}
+use crate::model::Key;


 /// Represents the `event_track` type defined in `5-events.sql`.
@@ -75,6 +35,14 @@ pub struct EventCaption {
     pub lang: Option<String>,
 }

+#[derive(Debug, ToSql)]
+#[postgres(name = "event_texts_queue")]
+pub struct EventTextsQueueRecord {
+    pub event_id: Key,
+    pub fetch_after: DateTime<Utc>,
+    pub retry_count: i32,
+}
+
 /// Represents the `event_state` type defined in `05-events.sql`.
 #[derive(Debug, Clone, Copy, PartialEq, Eq, FromSql, ToSql)]
 #[postgres(name = "event_state")]
@@ -125,57 +93,25 @@ pub struct SearchThumbnailInfo {
     pub read_roles: Vec<String>,
 }

-/// Represents extra metadata in the DB. Is a map from "namespace" to a
-/// `string -> string array` map.
-///
-/// Each namespace key is a URL pointing to an XML namespace definition OR
-/// `"dcterms"` for the dc terms (most common namespace). The value for each
-/// namespace is a simple string-key map where each value is an array of string
-/// values.
-#[derive(Debug, Serialize, Deserialize, Default)]
-#[cfg_attr(test, derive(PartialEq, Eq))]
-pub(crate) struct ExtraMetadata {
-    /// Metadata of the dcterms
-    #[serde(default)]
-    pub(crate) dcterms: MetadataMap,
-
-    /// Extended metadata.
-    #[serde(flatten)]
-    pub(crate) extended: HashMap<String, MetadataMap>,
+/// Represents the `timespan_text` type.
+#[derive(Debug, FromSql, ToSql)]
+#[postgres(name = "timespan_text")]
+pub struct TimespanText {
+    pub span_start: i64,
+    pub span_end: i64,
+    pub t: String,
 }

-type MetadataMap = HashMap<String, Vec<String>>;
-
-impl ToSql for ExtraMetadata {
-    fn to_sql(
-        &self,
-        ty: &postgres_types::Type,
-        out: &mut BytesMut,
-    ) -> Result<postgres_types::IsNull, Box<dyn std::error::Error + Sync + Send>> {
-        serde_json::to_value(self)
-            .expect("failed to convert `ExtraMetadata` to JSON value")
-            .to_sql(ty, out)
-    }
-
-    fn accepts(ty: &postgres_types::Type) -> bool {
-        <serde_json::Value as ToSql>::accepts(ty)
-    }
-
-    postgres_types::to_sql_checked!();
+#[derive(Debug, Clone, Copy, PartialEq, Eq, FromSql, ToSql, GraphQLEnum)]
+#[postgres(name = "text_asset_type")]
+pub enum TextAssetType {
+    #[postgres(name = "caption")]
+    Caption,
+    #[postgres(name = "slide-text")]
+    SlideText,
 }

-impl<'a> FromSql<'a> for ExtraMetadata {
-    fn from_sql(
-        ty: &postgres_types::Type,
-        raw: &'a [u8],
-    ) -> Result<Self, Box<dyn std::error::Error + Sync + Send>> {
-        serde_json::from_value(<_>::from_sql(ty, raw)?).map_err(Into::into)
-    }

-    fn accepts(ty: &postgres_types::Type) -> bool {
-        <serde_json::Value as FromSql>::accepts(ty)
-    }
-}


 /// Represents the type for the `custom_action_roles` field from `32-custom-actions.sql`.
 /// This holds a mapping of actions to lists holding roles that are allowed
@@ -214,3 +150,10 @@ impl<'a> FromSql<'a> for CustomActions {
         <serde_json::Value as FromSql>::accepts(ty)
     }
 }
+
+#[derive(Debug, ToSql, FromSql)]
+#[postgres(name = "credentials")]
+pub(crate) struct Credentials {
+    pub(crate) name: String,
+    pub(crate) password: String,
+}
diff --git a/backend/src/db/util.rs b/backend/src/db/util.rs
index 2f34c2550..72f3da72f 100644
--- a/backend/src/db/util.rs
+++ b/backend/src/db/util.rs
@@ -1,6 +1,6 @@
 use std::{future::Future, fmt, collections::HashMap};
 use postgres_types::FromSql;
-use tokio_postgres::{RowStream, Error, Row};
+use tokio_postgres::{binary_copy::BinaryCopyInWriter, Error, Row, RowStream};

 use crate::prelude::*;

@@ -34,6 +34,24 @@ where
         .await
 }

+pub(crate) async fn bulk_insert(
+    table: &str,
+    columns: &[&str],
+    tx: &deadpool_postgres::Transaction<'_>,
+) -> Result<BinaryCopyInWriter> {
+    let col_list = columns.join(", ");
+    let placeholders = (1..=columns.len())
+        .map(|i| format!("${i}"))
+        .collect::<Vec<_>>()
+        .join(", ");
+
+    let sql = format!("insert into {table} ({col_list}) values ({placeholders})");
+    let col_types = tx.prepare_cached(&sql).await?;
+
+    let sink = tx.copy_in(&format!("copy {table} ({col_list}) from stdin binary")).await?;
+    Ok(BinaryCopyInWriter::new(sink, col_types.params()))
+}
+


 // ==============================================================================================
@@ -183,6 +201,7 @@ impl<'a> SqlSelection<'a> {
     /// For example, the column selection `${table:foo}.banana` would normally
     /// be emitted as `foo.banana`. To instead output just `banana`, call
     /// `.with_omitted_table_prefix("foo")`.
+    #[allow(dead_code)]
     pub(crate) fn with_omitted_table_prefix(mut self, table: &'a str) -> Self {
         self.table_renames.insert(table, None);
         self
diff --git a/backend/src/http/assets.rs b/backend/src/http/assets.rs
index c78e54050..6fbaaa475 100644
--- a/backend/src/http/assets.rs
+++ b/backend/src/http/assets.rs
@@ -4,7 +4,12 @@ use reinda::Embeds;
 use secrecy::ExposeSecret;
 use serde_json::json;

-use crate::{auth::AuthSource, config::{Config, LogoDef}, prelude::*, util::ByteBody};
+use crate::{
+    auth::AuthSource,
+    config::{Config, LogoDef},
+    prelude::*,
+    util::ByteBody,
+};

 use super::{handlers::CommonHeadersExt, Response};

@@ -43,50 +48,43 @@ impl Assets {
         //
         // TODO: adjust file extension according to actual file path, to avoid
         // PNG files being served as `.svg`.
-        let logo_files = || [
-            ("large", "logo-large.svg", Some(&config.theme.logo.large)),
-            ("small", "logo-small.svg", config.theme.logo.small.as_ref()),
-            ("largeDark", "logo-large-dark.svg", config.theme.logo.large_dark.as_ref()),
-            ("smallDark", "logo-small-dark.svg", config.theme.logo.small_dark.as_ref()),
-        ].into_iter().filter_map(|(config_field, http_path, logo)| {
-            logo.map(|logo| (config_field, http_path, &logo.path))
-        });
+        let logo_files: Vec<_> = config.theme.logos
+            .iter()
+            .map(|logo| (generate_http_path(logo), logo.path.clone()))
+            .collect();

         let mut builder = reinda::Assets::builder();

         // Add logo & favicon files
         builder.add_file(FAVICON_FILE, &config.theme.favicon).with_hash();
-        for (_, http_path, logo_path) in logo_files() {
-            builder.add_file(http_path, logo_path).with_hash();
+        for (http_path, logo_path) in &logo_files {
+            builder.add_file(http_path.clone(), logo_path.clone()).with_hash();
         }

-
         // ----- Main HTML file -----------------------------------------------------
         //
         // We use a "modifier" to adjust the file, including the frontend
         // config, and in particular: refer to the correct paths (which are
         // potentially hashed). We also insert other variables and code.
-        let deps = [FAVICON_FILE, FONTS_CSS_FILE]
-            .into_iter()
-            .chain(logo_files().map(|(_, http_path, _)| http_path));
+        let deps = logo_files.into_iter()
+            .map(|(http_path, _)| http_path)
+            .chain([FAVICON_FILE, FONTS_CSS_FILE].map(ToString::to_string));

         builder.add_embedded(INDEX_FILE, &EMBEDS[INDEX_FILE]).with_modifier(deps, {
             let frontend_config = frontend_config(config);
-            let html_title = config.general.site_title.en().to_owned();
+            let html_title = config.general.site_title.default().to_owned();
             let global_style = config.theme.to_css();
             let matomo_code = config.matomo.js_code().unwrap_or_default();
-            let logo_paths = logo_files()
-                .map(|(config_field, http_path, _)| (config_field, http_path))
-                .collect::<Vec<_>>();

             move |original, ctx| {
-                // Fill logo path in frontend config and convert it to string.
                 let mut frontend_config = frontend_config.clone();
-                for (config_field, http_path) in &logo_paths {
-                    let actual_path = format!("/~assets/{}", ctx.resolve_path(http_path));
-                    frontend_config["logo"][config_field]["path"] = json!(actual_path);
+                for logo in frontend_config["logos"].as_array_mut().expect("logos is not an array") {
+                    let original_path = logo["path"].as_str().unwrap();
+                    let resolved = ctx.resolve_path(original_path);
+                    logo["path"] = format!("/~assets/{}", resolved).into();
                 }
+
                 let frontend_config = if cfg!(debug_assertions) {
                     serde_json::to_string_pretty(&frontend_config).unwrap()
                 } else {
@@ -245,11 +243,15 @@ impl Assets {
 }

 fn frontend_config(config: &Config) -> serde_json::Value {
-    let logo_obj = |logo_def: &LogoDef| json!({
-        // The path will be added later in the modifier
-        "path": "",
-        "resolution": logo_def.resolution,
-    });
+    let logo_entries = config.theme.logos.iter()
+        .map(|logo| json!({
+            "size": logo.size.as_ref().map(ToString::to_string),
+            "mode": logo.mode.as_ref().map(ToString::to_string),
+            "lang": logo.lang.as_ref().map(ToString::to_string),
+            "path": generate_http_path(logo),
+            "resolution": logo.resolution,
+        }))
+        .collect::<Vec<_>>();

     json!({
         "version": {
@@ -279,6 +281,8 @@ fn frontend_config(config: &Config) -> serde_json::Value {
         "initialConsent": config.general.initial_consent,
         "showDownloadButton": config.general.show_download_button,
         "usersSearchable": config.general.users_searchable,
+        "allowAclEdit": config.general.allow_acl_edit,
+        "lockAclToSeries": config.general.lock_acl_to_series,
         "footerLinks": config.general.footer_links,
         "metadataLabels": config.general.metadata,
         "paellaPluginConfig": config.player.paella_plugin_config,
@@ -288,14 +292,20 @@ fn frontend_config(config: &Config) -> serde_json::Value {
             "studioUrl": config.opencast.studio_url().to_string(),
             "editorUrl": config.opencast.editor_url().to_string(),
         },
-        "logo": {
-            "large": logo_obj(&config.theme.logo.large),
-            "small": config.theme.logo.small.as_ref().map(logo_obj),
-            "largeDark": config.theme.logo.large_dark.as_ref().map(logo_obj),
-            "smallDark": config.theme.logo.small_dark.as_ref().map(logo_obj),
-        },
+        "logos": logo_entries,
         "sync": {
            "pollPeriod": config.sync.poll_period.as_secs_f64(),
        },
    })
}
+
+/// Generates the HTTP path for a logo based on its `size`, `mode` and `lang` attributes.
+/// These are joined with `-`.
+/// Defaults to `"logo"` if no optional attributes were provided.
+fn generate_http_path(logo: &LogoDef) -> String {
+    let size = logo.size.as_ref().map(|s| format!("-{}", s)).unwrap_or_default();
+    let mode = logo.mode.as_ref().map(|m| format!("-{}", m)).unwrap_or_default();
+    let lang = logo.lang.as_ref().map(|l| format!("-{}", l)).unwrap_or_default();
+
+    format!("logo{size}{mode}{lang}.svg")
+}
diff --git a/backend/src/http/handlers.rs b/backend/src/http/handlers.rs
index 23e290c22..88993197b 100644
--- a/backend/src/http/handlers.rs
+++ b/backend/src/http/handlers.rs
@@ -4,7 +4,7 @@ use hyper::{
     http::{uri::PathAndQuery, HeaderValue},
     Method, Request, StatusCode, Uri,
 };
-use juniper::{http::GraphQLResponse, graphql_value};
+use juniper::{graphql_value, http::{GraphQLRequest, GraphQLResponse}};
 use std::{
     collections::HashSet,
     fmt,
@@ -30,19 +30,8 @@ use super::{Context, Response, response};

 /// This is the main HTTP entry point, called for each incoming request.
 pub(super) async fn handle(req: Request<Incoming>, ctx: Arc<Context>) -> Response {
     let time_incoming = Instant::now();
-    trace!(
-        method = ?req.method(),
-        path = req.uri().path_and_query().map_or("", |pq| pq.as_str()),
-        "Incoming HTTP request",
-    );
-    if ctx.config.log.log_http_headers {
-        let mut out = String::new();
-        for (name, value) in req.headers() {
-            use std::fmt::Write;
-            write!(out, "\n  {}: {}", name, String::from_utf8_lossy(value.as_bytes())).unwrap();
-        }
-        trace!("HTTP Headers: {}", out);
-    }
+    super::log::req::log(&req);
+    super::log::headers::log(&req);

     let method = req.method().clone();
     let path = req.uri().path().trim_end_matches('/');
@@ -185,39 +174,12 @@ async fn handle_rss_request(path: &str, ctx: &Arc<Context>) -> Result<Response
 async fn handle_api(req: Request<Incoming>, ctx: &Context) -> Result<Response, Response> {
-    // TODO: With Juniper 0.16, this function can likely be simplified!
-
-    /// This is basically `juniper::http::GraphQLRequest`. We unfortunately have
-    /// to duplicate it here to get access to the fields (which are private in
-    /// Juniper 0.15).
-    #[derive(serde::Deserialize)]
-    struct GraphQLReq {
-        query: String,
-        variables: Option<InputValue>,
-        #[serde(rename = "operationName")]
-        operation_name: Option<String>,
-    }
-
-    impl GraphQLReq {
-        fn variables(&self) -> juniper::Variables {
-            self.variables
-                .as_ref()
-                .and_then(|iv| {
-                    iv.to_object_value().map(|o| {
-                        o.into_iter()
-                            .map(|(k, v)| (k.to_owned(), v.clone()))
-                            .collect()
-                    })
-                })
-                .unwrap_or_default()
-        }
-    }
-
-    impl fmt::Display for GraphQLReq {
+    struct GqlReqPrinter<'a>(&'a GraphQLRequest);
+    impl fmt::Display for GqlReqPrinter<'_> {
         fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-            writeln!(f, "Query:\n{}", self.query)?;
-            writeln!(f, "Operation name: {:?}", self.operation_name)?;
-            writeln!(f, "Variables: {}", serde_json::to_string_pretty(&self.variables).unwrap())?;
+            writeln!(f, "Query:\n{}", self.0.query)?;
+            writeln!(f, "Operation name: {:?}", self.0.operation_name)?;
+            writeln!(f, "Variables: {}", serde_json::to_string_pretty(&self.0.variables).unwrap())?;
             Ok(())
         }
     }
@@ -245,7 +207,7 @@ async fn handle_api(req: Request<Incoming>, ctx: &Context) -> Result<Response
-    let gql_request = serde_json::from_slice::<GraphQLReq>(&raw_body).map_err(|e| {
+    let gql_request = serde_json::from_slice::<GraphQLRequest>(&raw_body).map_err(|e| {
         warn!("Failed to deserialize GraphQL request: {e}");
         response::bad_request("invalid GraphQL request body")
     })?;
@@ -327,7 +289,7 @@ async fn handle_api(req: Request<Incoming>, ctx: &Context) -> Result<Response
-            debug!("Failed request:\n{gql_request}");
+            debug!("Failed request:\n{}", GqlReqPrinter(&gql_request));
             debug!("Rolling back DB transaction...");
             if let Err(e) = tx.rollback().await {
                 error!("Failed to rollback transaction: {e}\nWill give up now. Transaction \
@@ -356,16 +318,6 @@ async fn handle_api(req: Request<Incoming>, ctx: &Context) -> Result<Response
             let error_to_msg = |e: &juniper::ExecutionError| {
-                // Uh oh: `message` is a `#[doc(hidden)]` method, which usually
-                // means that the library authors only need it public for macro
-                // purposes and that its not actually part of the public API
-                // that is evolved through semver. But: lots of things in
-                // Juniper are weird, so this doesn't necessarily have any
-                // intent behind it. Also, we can always get at the same data
-                // by serializing this error as JSON and poking it out like
-                // that. Using the method is just easier. If the method is ever
-                // removed, we have to use the JSON solution. But I'm very sure
-                // it won't be removed in 0.15.x anymore.
                 format!("{} (at `{}`)", e.error().message(), e.path().join("."))
             };
@@ -415,7 +367,7 @@ async fn handle_api(req: Request<Incoming>, ctx: &Context) -> Result<Response
diff --git a/backend/src/http/log.rs b/backend/src/http/log.rs
new file mode 100644
--- /dev/null
+++ b/backend/src/http/log.rs
+pub mod req {
+    use super::*;
+
+    pub fn log<B>(req: &Request<B>) {
+        trace!(
+            method = ?req.method(),
+            path = req.uri().path_and_query().map_or("", |pq| pq.as_str()),
+            "Incoming HTTP request",
+        );
+    }
+}
+
+
+
+pub mod headers {
+    use super::*;
+
+    pub fn log<B>(req: &Request<B>) {
+        if tracing::enabled!(tracing::Level::TRACE) {
+            let mut out = String::new();
+            for (name, value) in req.headers() {
+                use std::fmt::Write;
+                write!(out, "\n  {}: {}", name, String::from_utf8_lossy(value.as_bytes())).unwrap();
+            }
+            trace!("HTTP Headers: {}", out);
+        }
+    }
+}
diff --git a/backend/src/http/mod.rs b/backend/src/http/mod.rs
index 02565fbdd..4cef544b8 100644
--- a/backend/src/http/mod.rs
+++ b/backend/src/http/mod.rs
@@ -38,6 +38,7 @@ use self::{

 mod assets;
 mod handlers;
+mod log;
 pub(crate) mod response;

diff --git a/backend/src/logger.rs b/backend/src/logger.rs
index 80bb5a92f..31f8024f7 100644
--- a/backend/src/logger.rs
+++ b/backend/src/logger.rs
@@ -52,11 +52,6 @@ pub(crate) struct LogConfig {
     /// If this is set to `false`, log messages are not written to stdout.
     #[config(default = true)]
     pub(crate) stdout: bool,
-
-    /// If set to `true`, HTTP headers of each incoming request are logged
-    /// (with 'trace' level).
-    #[config(default = false)]
-    pub(crate) log_http_headers: bool,
 }

 #[derive(Debug, Deserialize)]
diff --git a/backend/src/main.rs b/backend/src/main.rs
index 9787b61d8..115efbeac 100644
--- a/backend/src/main.rs
+++ b/backend/src/main.rs
@@ -24,6 +24,7 @@ mod db;
 mod http;
 mod logger;
 mod metrics;
+mod model;
 mod prelude;
 mod search;
 mod sync;
@@ -83,6 +84,9 @@ async fn run() -> Result<()> {
     bunt::set_stdout_color_choice(args.stdout_color());
     bunt::set_stderr_color_choice(args.stderr_color());

+    rustls::crypto::ring::default_provider().install_default()
+        .map_err(|_| anyhow!("failed to install crypto provider"))?;
+
     // Dispatch subcommand.
match &args.cmd { Command::Serve { shared } => { @@ -131,7 +135,7 @@ async fn start_server(config: Config) -> Result<()> { info!("Starting Tobira backend ..."); trace!("Configuration: {:#?}", config); let db = connect_and_migrate_db(&config).await?; - let search = search::Client::new(config.meili.clone()); + let search = search::Client::new(config.meili.clone())?; if let Err(e) = search.check_connection().await { warn!("Could not connect to Meili search index: {e:?}"); } @@ -152,6 +156,7 @@ async fn start_worker(config: Config) -> Result { let mut search_conn = db.get().await?; let sync_conn = db.get().await?; + let text_conn = db.get().await?; let db_maintenance_conn = db.get().await?; let stats_conn = db.get().await?; let auth_config = config.auth.clone(); @@ -165,6 +170,10 @@ async fn start_worker(config: Config) -> Result { res.map(|()| unreachable!("sync task unexpectedly stopped")) .context("error synchronizing with Opencast") } + res = sync::text::fetch_update(text_conn, &config, true) => { + res.map(|()| unreachable!("sync text task unexpectedly stopped")) + .context("error downloading text assets") + } never = sync::stats::run_daemon(stats_conn, &config) => { never } never = auth::db_maintenance(&db_maintenance_conn, &auth_config) => { never } } diff --git a/backend/src/model/extra_metadata.rs b/backend/src/model/extra_metadata.rs new file mode 100644 index 000000000..a851218e8 --- /dev/null +++ b/backend/src/model/extra_metadata.rs @@ -0,0 +1,98 @@ +use std::collections::HashMap; + +use bytes::BytesMut; +use postgres_types::{FromSql, ToSql}; +use serde::{Deserialize, Serialize}; +use juniper::{GraphQLScalar, InputValue, ScalarValue}; + +use crate::prelude::*; + + + +/// Represents extra metadata in the DB. Is a map from "namespace" to a +/// `string -> string array` map. +/// +/// Each namespace key is a URL pointing to an XML namespace definition OR +/// `"dcterms"` for the dc terms (most common namespace). The value for each +/// namespace is a simple string-key map where each value is an array of string +/// values. +#[derive(Debug, Serialize, Deserialize, Default, GraphQLScalar)] +#[cfg_attr(test, derive(PartialEq, Eq))] +#[graphql( + description = "Arbitrary metadata for events/series. Serialized as JSON object.", + with = Self, + parse_token(String), +)] +pub(crate) struct ExtraMetadata { + /// Metadata of the dcterms + #[serde(default)] + pub(crate) dcterms: MetadataMap, + + /// Extended metadata. 
+ #[serde(flatten)] + pub(crate) extended: HashMap, +} + +type MetadataMap = HashMap>; + +impl ToSql for ExtraMetadata { + fn to_sql( + &self, + ty: &postgres_types::Type, + out: &mut BytesMut, + ) -> Result> { + serde_json::to_value(self) + .expect("failed to convert `ExtraMetadata` to JSON value") + .to_sql(ty, out) + } + + fn accepts(ty: &postgres_types::Type) -> bool { + ::accepts(ty) + } + + postgres_types::to_sql_checked!(); +} + +impl<'a> FromSql<'a> for ExtraMetadata { + fn from_sql( + ty: &postgres_types::Type, + raw: &'a [u8], + ) -> Result> { + serde_json::from_value(<_>::from_sql(ty, raw)?).map_err(Into::into) + } + + fn accepts(ty: &postgres_types::Type) -> bool { + ::accepts(ty) + } +} + +impl ExtraMetadata { + fn to_output(&self) -> juniper::Value { + use juniper::Value; + + std::iter::once(("dcterms", &self.dcterms)) + .chain(self.extended.iter().map(|(k, v)| (&**k, v))) + .map(|(k, v)| { + let value = v.iter() + .map(|(k, v)| { + let elements = v.iter() + .map(|s| Value::Scalar(S::from(s.clone()))) + .collect(); + (k, Value::List(elements)) + }) + .collect::>(); + + (k, Value::Object(value)) + }) + .collect::>() + .pipe(Value::Object) + } + + fn from_input(input: &InputValue) -> Result { + // I did not want to waste time implementing this now, given that we + // likely never use it. + let _ = input; + todo!("ExtraMetadata cannot be used as input value yet") + } +} + diff --git a/backend/src/model/key.rs b/backend/src/model/key.rs new file mode 100644 index 000000000..f8f77d80f --- /dev/null +++ b/backend/src/model/key.rs @@ -0,0 +1,94 @@ +use std::fmt; + +use bytes::BytesMut; +use postgres_types::{FromSql, ToSql}; +use serde::{Deserialize, Serialize}; + +use crate::util::{BASE64_DIGITS, base64_decode}; + + + +/// Our primary ID type, which we call "key". In the database, it's a +/// `bigint` (`i64`), but we have a separate Rust type for it for several +/// reasons. Implements `ToSql` and `FromSql` by casting to/from `i64`. +#[derive(Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)] +pub(crate) struct Key(pub(crate) u64); + +impl Key { + pub(crate) fn from_base64(s: &str) -> Option { + if s.len() != 11 { + return None; + } + + decode_base64(s.as_bytes()) + } + + pub(crate) fn to_base64<'a>(&self, out: &'a mut [u8; 11]) -> &'a str { + // Base64 encoding. After this loop, `n` is always 0, because `u64::MAX` + // divided by 64 eleven times is 0. + let mut n = self.0; + for i in (0..out.len()).rev() { + out[i] = BASE64_DIGITS[(n % 64) as usize]; + n /= 64; + } + debug_assert!(n == 0); + + std::str::from_utf8(out) + .expect("bug: base64 did produce non-ASCII character") + } +} + +fn decode_base64(src: &[u8]) -> Option { + let src: [u8; 11] = src.try_into().ok()?; + + // Make sure the string doesn't decode to a number > `u64::MAX`. Luckily, + // checking that is easy. `u64::MAX` encodes to `P__________`, so the next + // higher number would carry through and make the highest digit a `Q`. So we + // just make sure the first digit is between 'A' and 'P'. 
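A quick illustration of the encoding scheme and the `'A'..='P'` check described above. The digit table here is an assumption (the real one is `crate::util::BASE64_DIGITS`), but it must map 0 to 'A' and 63 to '_' for the `P__________` observation to hold:

```rust
// Illustration only, not the crate's actual table.
const DIGITS: &[u8; 64] =
    b"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-_";

fn encode(mut n: u64) -> String {
    let mut out = [0u8; 11];
    for slot in out.iter_mut().rev() {
        *slot = DIGITS[(n % 64) as usize];
        n /= 64;
    }
    // 64^11 = 2^66 > 2^64, so eleven digits always suffice.
    String::from_utf8(out.to_vec()).unwrap()
}

fn main() {
    assert_eq!(encode(0), "AAAAAAAAAAA");
    assert_eq!(encode(u64::MAX), "P__________");
    // Hence the decoder can reject anything whose first character is
    // past 'P' without decoding the rest.
}
```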
+ if src[0] > b'P' || src[0] < b'A' { + return None; + } + + src.iter() + .rev() + .enumerate() + .map(|(i, &d)| base64_decode(d).map(|n| n as u64 * 64u64.pow(i as u32))) + .sum::>() + .map(Key) +} + +impl ToSql for Key { + fn to_sql( + &self, + ty: &postgres_types::Type, + out: &mut BytesMut, + ) -> Result> { + (self.0 as i64).to_sql(ty, out) + } + + fn accepts(ty: &postgres_types::Type) -> bool { + ::accepts(ty) + } + + postgres_types::to_sql_checked!(); +} + +impl<'a> FromSql<'a> for Key { + fn from_sql( + ty: &postgres_types::Type, + raw: &'a [u8], + ) -> Result> { + i64::from_sql(ty, raw).map(|i| Key(i as u64)) + } + + fn accepts(ty: &postgres_types::Type) -> bool { + ::accepts(ty) + } +} + +impl fmt::Debug for Key { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + let mut buf = [0; 11]; + write!(f, "Key({} :: {})", self.0 as i64, self.to_base64(&mut buf)) + } +} diff --git a/backend/src/model/mod.rs b/backend/src/model/mod.rs new file mode 100644 index 000000000..eaa0ce23f --- /dev/null +++ b/backend/src/model/mod.rs @@ -0,0 +1,18 @@ +//! Items that define the domain data model and logic. +//! +//! There are many types that represent "user-visible data", i.e. data that +//! directly models the application domain and not technical helpers (like a DB +//! pool). These are big high level types like `Event`, but also things like +//! `EventTrack` and `TranslatedString`. These commonly don't neatly fit into +//! either of `db`, `api` or any other submodule as they are used in multiple +//! situations (loading from DB, exposing via API, ...). + +mod extra_metadata; +mod key; +mod translated_string; + +pub(crate) use self::{ + extra_metadata::ExtraMetadata, + key::Key, + translated_string::{LangKey, TranslatedString}, +}; diff --git a/backend/src/model/translated_string.rs b/backend/src/model/translated_string.rs new file mode 100644 index 000000000..28360f4bb --- /dev/null +++ b/backend/src/model/translated_string.rs @@ -0,0 +1,136 @@ +use std::{collections::HashMap, fmt, ops::Deref, str::FromStr}; +use bytes::BytesMut; +use fallible_iterator::FallibleIterator; +use juniper::{GraphQLScalar, InputValue, ScalarValue}; +use postgres_types::{FromSql, ToSql}; +use serde::{Deserialize, Serialize}; +use anyhow::{anyhow, Error}; + +use crate::prelude::*; + + +/// A string specified in different languages. Entry 'default' is required. +#[derive(Serialize, Deserialize, Clone, GraphQLScalar)] +#[serde(try_from = "HashMap")] +#[graphql(parse_token(String))] +pub(crate) struct TranslatedString(HashMap); + +impl TranslatedString { + pub(crate) fn default(&self) -> &str { + &self.0[&LangKey::Default] + } + + fn to_output(&self) -> juniper::Value { + self.0.iter() + .map(|(k, v)| (k.as_ref(), juniper::Value::scalar(v.to_owned()))) + .collect::>() + .pipe(juniper::Value::Object) + } + + fn from_input(input: &InputValue) -> Result { + // I did not want to waste time implementing this now, given that we + // likely never use it. 
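The `default` entry requirement that the `TryFrom<HashMap<LangKey, String>>` impl further down enforces, in a self-contained sketch (plain string keys stand in for `LangKey` here; the values are invented):

```rust
use std::collections::HashMap;

// Simplified stand-in for the real TryFrom impl's check.
fn validate(map: &HashMap<String, String>) -> Result<(), String> {
    if !map.contains_key("default") {
        return Err("Translated string must include 'default' entry.".into());
    }
    Ok(())
}

fn main() {
    let ok = HashMap::from([
        ("default".to_owned(), "Location".to_owned()),
        ("de".to_owned(), "Ort".to_owned()),
    ]);
    assert!(validate(&ok).is_ok());

    // A map with only a language-specific entry is rejected:
    let missing = HashMap::from([("de".to_owned(), "Ort".to_owned())]);
    assert!(validate(&missing).is_err());
}
```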
+ let _ = input; + todo!("TranslatedString cannot be used as input value yet") + } +} + +impl Deref for TranslatedString { + type Target = HashMap; + fn deref(&self) -> &Self::Target { + &self.0 + } +} + +impl TryFrom> for TranslatedString { + type Error = Error; + + fn try_from(map: HashMap) -> Result { + if !map.contains_key(&LangKey::Default) { + return Err(anyhow!("Translated string must include 'default' entry.")); + } + + Ok(Self(map)) + } +} + +impl fmt::Debug for TranslatedString { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.write_str("TranslatedString ")?; + f.debug_map().entries(self.0.iter()).finish() + } +} + +#[derive(Clone, Serialize, Deserialize, PartialEq, Eq, Hash, Debug, PartialOrd, Ord)] +#[serde(rename_all = "lowercase")] +pub(crate) enum LangKey { + #[serde(alias = "*")] + Default, + En, + De, +} + +impl fmt::Display for LangKey { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + self.serialize(f) + } +} + +impl AsRef for LangKey { + fn as_ref(&self) -> &str { + match self { + LangKey::Default => "default", + LangKey::En => "en", + LangKey::De => "de", + } + } +} + +impl FromStr for LangKey { + type Err = serde::de::value::Error; + + fn from_str(s: &str) -> std::result::Result { + Self::deserialize(serde::de::value::BorrowedStrDeserializer::new(s)) + } +} + +impl ToSql for TranslatedString { + fn to_sql( + &self, + _: &postgres_types::Type, + out: &mut BytesMut, + ) -> Result> { + let values = self.0.iter().map(|(k, v)| (k.as_ref(), Some(v.as_str()))); + postgres_protocol::types::hstore_to_sql(values, out)?; + Ok(postgres_types::IsNull::No) + } + + fn accepts(ty: &postgres_types::Type) -> bool { + ty.name() == "hstore" + } + + postgres_types::to_sql_checked!(); +} + + + +impl<'a> FromSql<'a> for TranslatedString { + fn from_sql( + _: &postgres_types::Type, + raw: &'a [u8], + ) -> Result> { + let map: HashMap = postgres_protocol::types::hstore_from_sql(raw)? + .map(|(k, v)| { + let v = v.ok_or("translated label contained null value in hstore")?; + let k = k.parse()?; + Ok((k, v.to_owned())) + }) + .collect()?; + + Ok(map.try_into()?) 
+ } + + fn accepts(ty: &postgres_types::Type) -> bool { + ty.name() == "hstore" + } +} diff --git a/backend/src/rss.rs b/backend/src/rss.rs index 2ce02806f..0c8f51397 100644 --- a/backend/src/rss.rs +++ b/backend/src/rss.rs @@ -6,8 +6,9 @@ use futures::TryStreamExt; use ogrim::xml; use crate::{ - db::{types::{EventTrack, Key}, self, util::{impl_from_db, FromDb, dbargs}}, + db::{types::EventTrack, self, util::{impl_from_db, FromDb, dbargs}}, http::{Context, response::{bad_request, self, not_found}, Response}, + model::Key, config::HttpHost, prelude::*, }; diff --git a/backend/src/search/event.rs b/backend/src/search/event.rs index 71e45a5f5..564cb9aee 100644 --- a/backend/src/search/event.rs +++ b/backend/src/search/event.rs @@ -1,14 +1,28 @@ +use std::{cmp::{max, min}, collections::{BTreeMap, BinaryHeap, HashMap}, fmt::Write}; + use chrono::{DateTime, Utc}; -use meilisearch_sdk::indexes::Index; +use fallible_iterator::FallibleIterator; +use meilisearch_sdk::{indexes::Index, search::MatchRange}; +use postgres_types::FromSql; use serde::{Serialize, Deserialize}; use tokio_postgres::GenericClient; use crate::{ + api::model::search::{ByteSpan, TextMatch}, + db::{ + types::{TextAssetType, TimespanText}, + util::{collect_rows_mapped, impl_from_db} + }, + model::Key, prelude::*, - db::{types::Key, util::{collect_rows_mapped, impl_from_db}}, + util::{base64_decode, BASE64_DIGITS}, }; -use super::{realm::Realm, util::{self, FieldAbilities}, IndexItem, IndexItemKind, SearchId}; +use super::{ + realm::Realm, + util::{self, is_stop_word, FieldAbilities}, + IndexItem, IndexItemKind, SearchId, +}; @@ -31,6 +45,7 @@ pub(crate) struct Event { pub(crate) end_time_timestamp: Option, pub(crate) is_live: bool, pub(crate) audio_only: bool, + pub(crate) has_password: bool, // These are filterable. All roles are hex encoded to work around Meilis // inability to filter case-sensitively. For roles, we have to compare @@ -42,6 +57,7 @@ pub(crate) struct Event { // we just assume that the cases where this matters are very rare. And in // those cases we just accept that our endpoint returns fewer than X // items. + pub(crate) preview_roles: Vec, pub(crate) read_roles: Vec, pub(crate) write_roles: Vec, @@ -49,6 +65,9 @@ pub(crate) struct Event { // store it explicitly to filter for this condition in Meili. 
pub(crate) listed: bool, pub(crate) host_realms: Vec<Realm>, + + pub(crate) caption_texts: TextSearchIndex, + pub(crate) slide_texts: TextSearchIndex, } impl IndexItem for Event { @@ -64,7 +83,8 @@ impl_from_db!( search_events.{ id, series, series_title, title, description, creators, thumbnail, duration, is_live, updated, created, start_time, end_time, audio_only, - read_roles, write_roles, host_realms, + read_roles, write_roles, preview_roles, has_password, + host_realms, slide_texts, caption_texts, }, }, |row| { @@ -90,10 +110,16 @@ impl_from_db!( start_time: row.start_time(), end_time, end_time_timestamp: end_time.map(|date_time| date_time.timestamp()), + preview_roles: util::encode_acl(&row.preview_roles::<Vec<String>>()), read_roles: util::encode_acl(&row.read_roles::<Vec<String>>()), write_roles: util::encode_acl(&row.write_roles::<Vec<String>>()), listed: host_realms.iter().any(|realm| !realm.is_user_realm()), host_realms, + slide_texts: row.slide_texts::<Option<TextSearchIndex>>() + .unwrap_or_else(TextSearchIndex::empty), + caption_texts: row.caption_texts::<Option<TextSearchIndex>>() + .unwrap_or_else(TextSearchIndex::empty), + has_password: row.has_password(), } } ); @@ -121,8 +147,758 @@ impl Event { pub(super) async fn prepare_index(index: &Index) -> Result<()> { util::lazy_set_special_attributes(index, "event", FieldAbilities { - searchable: &["title", "creators", "description", "series_title"], - filterable: &["listed", "read_roles", "write_roles", "is_live", "end_time_timestamp", "created_timestamp"], + searchable: &[ + "title", + "creators", + "description", + "series_title", + "slide_texts.texts", + "caption_texts.texts", + ], + filterable: &[ + "listed", + "preview_roles", + "read_roles", + "write_roles", + "is_live", + "end_time_timestamp", + "created_timestamp" + ], sortable: &["updated_timestamp"], }).await } + +#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord)] +struct SearchTimespan { + start: u64, + duration: u64, +} + +impl SearchTimespan { + /// We reduce the precision of our timestamps to 100ms, as more is really + /// not needed for search. + const PRECISION: u64 = 100; + + fn from_tst(v: &TimespanText) -> Self { + Self { + start: v.span_start as u64 / Self::PRECISION, + duration: (v.span_end as u64).saturating_sub(v.span_start as u64) / Self::PRECISION, + } + } + + fn api_start(&self) -> f64 { + (self.start * Self::PRECISION) as f64 + } + + fn api_duration(&self) -> f64 { + (self.duration * Self::PRECISION) as f64 + } +} + +/// What we store in Meili to nicely search through texts and then look up the +/// corresponding timespan for the match. This is built inside `FromSql` since +/// that way, we can avoid useless heap allocations. +#[derive(Serialize, Deserialize, Debug)] +pub(crate) struct TextSearchIndex { + /// This contains all source strings concatenated with `;` as separator. You + /// might object that `;` is a bad choice as it can also appear in the + /// source strings. However, `;` is treated as a hard separator by Meili. + /// Even with phrase search (""), the input query is split at `;`, so the + /// query `"foo;bar"` will always find documents containing "foo" + /// and "bar". It makes no difference whether this field contains `foo;bar` + /// or `foo𑱱bar` (random other separator), the search finds both. Of all + /// the hard separators, we use `;` as it is only one byte long. + texts: String,
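Since `texts` concatenates all cues into one string, a Meili match is just a byte range into it, and the `timespan_index` documented next maps that offset back to the owning cue. A toy sketch of the mapping idea with invented cue data (the real `lookup` below binary-searches fixed-width base64 entries instead of a slice of `usize`):

```rust
// Invented data: texts = "Hello everyone;Welcome to the lecture", so the
// cues start at byte offsets 0 and 15. The first entry is always at
// offset 0, which keeps the `Err` arm below from underflowing.
fn owning_entry(offsets: &[usize], match_start: usize) -> usize {
    match offsets.binary_search(&match_start) {
        Ok(i) => i,
        // Not an exact hit: take the entry with the largest offset <= needle.
        Err(i) => i - 1,
    }
}

fn main() {
    let offsets = [0, 15];
    assert_eq!(owning_entry(&offsets, 3), 0);  // inside "Hello everyone"
    assert_eq!(owning_entry(&offsets, 20), 1); // inside "Welcome to the lecture"
}
```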
+ + /// This field is for translating the byte offset we get from Meili + /// (for matches found in `texts`) to the time span inside the video. I + /// tried to keep this index as small as possible while still allowing fast + /// lookups. + /// + /// This string starts with three decimal digits specifying how many bytes are + /// used to encode the three different integer fields. Add one to these + /// digits to get the actual number of bytes. For example `121` means that: + /// - The byte offset field is stored with 2 base64 digits. + /// - The start timestamp field is stored with 3 base64 digits. + /// - The duration field is stored with 2 base64 digits. + /// + /// After this three-byte header follows an array where each item contains + /// these three fields (in that order), i.e. each item is 7 bytes in total + /// in the example above. Each field is base64 encoded. This array allows + /// random access, useful for binary search. + timespan_index: String, +} + +impl TextSearchIndex { + fn empty() -> Self { + Self { + texts: String::new(), + timespan_index: String::new(), + } + } + + /// Looks up a match position (byte offset inside `texts`) and returns the + /// index of the entry in this index. Panics if `texts` is empty, i.e. if + /// there is no way there is a match inside that field. + fn lookup(&self, match_range: &MatchRange) -> usize { + let lens = self.index_lengths(); + let index = self.index_entries(); + let entry_len = lens.entry_len; + + assert!(match_range.start < self.texts.len()); + assert!(index.len() % entry_len == 0, "broken index: incorrect length"); + + let decode_byte_offset = |slot: usize| -> u64 { + Self::decode_index_int(&index[slot * entry_len..][..lens.offset_digits as usize]) + }; + + + // Perform binary search over the index. We treat `entry_len` bytes in + // the index as one item here. + (|| { + let needle = match_range.start as u64; + + let num_entries = index.len() / entry_len; + let mut size = num_entries; + let mut left = 0; + let mut right = size; + while left < right { + let mid = left + size / 2; + let v = decode_byte_offset(mid); + + // This binary search isn't for an exact value but for a range. + // We want to find the entry with an "offset" value <= needle, + // where the next entry has an "offset" value > needle. + if needle < v { + // Needle is smaller than the offset value -> we have to + // look in the left half. + right = mid; + } else if needle == v || mid + 1 == num_entries { + // Either the needle matches the offset value + // (obvious success) or it is larger and we are looking at + // the last entry. In that case, we also return the last + // entry. + return mid; + } else { + // In this case, the needle is larger than the offset value, + // so we check if it is smaller than the next entry's + // offset value. If so, we found the correct entry, + // otherwise recurse on right half. + let next_v = decode_byte_offset(mid + 1); + if needle < next_v { + return mid; + } + left = mid + 1; + } + + size = right - left; + } + + // By construction, the first entry in the index always has the + // offset 0, and we just return the last entry if the needle is + // larger than the largest offset value. + unreachable!() + })() + }
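A hand-decoded toy index to make the layout concrete (values invented; only the 'A'..='Z' range of the digit alphabet is needed for them):

```rust
// Header "121" says the entries are 2 + 3 + 2 = 7 base64 digits wide.
// One entry follows, with each field written least-significant-digit
// first: byte offset 5 -> "FA", start 64 -> "ABA", duration 3 -> "DA".
fn decode(src: &[u8]) -> u64 {
    src.iter().rev().fold(0, |out, &b| {
        // Sketch alphabet: only 'A'..='Z' (values 0..=25) needed here.
        assert!(b.is_ascii_uppercase());
        out * 64 + (b - b'A') as u64
    })
}

fn main() {
    let index = b"121FAABADA";
    let entry = &index[3..];
    assert_eq!(decode(&entry[0..2]), 5);  // byte offset into `texts`
    assert_eq!(decode(&entry[2..5]), 64); // start: 64 * 100ms = 6.4s
    assert_eq!(decode(&entry[5..7]), 3);  // duration: 300ms
}
```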
+ + /// Returns the timespan of the given slot. + fn timespan_of_slot(&self, slot: usize) -> SearchTimespan { + let lens = self.index_lengths(); + let start_idx = slot * lens.entry_len as usize + lens.offset_digits as usize; + let duration_idx = start_idx + lens.start_digits as usize; + + let start_bytes = &self.index_entries()[start_idx..][..lens.start_digits as usize]; + let duration_bytes = &self.index_entries()[duration_idx..][..lens.duration_digits as usize]; + + SearchTimespan { + start: Self::decode_index_int(start_bytes), + duration: Self::decode_index_int(duration_bytes), + } + } + + /// Reads the index header and returns the lengths of all fields. + fn index_lengths(&self) -> IndexLengths { + let index = &self.timespan_index.as_bytes(); + + let offset_digits = index[0] - b'0' + 1; + let start_digits = index[1] - b'0' + 1; + let duration_digits = index[2] - b'0' + 1; + IndexLengths { + offset_digits, + start_digits, + duration_digits, + entry_len: (offset_digits + start_digits + duration_digits) as usize, + } + } + + /// Returns the main part of the index, the array of entries. Strips the + /// header specifying the lengths. + fn index_entries(&self) -> &[u8] { + &self.timespan_index.as_bytes()[3..] + } + + /// Decodes a base64 encoded integer. + fn decode_index_int(src: &[u8]) -> u64 { + // The least significant digit comes first, so we iterate in reverse + // and multiply by 64 each time. + let mut out = 0; + for byte in src.iter().rev() { + let digit = base64_decode(*byte).expect("invalid base64 digit in index"); + out = out * 64 + digit as u64; + } + out + } + + pub(crate) fn resolve_matches( + &self, + matches: &[MatchRange], + out: &mut Vec<TextMatch>, + ty: TextAssetType, + ) { + if matches.is_empty() || self.texts.is_empty() { + return; + } + + // Resolve all matches and then bucket them by the individual text they + // belong to. + let mut entries = HashMap::new(); + for match_range in matches { + // We ignore super short matches. For example, including `a` + // anywhere in the query would result in tons of matches + // otherwise. + if match_range.length <= 1 { + continue; + } + + // Get correct indices and the actual text snippet. Unfortunately, + // Meilisearch might sometimes return invalid indices that slice + // UTF-8 codepoints in half, so we need to protect against that. + let start = ceil_char_boundary(&self.texts, match_range.start); + let end = ceil_char_boundary(&self.texts, match_range.start + match_range.length); + let snippet = &self.texts[start..end]; + + // If the match is a single stop word, we ignore it. + if is_stop_word(snippet) { + continue; + } + + let slot = self.lookup(match_range); + let matches = entries.entry(slot as u32).or_insert_with(Vec::new); + + // We only consider a limited number of matches inside the same text + // to avoid getting way too large API responses. The frontend cuts + // off the text anyway at some point. + if matches.len() < 10 { + matches.push(match_range); + } + } + + // We reduce the number of matches if necessary.
+ const LIMIT: usize = 20; + let simplified = simplify_matches( + LIMIT, + entries.keys().map(|&slot| (slot, self.timespan_of_slot(slot as usize))) + ); + + for (slot, timespan) in simplified { + let matches = entries.get(&slot).unwrap(); + + // Get the range that includes all matches + let full_range = { + let mut it = matches.iter(); + let first = it.next().unwrap(); + let init = first.start..first.start + first.length; + let combined = it.fold(init, |acc, m| { + min(acc.start, m.start)..max(acc.end, m.start + m.length) + }); + + // Unfortunately, Meili can sometimes return invalid ranges, + // slicing into UTF-8 chars, so we also ceil here. + let start = ceil_char_boundary(&self.texts, combined.start); + let end = ceil_char_boundary(&self.texts, combined.end); + start..end + }; + + // Add a bit of margin to include more context in the text. We only + // include context from the same text though, meaning we only go to + // the next `;` or `\n`. + let range_with_context = { + let max_distance = 80; + let separators = &[';', '\n']; + + // First just add a fixed margin around the match, as a limit. + let margin_start = full_range.start.saturating_sub(max_distance); + let margin_end = std::cmp::min( + full_range.end + max_distance, + self.texts.len(), + ); + + let margin_start = ceil_char_boundary(&self.texts, margin_start); + let margin_end = ceil_char_boundary(&self.texts, margin_end); + + // Search forwards and backwards from the match point to find + // boundaries of the text. + let start = self.texts[margin_start..full_range.start] + .rfind(separators) + .map(|p| margin_start + p + 1) + .unwrap_or(margin_start); + let end = self.texts[full_range.end..margin_end] + .find(separators) + .map(|p| full_range.end + p) + .unwrap_or(margin_end); + start..end + }; + + let highlights = matches.iter().map(|m| { + ByteSpan { + start: (m.start - range_with_context.start) as u32, + len: m.length as u32, + } + }).collect(); + + + out.push(TextMatch { + start: timespan.api_start(), + duration: timespan.api_duration(), + text: self.texts[range_with_context].to_owned(), + ty, + highlights, + }); + } + } +} + + +/// Reduces the number of `matches` to `target_count` by successively merging +/// two matches. Merging results in a timespan covering both matches, and an +/// arbitrary slot from one of the inputs. +/// +/// The metric that decides which two matches to merge next is the crux of this +/// function. Currently, it's simply "size of resulting interval", i.e. the two +/// matches whose merge results in the smallest interval are merged. This metric is +/// probably not ideal and we would rather also consider how much the intervals +/// overlap or how far they are apart. But the algorithm currently depends on +/// some properties offered by the "size" metric, specifically: +/// - When searching for the best rightward partner, we can stop searching once +/// the next candidate has `start > best.end`. +/// - The merge of a+b is never a better rightward partner for any interval than +/// `a` or `b` would have been. +/// +/// Both of these properties are not strictly necessary for the algorithm to +/// work, but when implementing this, there were more important things to do, +/// so the simpler metric was kept. +/// +/// This function runs in O(n log n). The basic idea is to have a list of +/// intervals, sorted by start time, plus a heap to extract the best merge. The +/// main loop just loops as long as we still have too many intervals, extracts +/// the best merge and performs the merge.
There are of course O(n²) many +/// possible merges, but for each interval, there is a clear best partner +/// (called BRP, best rightward partner), so our heap only needs to hold O(n) +/// items, as long as we update everything correctly. +/// +/// When merging, we would need to remove two intervals and add a new one. Doing +/// that naively in `intervals`, while keeping its order, would be O(n). What +/// we do instead is to replace the input interval with the lower `start` with +/// the merged one (this maintains the order), and to soft-delete the other +/// input interval. Soft-delete just means using a sentinel value in the list +/// to denote a deleted element. This slows down the BRP search, unfortunately, +/// and it's possible that some fringe cases will cause a quadratic runtime, +/// but that is very unlikely to happen with real world data. +/// +/// The BRP search is not ideal anyway: we limit the search artificially to +/// prevent quadratic blowup. When there are no overlapping intervals, that's +/// not a problem at all. But one might imagine optimizing the BRP search with +/// stricter runtime guarantees in the future. But it's not important for our +/// use case. +fn simplify_matches( + target_count: usize, + matches: impl Iterator<Item = (u32, SearchTimespan)> + ExactSizeIterator, +) -> impl Iterator<Item = (u32, SearchTimespan)> { + #[derive(Clone, Copy)] + struct Interval { + start: u64, + end: u64, + slot: u32, + } + + impl Interval { + fn invalid() -> Self { + Self { + start: u64::MAX, + end: 0, + slot: 0, + } + } + + fn is_invalid(&self) -> bool { + self.start > self.end + } + + fn size(&self) -> u64 { + self.end - self.start + } + } + + fn merge(a: Interval, b: Interval) -> Interval { + Interval { + start: min(a.start, b.start), + end: max(a.end, b.end), + slot: a.slot, // Just arbitrarily pick one + } + } + + + /// Condition: `intervals[base_idx].start <= intervals[brp_idx].start`. + #[derive(Copy, Clone, PartialEq, Eq)] + struct HeapEntry { + merged_size: u64, + /// The interval with the smaller `start`. For each interval (except the + /// last one), there will be exactly one heap entry with the `base_idx` + /// pointing to that interval. + base_idx: usize, + /// The BRP of the base. + brp_idx: usize, + } + + impl PartialOrd for HeapEntry { + fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> { + Some(self.cmp(other)) + } + } + + impl Ord for HeapEntry { + fn cmp(&self, other: &Self) -> std::cmp::Ordering { + self.merged_size.cmp(&other.merged_size) + .reverse() + // We include the indices only to match the `Eq` impl. The order of + // two entries with same `size` does not matter to the algorithm. + .then_with(|| (self.base_idx, self.brp_idx).cmp(&(other.base_idx, other.brp_idx))) + } + }
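The `.reverse()` in that `Ord` impl is what turns `std`'s max-heap into a min-heap, so `pop()` yields the smallest `merged_size` first. The same trick in miniature, using `std::cmp::Reverse`:

```rust
use std::cmp::Reverse;
use std::collections::BinaryHeap;

fn main() {
    // BinaryHeap is a max-heap; wrapping (or, as above, reversing the
    // comparison) makes the smallest element pop first.
    let mut heap = BinaryHeap::new();
    for size in [16u64, 6, 42] {
        heap.push(Reverse(size));
    }
    assert_eq!(heap.pop(), Some(Reverse(6)));
}
```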
+ + + /// Returns the "best rightward partner" (BRP) for the interval at `idx`. + /// + /// This is the interval P with the minimum `end` value among all intervals that + /// have a `start` >= the `start` of the given interval. The latter condition + /// is equal to "all elements right of `idx`", since the slice is sorted by + /// `start`. Hence the name "rightward". + fn best_rightward_partner(intervals: &[Interval], idx: usize) -> Option<usize> { + const LIMIT: u32 = 8; + + let mut min_end = u64::MAX; + let mut best_candidate = None; + let mut checked = 0; + for (rel_candidate_idx, candidate) in intervals[idx + 1..].iter().enumerate() { + let candidate_idx = rel_candidate_idx + idx + 1; + if candidate.is_invalid() { + // Just skip invalid ones and don't consider them in the limit. + continue; + } else if candidate.start >= min_end { + // At this point, there cannot be any better candidates. + break; + } + + if candidate.end < min_end { + min_end = candidate.end; + best_candidate = Some(candidate_idx); + } + + // Stop after some attempts. This is just here to avoid quadratic blowup + // of the algorithm. This will rarely be needed. For example, if there + // is no overlap between intervals (which is true for most events), the + // above `break` will be reached in the 2nd loop iteration every time. + // This limit will only be reached if there is a lot of overlap. And if + // the limit triggers, we just end up with a potentially suboptimal + // merge. This just slightly decreases the output quality but does not + // break the algorithm or lead to any bad problems, as far as I can tell. + checked += 1; + if checked > LIMIT { + break; + } + } + best_candidate + } + + fn push_entry_for_base( + base_idx: usize, + intervals: &[Interval], + heap: &mut BinaryHeap<HeapEntry>, + ) { + if let Some(partner_idx) = best_rightward_partner(&intervals, base_idx) { + let merged = merge(intervals[base_idx], intervals[partner_idx]); + heap.push(HeapEntry { + merged_size: merged.size(), + base_idx, + brp_idx: partner_idx, + }); + } + } + + + // Early exit if there is nothing to do. + if matches.len() <= target_count { + return either::Left(matches); + } + + // Convert to internal representation and sort by start point. + let mut intervals = matches.map(|(slot, ts)| Interval { + start: ts.start, + end: ts.start + ts.duration, + slot, + }).collect::<Vec<_>>(); + intervals.sort_by_key(|i| i.start); + + // Build the initial heap + let mut heap = BinaryHeap::with_capacity(intervals.len()); + for (idx, _) in intervals.iter().enumerate() { + push_entry_for_base(idx, &intervals, &mut heap); + } + + // Main loop + let mut soft_deleted = 0; + while intervals.len() - soft_deleted > target_count { + // Heap has at least as many entries as `intervals`, so we can unwrap. + let entry = heap.pop().unwrap(); + let HeapEntry { base_idx, brp_idx, .. } = entry; + let base = intervals[base_idx]; + let brp = intervals[brp_idx]; + + + // If the base is invalid, we caught case (2) below: delayed deletion. + // We just ignore this entry as it's no longer valid. + if base.is_invalid() { + continue; + } + + + let merged = merge(base, brp); + + // Catch cases (3) and (4). + // + // Case 4 (entries that used a now soft-deleted interval as BRP) is + // caught by `brp.is_invalid()`. Case 3 (entries that use an interval + // replaced by a merge as BRP) is caught by the size mismatch. Well, + // kind of. If the size is correct, the interval might still have been + // replaced by a merge, but the merge didn't change its `end`, so the + // heap entry was still valid and we can continue as normal. + if brp.is_invalid() || entry.merged_size != merged.size() { + // But in this case, we need to find the new BRP for `base` and push + // a new entry for it. Again, this new entry will have a size >= that + // of the entry we just popped. + push_entry_for_base(base_idx, &intervals, &mut heap); + continue; + } + + + // Replace `base` with `merged`. This maintains the ordering by `start`. + // Also soft-delete `brp`. + intervals[base_idx] = merged; + let merged_idx = base_idx; + intervals[brp_idx] = Interval::invalid(); + soft_deleted += 1; + + + // Update heap. + // + // Some entries in the heap might reference `base` or `brp`. Those are not + // valid anymore and need to be replaced.
Replacing or removing things + // in the binary heap from `std` is impossible; it would require a + // secondary data structure. There are crates offering this, but we + // don't need it. Instead, we can use "delayed invalidation", meaning + // that we detect invalid entries after popping them from the heap and + // then just ignore or replace them. + // + // Of course, this means that the heap is larger, as it contains dummy + // values. But the heap is still O(n) at all times: it starts with n-1 + // elements and in each loop iteration, one element is popped and either 0 + // or 1 element is pushed, so it never grows in size. + // + // We care about heap entries `e` with: + // - (1) ... `e.base = base`: there was only one and it was just popped. + // - (2) ... `e.base = brp`: there exists one in the heap that should be + // deleted, but since we cannot easily delete it now, we do delayed + // deletion. + // - (3) ... `e.brp = base` + // - (4) ... `e.brp = brp` + // + // For cases (3) and (4), we need to find their base's new BRP. Instead + // of doing that now, we just do it when the entry is popped. The + // important property here is that the new BRP never results in a merge + // smaller than the previous BRP. That means the invalid entry will be + // popped from the heap no later than the corrected one would be. + // + // Finally, an entry with the new merged interval as `base` is still + // missing, so we push it now. + push_entry_for_base(merged_idx, &intervals, &mut heap); + } + + either::Right( + intervals.into_iter() + .filter(|i| !i.is_invalid()) + .map(|i| (i.slot, SearchTimespan { + start: i.start, + duration: i.end - i.start, + })) + ) +}
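A short hand-walk of the merge metric with invented matches (times in the 100ms units of `SearchTimespan`):

```rust
// Three intervals: a = [0, 10), b = [12, 16), c = [13, 18).
// Candidate merges and resulting interval sizes:
//   a+b -> [0, 16)  -> size 16
//   b+c -> [12, 18) -> size 6   <- smallest, so merged first
// With target_count = 2, the output is [0, 10) and [12, 18), the latter
// keeping one of the two input slots arbitrarily.
fn main() {
    let (a, b, c) = ((0u64, 10u64), (12, 16), (13, 18));
    let size = |x: (u64, u64), y: (u64, u64)| x.1.max(y.1) - x.0.min(y.0);
    assert!(size(b, c) < size(a, b));
    assert_eq!((b.0.min(c.0), b.1.max(c.1)), (12, 18));
}
```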
+ +fn ceil_char_boundary(s: &str, idx: usize) -> usize { + if idx > s.len() { + s.len() + } else { + // A UTF-8 char is at most 4 bytes long, so the next boundary is at + // most 3 bytes past `idx`. + (0..=3).map(|offset| idx + offset) + .find(|idx| s.is_char_boundary(*idx)) + .unwrap() + } +} + +struct IndexLengths { + offset_digits: u8, + start_digits: u8, + duration_digits: u8, + entry_len: usize, +} + +impl<'a> FromSql<'a> for TextSearchIndex { + fn from_sql( + ty: &postgres_types::Type, + raw: &'a [u8], + ) -> Result<Self, Box<dyn std::error::Error + Sync + Send>> { + // ----- Step 0: Postgres stuff ----------------------------------------------------------- + let member_type = match *ty.kind() { + postgres_types::Kind::Array(ref member) => member, + _ => panic!("expected array type"), + }; + + let array = postgres_protocol::types::array_from_sql(raw)?; + if array.dimensions().count()? > 1 { + return Err("array contains too many dimensions".into()); + } + + + // ----- Step 1: Read all data, transform and analyze it ---------------------------------- + // + // Clean texts by filtering out ones that are very unlikely to + // contribute to a meaningful search experience. The responsibility of + // delivering good texts to search for is still with Opencast and its + // captions and slide texts. It doesn't hurt to do some basic cleaning + // here though. + // + // We also gather some statistical data and concatenate all strings that + // have the same timespan into a single string (separated by newline). + let mut needed_main_capacity = 0; + let mut max_duration = 0; + let mut max_start = 0; + let mut texts = <BTreeMap<SearchTimespan, String>>::new(); + let mut it = array.values(); + while let Some(v) = it.next()? { + let ts = TimespanText::from_sql_nullable(member_type, v)?; + + // We exclude all strings that contain no segments longer than 1 + // byte. However, we do not remove these short segments from + // strings that do have longer words, as included strings should be + // kept intact, otherwise we will transform "Saw a dog" into "Saw + // dog", which is something we show the user. + // + // Counting 'bytes' instead of chars is deliberate, as we can only + // be sure for ASCII characters that they don't carry intrinsic + // meaning worth searching for individually. + let s = ts.t.trim(); + if s.split_whitespace().all(|part| part.len() <= 1) { + continue; + } + + let key = SearchTimespan::from_tst(&ts); + + // Skip empty timespans (after reducing to our lower precision). + if key.duration == 0 { + continue; + } + + let buf = texts.entry(key).or_default(); + + // If the text is already present in this span, we do not include it + // again. This can happen if some text is repeated in a slide for + // example. Having duplicates is not useful for search. The `4096` + // limit is just there to avoid quadratic blowup in case there are + // lots of texts in the same span. + if buf[..min(buf.len(), 4096)].contains(s) { + continue; + } + + if !buf.is_empty() { + needed_main_capacity += 1; + buf.push('\n'); + } + needed_main_capacity += s.len(); + buf.push_str(s); + + max_start = max(max_start, key.start); + max_duration = max(max_duration, key.duration); + } + + + // ----- Step 2: actually build the two fields that we store in Meili --------------------- + + // For the separators. + needed_main_capacity += texts.len().saturating_sub(1); + + // Figure out how many base64 digits we need for the three fields. + let required_digits = |max: u64| if max == 0 { 1 } else { max.ilog(64) + 1 }; + let offset_digits = required_digits(needed_main_capacity as u64); + let start_digits = required_digits(max_start); + let duration_digits = required_digits(max_duration); + + // The original duration uses u64, but is divided by 100, and 2^64 / 100 + // is less than 64^10. + assert!(offset_digits <= 10 && start_digits <= 10 && duration_digits <= 10); + + let index_len = 3 + (offset_digits + start_digits + duration_digits) as usize * texts.len(); + let mut out = Self { + texts: String::with_capacity(needed_main_capacity), + timespan_index: String::with_capacity(index_len), + }; + + // Write index header, specifying how each field is encoded. We subtract + // 1 to make sure we always use exactly one digit. + write!( + out.timespan_index, + "{}{}{}", + offset_digits - 1, + start_digits - 1, + duration_digits - 1, + ).unwrap(); + + let mut encode_index_int = |mut value: u64, digits: u32| { + // This is reverse order encoding for convenience/performance here. + // The least significant digit is leftmost.
+ for _ in 0..digits { + let digit = BASE64_DIGITS[value as usize % 64].into(); + out.timespan_index.push(digit); + value /= 64; + } + debug_assert!(value == 0); + }; + + for (ts, s) in texts { + if !out.texts.is_empty() { + out.texts.push(';'); + } + let offset = out.texts.len() as u64; + out.texts.push_str(&s); + encode_index_int(offset, offset_digits); + encode_index_int(ts.start, start_digits); + encode_index_int(ts.duration, duration_digits); + } + + Ok(out) + } + + fn accepts(ty: &postgres_types::Type) -> bool { + match *ty.kind() { + postgres_types::Kind::Array(ref inner) => TimespanText::accepts(inner), + _ => false, + } + } +} diff --git a/backend/src/search/mod.rs b/backend/src/search/mod.rs index 28a40c403..c1512731e 100644 --- a/backend/src/search/mod.rs +++ b/backend/src/search/mod.rs @@ -8,11 +8,11 @@ use meilisearch_sdk::{ errors::ErrorCode, task_info::TaskInfo, }; use postgres_types::{FromSql, ToSql}; -use secrecy::{Secret, ExposeSecret}; +use secrecy::{SecretString, ExposeSecret}; use serde::{Deserialize, Serialize}; use crate::{ - db::types::Key, + model::Key, prelude::*, config::HttpHost, }; @@ -25,7 +25,7 @@ mod series; pub(crate) mod writer; mod update; mod user; -mod util; +pub(crate) mod util; mod playlist; use self::writer::MeiliWriter; @@ -42,7 +42,7 @@ pub(crate) use self::{ /// The version of search index schema. Increase whenever there is a change that /// requires an index rebuild. -const VERSION: u32 = 5; +const VERSION: u32 = 7; // ===== Configuration ============================================================================ @@ -51,7 +51,7 @@ const VERSION: u32 = 5; pub(crate) struct MeiliConfig { /// The access key. This can be the master key, but ideally should be an API /// key that only has the priviliges it needs. - key: Secret, + key: SecretString, /// The host MeiliSearch is running on. As requests include the `key`, you /// should use HTTPS if Meili is running on another machine. In fact, HTTP @@ -73,7 +73,7 @@ impl MeiliConfig { /// Connects to Meili, erroring if Meili is not reachable. Does not check /// whether required indexes exist or whether they are in the correct shape! pub(crate) async fn connect(&self) -> Result { - let client = Client::new(self.clone()); + let client = Client::new(self.clone())?; client.check_connection().await .with_context(|| format!("failed to connect to MeiliSearch at '{}'", self.host))?; @@ -130,12 +130,12 @@ impl Client { /// Creates the search client, but without contacting Meili at all. Thus, /// neither the connection nor the existence of the indexes is checked. /// Also see [`Self::check_connection`] and [`Self::prepare`]. - pub(crate) fn new(config: MeiliConfig) -> Self { + pub(crate) fn new(config: MeiliConfig) -> Result { // Create client (this does not connect to Meili). let client = MeiliClient::new( &config.host.to_string(), Some(config.key.expose_secret()), - ); + ).context("failed to create Meili client")?; // Store some references to the indices (without checking whether they // actually exist!). @@ -146,7 +146,7 @@ impl Client { let user_index = client.index(&config.user_index_name()); let playlist_index = client.index(&config.playlist_index_name()); - Self { + Ok(Self { client, config, meta_index, @@ -155,7 +155,7 @@ impl Client { realm_index, user_index, playlist_index, - } + }) } /// Checks the connection to Meilisearch by accessing the `/health` endpoint. 
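A note on the `Secret<String>` to `SecretString` change in this hunk: the wrapper keeps redacting itself in `Debug` output and only hands out the value through an explicit call. A minimal sketch (assuming a recent `secrecy` release; the exact `Debug` text differs between versions):

```rust
use secrecy::{ExposeSecret, SecretString};

fn main() {
    let key = SecretString::from("meili-master-key".to_owned());
    // Debug formatting redacts the value, so keys can't leak via logs:
    println!("{key:?}");
    // Reading the value requires an explicit, easily greppable call:
    assert_eq!(key.expose_secret(), "meili-master-key");
}
```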
@@ -215,7 +215,7 @@ impl IndexItemKind { } } -pub(crate) trait IndexItem: serde::Serialize { +pub(crate) trait IndexItem: serde::Serialize + Send + Sync { const KIND: IndexItemKind; fn id(&self) -> SearchId; } @@ -336,7 +336,7 @@ pub(crate) async fn rebuild_if_necessary( for task in tasks { util::wait_on_task(task, meili).await?; } - info!("Completely rebuild search index"); + info!("Completely rebuilt search index"); meili.meta_index.add_or_replace(&[meta::Meta::current_clean()], None).await .context("failed to update index version document (clean)")?; diff --git a/backend/src/search/playlist.rs b/backend/src/search/playlist.rs index 62b276f79..30ff9d344 100644 --- a/backend/src/search/playlist.rs +++ b/backend/src/search/playlist.rs @@ -5,7 +5,8 @@ use tokio_postgres::GenericClient; use crate::{ prelude::*, - db::{types::Key, util::{collect_rows_mapped, impl_from_db}}, + model::Key, + db::util::{collect_rows_mapped, impl_from_db}, }; use super::{realm::Realm, util::{self, FieldAbilities}, IndexItem, IndexItemKind, SearchId}; diff --git a/backend/src/search/realm.rs b/backend/src/search/realm.rs index dff5f0937..6efc82374 100644 --- a/backend/src/search/realm.rs +++ b/backend/src/search/realm.rs @@ -3,13 +3,13 @@ use postgres_types::FromSql; use serde::{Serialize, Deserialize}; use tokio_postgres::GenericClient; -use crate::{prelude::*, db::{types::Key, util::{collect_rows_mapped, impl_from_db}}}; +use crate::{prelude::*, model::Key, db::util::{collect_rows_mapped, impl_from_db}}; use super::{util::{self, FieldAbilities}, IndexItem, IndexItemKind, SearchId}; /// Representation of realms in the search index. -#[derive(Serialize, Deserialize, Debug, FromSql)] +#[derive(Clone, Serialize, Deserialize, Debug, FromSql)] #[postgres(name = "search_realms")] pub(crate) struct Realm { pub(crate) id: SearchId, diff --git a/backend/src/search/series.rs b/backend/src/search/series.rs index f2acdce0c..b26bc15d5 100644 --- a/backend/src/search/series.rs +++ b/backend/src/search/series.rs @@ -5,7 +5,8 @@ use tokio_postgres::GenericClient; use crate::{ prelude::*, - db::{types::{Key, SearchThumbnailInfo}, util::{collect_rows_mapped, impl_from_db}}, + model::Key, + db::{types::SearchThumbnailInfo, util::{collect_rows_mapped, impl_from_db}}, }; use super::{realm::Realm, util::{self, FieldAbilities}, IndexItem, IndexItemKind, SearchId}; diff --git a/backend/src/search/stop-words.txt b/backend/src/search/stop-words.txt new file mode 100644 index 000000000..d45d410eb --- /dev/null +++ b/backend/src/search/stop-words.txt @@ -0,0 +1,396 @@ +# Single latin letters +a +b +c +d +e +f +g +h +i +j +k +l +m +n +o +p +q +r +s +t +u +v +w +x +y +z + + +# English +# 'a' and 'i' are already covered by single letters above. 
+# Based on NLTK's list of english stopwords +about +above +#after -> German word +again +against +all +am +an +and +any +are +as +at +be +because +been +before +being +below +between +both +but +by +can +could +did +do +does +doing +dont +down +during +each +few +for +from +further +had +has +have +having +he +her +here +hers +herself +him +himself +his +how +however +if +in +into +is +it +its +itself +just +like +many +me +more +#most -> German word +must +my +myself +no +nor +not # -> German word, not super common as stand-alone word and very much a English stop word, so we keep it +now +of +off +on +once +only +or +other +our +ours +ourselves +out +over +own +said +same +she +should +so +some +such # -> German word, but probably fine to keep it a stop word +than +that +the +their +theirs +them +themselves +then +there +#these -> German word +they +this +those +through +to +too +under +until +up +using +very +was +we +were +what +when +where +which +while +who +whom +why +will +with +would +you +your +yours +yourself +yourselves + + +# German +aber +alle +allem +allen +aller +alles +als +also #-> English word but also kind of stop-wordy, so keeping it +am +an +ander +andere +anderem +anderen +anderer +anderes +anderm +andern +anderr +anders +auch +auf +aus +bei +#bin -> english word +bis +bist +da +damit +dann +der +den +des +dem +#die -> english word +das +dass +daß +dazu +dein +deine +deinem +deinen +deiner +deines +denn +derer +dessen +dich +dir +du +#dies -> english word +diese +diesem +diesen +dieser +dieses +doch +dort +durch +ein +eine +einem +einen +einer +eines +einig +einige +einigem +einigen +einiger +einiges +einmal +er +ihn +ihm +es +etwas +euer +eure +eurem +euren +eurer +eures +für +gab +gegen +gewesen +hab +habe +haben +#hat -> English word +hatte +hatten +hier +hin +hinter +ich +mich +mir +ihr +ihre +ihrem +ihren +ihrer +ihres +euch +im +in +indem +ins +ist +jede +jedem +jeden +jeder +jedes +jene +jenem +jenen +jener +jenes +jetzt +kam +kann +kein +keine +keinem +keinen +keiner +keines +konnte +können +könnte +machen +#man -> English word +manche +manchem +manchen +mancher +manches +mein +meine +meinem +meinen +meiner +meines +mit +muss +musste +nach +nicht +nichts +noch +#nun -> English word +nur +ob +oder +ohne +sehr +sein +seine +seinem +seinen +seiner +seines +selbst +sich +sie +ihnen +sind +so +solche +solchem +solchen +solcher +solches +soll +sollte +sondern +sonst +sowie +über +um +und +uns +unse +unsem +unsen +unser +unses +unter +viel +vom +von +vor +während +#war -> English word +waren +warst +was +weg +weil +weiter +welche +welchem +welchen +welcher +welches +wenn +werde +werden +wie +wieder +will +wir +wird +wirst +wo +wollen +wollte +wurde +wurden +würde +würden +zu +zum +zur +zwar +zwischen diff --git a/backend/src/search/update.rs b/backend/src/search/update.rs index 035d82466..6a18cd4a2 100644 --- a/backend/src/search/update.rs +++ b/backend/src/search/update.rs @@ -5,7 +5,8 @@ use std::{ }; use crate::{ - db::{DbConnection, types::Key, util::select}, + db::{DbConnection, util::select}, + model::Key, prelude::*, util::Never, }; diff --git a/backend/src/search/user.rs b/backend/src/search/user.rs index a34f87ff7..5a974a5f9 100644 --- a/backend/src/search/user.rs +++ b/backend/src/search/user.rs @@ -4,7 +4,8 @@ use tokio_postgres::GenericClient; use crate::{ prelude::*, - db::{types::Key, util::{collect_rows_mapped, impl_from_db}}, + db::util::{collect_rows_mapped, impl_from_db}, + model::Key, }; use super::{util::{self, FieldAbilities}, IndexItem, 
IndexItemKind, SearchId}; diff --git a/backend/src/search/util.rs b/backend/src/search/util.rs index ae45af114..d5b5c1587 100644 --- a/backend/src/search/util.rs +++ b/backend/src/search/util.rs @@ -1,5 +1,6 @@ -use std::time::Duration; +use std::{sync::LazyLock, time::Duration}; +use ahash::AHashSet; use meilisearch_sdk::{errors::{Error, ErrorCode}, indexes::Index, tasks::Task, task_info::TaskInfo}; use crate::{ @@ -42,6 +43,35 @@ pub(super) async fn lazy_set_special_attributes( Ok(()) } +// This might seem like a good use case for a perfect hash table, but that's not +// even faster than this solution with a really fast hash. See +// https://github.com/LukasKalbertodt/case-insensitive-small-set-bench +pub static STOP_WORDS: LazyLock> = LazyLock::new(|| { + const RAW: &str = include_str!("stop-words.txt"); + RAW.lines() + .map(|l| l.split('#').next().unwrap().trim()) + .filter(|s| !s.is_empty()) + .collect() +}); + +/// Returns `true` iff the given string is contained in our list of stop words. +/// The comparison ignores ASCII case. +/// +/// We do have a few stop words with non-ASCII chars, but those are only in the +/// middle of the word. And ASCII-lowercasing is much easier and therefore +/// faster than proper Unicode-lowercasing. +pub fn is_stop_word(s: &str) -> bool { + if s.bytes().all(|b| b.is_ascii_lowercase()) { + STOP_WORDS.contains(s) + } else { + // This string allocation seems like it would really hurt + // performance, but it's really not that bad. All in all, doing + // it like this is actually quite fast. See + // https://github.com/LukasKalbertodt/case-insensitive-small-set-bench + STOP_WORDS.contains(s.to_ascii_lowercase().as_str()) + } +} + /// Encodes roles inside an ACL (e.g. for an event) to be stored in the index. /// The roles are hex encoded to be filterable properly with Meili's /// case-insensitive filtering. Also, `ROLE_ADMIN` is removed as an space @@ -54,6 +84,17 @@ pub(super) fn encode_acl(roles: &[String]) -> Vec { .collect() } +/// Decodes hex encoded ACL roles. 
+pub(crate) fn decode_acl(roles: &[String]) -> Vec { + roles.iter() + .map(|role| { + let bytes = hex::decode(role).expect("Failed to decode role"); + + String::from_utf8(bytes).expect("Failed to convert bytes to string") + }) + .collect() +} + /// Returns `true` if the given error has the error code `IndexNotFound` pub(super) fn is_index_not_found(err: &Error) -> bool { matches!(err, Error::Meilisearch(e) if e.error_code == ErrorCode::IndexNotFound) diff --git a/backend/src/sync/client.rs b/backend/src/sync/client.rs index e4d84819b..5bc5ad482 100644 --- a/backend/src/sync/client.rs +++ b/backend/src/sync/client.rs @@ -1,8 +1,8 @@ use std::time::{Duration, Instant}; -use base64::Engine; use bytes::Bytes; use chrono::{DateTime, Utc, TimeZone}; +use form_urlencoded::Serializer; use hyper::{ Response, Request, StatusCode, body::Incoming, @@ -10,11 +10,12 @@ use hyper::{ }; use hyper_rustls::HttpsConnector; use hyper_util::client::legacy::{connect::HttpConnector, Client}; -use secrecy::{ExposeSecret, Secret}; -use serde::Deserialize; +use secrecy::{ExposeSecret, SecretString}; +use serde::{Deserialize, Serialize}; use tap::TapFallible; use crate::{ + api::{model::acl::AclInputEntry, Context}, config::{Config, HttpHost}, prelude::*, sync::harvest::HarvestResponse, @@ -32,7 +33,7 @@ pub(crate) struct OcClient { http_client: Client, RequestBody>, sync_node: HttpHost, external_api_node: HttpHost, - auth_header: Secret, + auth_header: SecretString, username: String, } @@ -42,23 +43,12 @@ impl OcClient { const STATS_PATH: &'static str = "/tobira/stats"; pub(crate) fn new(config: &Config) -> Result { - let http_client = crate::util::http_client()?; - - // Prepare authentication - let credentials = format!( - "{}:{}", - config.sync.user, - config.sync.password.expose_secret(), - ); - let encoded_credentials = base64::engine::general_purpose::STANDARD.encode(credentials); - let auth_header = format!("Basic {}", encoded_credentials); - Ok(Self { - http_client, + http_client: crate::util::http_client()?, sync_node: config.opencast.sync_node().clone(), external_api_node: config.opencast.external_api_node().clone(), - auth_header: Secret::new(auth_header), - username: config.sync.user.clone(), + auth_header: config.opencast.basic_auth_header(), + username: config.opencast.user.clone(), }) } @@ -151,8 +141,8 @@ impl OcClient { Ok(out) } - pub async fn delete_event(&self, oc_id: &String) -> Result> { - let pq = format!("/api/events/{}", oc_id); + pub async fn delete_event(&self, oc_id: &str) -> Result> { + let pq = format!("/api/events/{oc_id}"); let req = self.authed_req_builder(&self.external_api_node, &pq) .method(http::Method::DELETE) .body(RequestBody::empty()) @@ -161,6 +151,89 @@ impl OcClient { self.http_client.request(req).await.map_err(Into::into) } + pub async fn update_event_acl( + &self, + oc_id: &str, + acl: &[AclInputEntry], + context: &Context, + ) -> Result> { + let pq = format!("/api/events/{oc_id}/acl"); + + // Temporary solution to add custom and preview roles + // Todo: remove again once frontend sends these roles. 
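For context, the payload that `update_event_acl` assembles: every role/action pair becomes one element of a JSON array, which is then form-urlencoded as the `acl` field. A sketch with an invented role, mirroring the private `AclInput` struct defined at the bottom of this file:

```rust
use serde::Serialize;

// Same shape as the private `AclInput` below; the role is invented.
#[derive(Serialize)]
struct AclInput { allow: bool, action: String, role: String }

fn main() {
    let acl = vec![
        AclInput { allow: true, action: "read".into(), role: "ROLE_USER_BOB".into() },
        AclInput { allow: true, action: "write".into(), role: "ROLE_USER_BOB".into() },
    ];
    // This JSON string is what gets form-urlencoded into the `acl`
    // field and PUT to `/api/events/{id}/acl`:
    assert_eq!(
        serde_json::to_string(&acl).unwrap(),
        r#"[{"allow":true,"action":"read","role":"ROLE_USER_BOB"},{"allow":true,"action":"write","role":"ROLE_USER_BOB"}]"#,
    );
}
```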
+ let extra_roles_sql = "\ + select unnest(preview_roles) as role, 'preview' as action from events where opencast_id = $1 + union + select role, key as action + from jsonb_each_text( + (select custom_action_roles from events where opencast_id = $1) + ) as actions(key, value) + cross join lateral jsonb_array_elements_text(value::jsonb) as role(role) + "; + + let extra_roles = context.db.query_mapped(&extra_roles_sql, dbargs![&oc_id], |row| { + let role: String = row.get("role"); + let action: String = row.get("action"); + AclInput { + allow: true, + action, + role, + } + }).await?; + + let mut access_policy = Vec::new(); + access_policy.extend(extra_roles); + + for entry in acl { + access_policy.extend( + entry.actions.iter().map(|action| AclInput { + allow: true, + action: action.clone(), + role: entry.role.clone(), + }), + ); + } + + let params = Serializer::new(String::new()) + .append_pair("acl", &serde_json::to_string(&access_policy).expect("Failed to serialize")) + .finish(); + let req = self.authed_req_builder(&self.external_api_node, &pq) + .method(http::Method::PUT) + .header(http::header::CONTENT_TYPE, "application/x-www-form-urlencoded") + .body(params.into()) + .expect("failed to build request"); + + self.http_client.request(req).await.map_err(Into::into) + } + + pub async fn start_workflow(&self, oc_id: &str, workflow_id: &str) -> Result> { + let params = Serializer::new(String::new()) + .append_pair("event_identifier", &oc_id) + .append_pair("workflow_definition_identifier", &workflow_id) + .finish(); + let req = self.authed_req_builder(&self.external_api_node, "/api/workflows") + .method(http::Method::POST) + .header(http::header::CONTENT_TYPE, "application/x-www-form-urlencoded") + .body(params.into()) + .expect("failed to build request"); + + self.http_client.request(req).await.map_err(Into::into) + } + + pub async fn has_active_workflows(&self, oc_id: &str) -> Result { + let pq = format!("/api/events/{oc_id}"); + let req = self.authed_req_builder(&self.external_api_node, &pq) + .body(RequestBody::empty()) + .expect("failed to build request"); + let uri = req.uri().clone(); + let response = self.http_client.request(req) + .await + .with_context(|| format!("HTTP request failed (uri: '{uri}')"))?; + + let (out, _) = self.deserialize_response::(response, &uri).await?; + Ok(out.processing_state == "RUNNING") + } + fn build_authed_req(&self, node: &HttpHost, path_and_query: &str) -> (Uri, Request) { let req = self.authed_req_builder(node, path_and_query) .body(RequestBody::empty()) @@ -223,3 +296,15 @@ pub struct ExternalApiVersions { pub default: String, pub versions: Vec, } + +#[derive(Debug, Serialize)] +struct AclInput { + allow: bool, + action: String, + role: String, +} + +#[derive(Debug, Deserialize)] +pub struct EventStatus { + pub processing_state: String, +} diff --git a/backend/src/sync/cmd.rs b/backend/src/sync/cmd.rs index 7dc6485b2..0e0f22247 100644 --- a/backend/src/sync/cmd.rs +++ b/backend/src/sync/cmd.rs @@ -1,8 +1,11 @@ use std::time::Instant; +use chrono::{DateTime, Utc}; + use crate::{ config::Config, - prelude::*, db::DbConnection, + db::{util::select, DbConnection}, + prelude::*, }; @@ -31,17 +34,68 @@ pub(crate) enum SyncCommand { #[clap(long)] yes_absolutely_reset: bool, }, + + /// Commands related to fetching texts like subtitles and slide + /// transcriptions from Opencast. 
diff --git a/backend/src/sync/cmd.rs b/backend/src/sync/cmd.rs
index 7dc6485b2..0e0f22247 100644
--- a/backend/src/sync/cmd.rs
+++ b/backend/src/sync/cmd.rs
@@ -1,8 +1,11 @@
 use std::time::Instant;
 
+use chrono::{DateTime, Utc};
+
 use crate::{
     config::Config,
-    prelude::*, db::DbConnection,
+    db::{util::select, DbConnection},
+    prelude::*,
 };
@@ -31,17 +34,68 @@ pub(crate) enum SyncCommand {
         #[clap(long)]
         yes_absolutely_reset: bool,
     },
+
+    /// Commands related to fetching texts like subtitles and slide
+    /// transcriptions from Opencast.
+    Texts {
+        #[clap(subcommand)]
+        cmd: TextsCommand,
+    },
+}
+
+#[derive(Debug, clap::Subcommand)]
+pub(crate) enum TextsCommand {
+    /// Fetches text attachments for events that have been enqueued.
+    Fetch {
+        /// If specified, the command will run forever and not stop after the
+        /// queue has been emptied.
+        #[clap(long)]
+        daemon: bool,
+    },
+
+    /// Queues events to fetch their text assets.
+    #[clap(arg_required_else_help = true)]
+    Queue {
+        /// Queues all events with text assets.
+        #[clap(long, exclusive = true)]
+        all: bool,
+
+        /// Queues all events with incompletely fetched assets.
+        #[clap(long, exclusive = true)]
+        missing: bool,
+
+        /// Opencast IDs of events to be queued.
+        #[clap(exclusive = true)]
+        events: Vec<String>,
+    },
+
+    /// Removes events from the queue.
+    #[clap(arg_required_else_help = true)]
+    Dequeue {
+        /// Completely clears the queue.
+        #[clap(long, exclusive = true)]
+        all: bool,
+
+        /// Opencast IDs of events to be removed from the queue.
+        #[clap(exclusive = true)]
+        events: Vec<String>,
+    },
+
+    /// Shows queue and text-asset statistics.
+    Status,
+}
 
 impl Args {
     pub(crate) fn is_long_running(&self) -> bool {
-        matches!(self.cmd, SyncCommand::Run { daemon: true })
+        match self.cmd {
+            SyncCommand::Run { daemon } => daemon,
+            SyncCommand::Texts { cmd: TextsCommand::Fetch { daemon }} => daemon,
+            _ => false,
+        }
     }
 }
 
 
 /// Entry point for `sync` commands.
 pub(crate) async fn run(args: &Args, config: &Config) -> Result<()> {
-    info!("Starting Tobira <-> Opencast synchronization ...");
     trace!("Configuration: {:#?}", config);
 
     let db = crate::connect_and_migrate_db(config).await?;
@@ -49,12 +103,23 @@ pub(crate) async fn run(args: &Args, config: &Config) -> Result<()> {
     match args.cmd {
         SyncCommand::Run { daemon } => {
+            info!("Starting Tobira <-> Opencast synchronization ...");
             let before = Instant::now();
             super::run(daemon, conn, config).await?;
             info!("Finished harvest in {:.2?}", before.elapsed());
             Ok(())
         }
         SyncCommand::Reset { yes_absolutely_reset: yes } => reset(conn, yes).await,
+        SyncCommand::Texts { cmd: TextsCommand::Fetch { daemon } } => {
+            super::text::fetch_update(conn, config, daemon).await
+        }
+        SyncCommand::Texts { cmd: TextsCommand::Status } => text_status(conn).await,
+        SyncCommand::Texts { cmd: TextsCommand::Queue { all, missing, ref events } } => {
+            text_queue(conn, all, missing, events).await
+        },
+        SyncCommand::Texts { cmd: TextsCommand::Dequeue { all, ref events } } => {
+            text_dequeue(conn, all, events).await
+        }
     }
 }
 
@@ -78,3 +143,147 @@ async fn reset(db: DbConnection, yes: bool) -> Result<()> {
 
     Ok(())
 }
info_line { + ($label:expr, $value:expr) => { + bunt::println!("{$dimmed}{}:{/$} {[blue+intense]}", $label, $value); + }; +} + +const HELPER_QUERIES: &str = " + events_with_text_assets as ( + select * + from events + where array_length(captions, 1) > 0 or slide_text is not null + ), + fetched_assets(id, num) as ( + select event_id, count(uri) + from event_texts + group by event_id + ), + incomplete_events(id, updated) as ( + select events.id, events.updated + from events + left join fetched_assets on fetched_assets.id = events.id + where coalesce(num, 0) + < coalesce(array_length(captions, 1), 0) + + case when slide_text is null then 0 else 1 end + ), + failed_events as ( + select id + from incomplete_events + left join event_texts_queue on event_texts_queue.event_id = id + where event_texts_queue.event_id is null + ) +"; + +async fn text_status(db: DbConnection) -> Result<()> { + let (selection, mapping) = select!( + queue_total_count: "(select count(*) from event_texts_queue)", + queue_ready_count: "(select count(*) from event_texts_queue where now() > fetch_after)", + queue_failed_count: "(select count(*) from event_texts_queue where retry_count > 0)", + next_ready: "(select min(fetch_after) from event_texts_queue)", + events_with_text: "(select count(*) from (select distinct event_id from event_texts) as t)", + num_texts: "(select count(*) from event_texts)", + events_with_text_assets: "(select count(*) from events_with_text_assets)", + incomplete_events: "(select count(*) from incomplete_events)", + failed_events: "(select count(*) from failed_events)", + ); + let sql = format!("with {HELPER_QUERIES} select {selection}"); + let row = db.query_one(&sql, &[]).await?; + + let queue_total_count: i64 = mapping.queue_total_count.of(&row); + let queue_ready_count: i64 = mapping.queue_ready_count.of(&row); + let queue_failed_count: i64 = mapping.queue_failed_count.of(&row); + let next_ready: Option> = mapping.next_ready.of(&row); + let events_with_text: i64 = mapping.events_with_text.of(&row); + let num_texts: i64 = mapping.num_texts.of(&row); + let events_with_text_assets: i64 = mapping.events_with_text_assets.of(&row); + let incomplete_events: i64 = mapping.incomplete_events.of(&row); + let failed_events: i64 = mapping.failed_events.of(&row); + + println!(); + bunt::println!("{$bold}# Queue:{/$}"); + info_line!("Queue length", queue_total_count); + info_line!("Ready entries in queue", queue_ready_count); + info_line!("Queue entries that failed before", queue_failed_count); + if let Some(next_ready) = next_ready { + info_line!("Next queue entry ready at", next_ready); + } + + println!(); + bunt::println!("{$bold}# Texts:{/$}"); + info_line!("Events with text assets", events_with_text_assets); + info_line!("Events with fetched texts", events_with_text); + info_line!("Fetched text assets", num_texts); + info_line!("Incomplete events", incomplete_events); + info_line!("Incomplete events not queued (failed)", failed_events); + + Ok(()) +} + +async fn text_queue( + db: DbConnection, + all: bool, + missing: bool, + events: &[String], +) -> Result<()> { + // These options are all mutually exclusive, checked by clap. 
+    async fn insert_from_table(db: DbConnection, from: &str) -> Result<()> {
+        let sql = format!("
+            with {HELPER_QUERIES}
+            insert into event_texts_queue (event_id, fetch_after, retry_count)
+            select id, updated, 0 from {from}
+            on conflict(event_id) do nothing
+        ");
+        let affected = db.execute(&sql, &[]).await?;
+        info!("Added {affected} entries to queue");
+        Ok(())
+    }
+
+
+    if all {
+        insert_from_table(db, "events_with_text_assets").await?;
+    } else if missing {
+        insert_from_table(db, "incomplete_events").await?;
+    } else {
+        let sql = "
+            insert into event_texts_queue (event_id, fetch_after, retry_count)
+            select id, updated, 0 from events
+            where opencast_id = any($1)
+            on conflict(event_id) do nothing
+        ";
+        let affected = db.execute(sql, &[&events]).await?;
+        info!("Added {affected} entries to queue");
+        if affected != events.len() as u64 {
+            warn!("Inserted fewer entries into queue than specified. One or more \
+                IDs might not exist or might already be queued.");
+        }
+    }
+
+    Ok(())
+}
+
+async fn text_dequeue(
+    db: DbConnection,
+    all: bool,
+    events: &[String],
+) -> Result<()> {
+    if all {
+        db.execute("truncate event_texts_queue", &[]).await?;
+        info!("Cleared text assets fetch queue");
+    } else {
+        let affected = db.execute(
+            "delete from event_texts_queue
+                where event_id in (select id from events where opencast_id = any($1))",
+            &[&events],
+        ).await?;
+        info!("Removed {affected} entries from the queue");
+        if affected != events.len() as u64 {
+            warn!("Fewer items were dequeued than specified -> some specified \
+                event IDs were not in the queue");
+        }
+    }
+
+    Ok(())
+}
diff --git a/backend/src/sync/harvest/mod.rs b/backend/src/sync/harvest/mod.rs
index 20b61499a..e07e4dbf9 100644
--- a/backend/src/sync/harvest/mod.rs
+++ b/backend/src/sync/harvest/mod.rs
@@ -7,12 +7,10 @@ use serde::de::DeserializeOwned;
 use tokio_postgres::types::ToSql;
 
 use crate::{
-    auth::ROLE_ADMIN,
+    auth::{is_special_eth_role, ROLE_ADMIN, ROLE_ANONYMOUS, ETH_ROLE_CREDENTIALS_RE},
     config::Config,
     db::{
-        self,
-        DbConnection,
-        types::{EventCaption, EventSegment, EventState, EventTrack, SeriesState},
+        self, types::{Credentials, EventCaption, EventSegment, EventState, EventTrack, SeriesState}, DbConnection
     },
     prelude::*,
 };
@@ -89,7 +87,7 @@ pub(crate) async fn run(
         // everything worked out alright.
         let last_updated = harvest_data.items.iter().rev().find_map(|item| item.updated());
         let mut transaction = db.transaction().await?;
-        store_in_db(harvest_data.items, &sync_status, &mut transaction).await?;
+        store_in_db(harvest_data.items, &sync_status, &mut transaction, config).await?;
         SyncStatus::update_harvested_until(harvest_data.includes_items_until, &*transaction).await?;
         transaction.commit().await?;
@@ -133,6 +131,7 @@ async fn store_in_db(
     items: Vec<HarvestItem>,
     sync_status: &SyncStatus,
     db: &mut deadpool_postgres::Transaction<'_>,
+    config: &Config,
 ) -> Result<()> {
     let before = Instant::now();
     let mut upserted_events = 0;
@@ -162,10 +161,34 @@ async fn store_in_db(
                     },
                 };
 
+                // (**ETH SPECIAL FEATURE**)
+                let credentials = config.sync.interpret_eth_passwords
+                    .then(|| hashed_eth_credentials(&event.acl.read))
+                    .flatten();
+
+                // (**ETH SPECIAL FEATURE**)
+                // When an ETH event is password-protected, read access doesn't suffice to view a video - everyone
+                // without write access needs to authenticate. So we need to shift all read roles down to preview, so
+                // users with what was previously read access are only allowed to preview and authenticate.
+                // `read_roles` now needs to be an exact copy of `write_roles`, and not a superset.
+                // With this, checks that allow actual read access will still succeed for users that also have write
+                // access.
+                // Additionally, since ETH requires that everyone with the link should be able to authenticate
+                // regardless of ACL inclusion, `ROLE_ANONYMOUS` is added to the preview roles.
+                if credentials.is_some() {
+                    (event.acl.preview, event.acl.read) = (event.acl.read, event.acl.write.clone());
+                    event.acl.preview.push(ROLE_ANONYMOUS.into());
+                }
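Spelled out on a toy ACL, the shift above does the following (illustrative values only; plain `String`s stand in for the real role constants):

```rust
fn main() {
    let read = vec!["ROLE_STUDENT".to_owned()];
    let write = vec!["ROLE_INSTRUCTOR".to_owned()];

    // The shift performed above for password-protected events:
    let mut preview = read;      // old readers become previewers ...
    let read = write.clone();    // ... and read access now mirrors write access.
    preview.push("ROLE_ANONYMOUS".to_owned());

    assert_eq!(preview, ["ROLE_STUDENT", "ROLE_ANONYMOUS"]);
    assert_eq!(read, write);
}
```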
+
+                let filter_role = |role: &String| -> bool {
+                    role != ROLE_ADMIN && !is_special_eth_role(role, config)
+                };
+
                 // We always handle the admin role in a special way, so no need
                 // to store it for every single event.
-                event.acl.read.retain(|role| role != ROLE_ADMIN);
-                event.acl.write.retain(|role| role != ROLE_ADMIN);
+                event.acl.preview.retain(filter_role);
+                event.acl.read.retain(filter_role);
+                event.acl.write.retain(filter_role);
 
                 for (_, roles) in &mut event.acl.custom_actions.0 {
                     roles.retain(|role| role != ROLE_ADMIN);
@@ -192,6 +215,7 @@
                     ("creators", &event.creators),
                     ("thumbnail", &event.thumbnail),
                     ("metadata", &event.metadata),
+                    ("preview_roles", &event.acl.preview),
                     ("read_roles", &event.acl.read),
                     ("write_roles", &event.acl.write),
                     ("custom_action_roles", &event.acl.custom_actions),
@@ -199,6 +223,7 @@
                     ("captions", &captions),
                     ("segments", &segments),
                     ("slide_text", &event.slide_text),
+                    ("credentials", &credentials),
                 ]).await?;
 
                 trace!("Inserted or updated event {} ({})", event.id, event.title);
@@ -213,7 +238,14 @@
                 removed_events += 1;
             }
 
-            HarvestItem::Series(series) => {
+            HarvestItem::Series(mut series) => {
+                // (**ETH SPECIAL FEATURE**)
+                let series_credentials = config.sync.interpret_eth_passwords
+                    .then(|| hashed_eth_credentials(&series.acl.read))
+                    .flatten();
+                series.acl.read.retain(|role| !is_special_eth_role(role, config));
+                series.acl.write.retain(|role| !is_special_eth_role(role, config));
+
                 // We first simply upsert the series.
                 let new_id = upsert(db, "series", "opencast_id", &[
                     ("opencast_id", &series.id),
@@ -225,6 +257,7 @@
                     ("updated", &series.updated),
                     ("created", &series.created),
                     ("metadata", &series.metadata),
+                    ("credentials", &series_credentials),
                 ]).await?;
 
                 // But now we have to fix the foreign key for any events that
@@ -379,6 +412,15 @@ fn check_affected_rows_removed(rows_affected: u64, entity: &str, opencast_id: &s
     }
 }
 
+fn hashed_eth_credentials(read_roles: &[String]) -> Option<Credentials> {
+    read_roles.iter().find_map(|role| {
+        ETH_ROLE_CREDENTIALS_RE.captures(role).map(|captures| Credentials {
+            name: format!("sha1:{}", &captures[1]),
+            password: format!("sha1:{}", &captures[2]),
+        })
+    })
+}
+
 /// Inserts a new row or updates an existing one if the value in `unique_col`
 /// already exists. Returns the value of the `id` column, which is assumed to
 /// be `i64`.
diff --git a/backend/src/sync/harvest/response.rs b/backend/src/sync/harvest/response.rs
index 4042f9df7..90fe709c3 100644
--- a/backend/src/sync/harvest/response.rs
+++ b/backend/src/sync/harvest/response.rs
@@ -1,7 +1,10 @@
 use chrono::{DateTime, Utc};
 use serde::{Deserialize, Serialize};
 
-use crate::db::types::{CustomActions, EventCaption, EventTrack, EventSegment, ExtraMetadata};
+use crate::{
+    db::types::{CustomActions, EventCaption, EventTrack, EventSegment},
+    model::ExtraMetadata,
+};
 
 /// What the harvesting API returns.
@@ -184,6 +187,8 @@ pub(crate) struct Acl {
     pub(crate) read: Vec<String>,
     #[serde(default)]
     pub(crate) write: Vec<String>,
+    #[serde(default)]
+    pub(crate) preview: Vec<String>,
     #[serde(flatten)]
     pub(crate) custom_actions: CustomActions,
 }
@@ -227,6 +232,7 @@ mod tests {
                 title: "Cats".into(),
                 description: Some("Several videos of cats".into()),
                 acl: Acl {
+                    preview: vec![],
                     read: vec!["ROLE_ANONYMOUS".into()],
                     write: vec!["ROLE_ANONYMOUS".into()],
                     custom_actions: CustomActions::default(),
@@ -240,6 +246,7 @@
                 title: "Video Of A Tabby Cat".into(),
                 description: None,
                 acl: Acl {
+                    preview: vec![],
                    read: vec!["ROLE_ADMIN".into(), "ROLE_ANONYMOUS".into()],
                    write: vec!["ROLE_ADMIN".into()],
                    custom_actions: CustomActions::default(),
@@ -360,6 +367,7 @@
                 acl: Acl {
                     read: vec!["ROLE_USER_BOB".into()],
                     write: vec![],
+                    preview: vec![],
                     custom_actions: CustomActions::default(),
                 },
                 entries: vec![
diff --git a/backend/src/sync/mod.rs b/backend/src/sync/mod.rs
index 0af4df5dc..b24bc5392 100644
--- a/backend/src/sync/mod.rs
+++ b/backend/src/sync/mod.rs
@@ -1,4 +1,3 @@
-use secrecy::Secret;
 use core::fmt;
 use std::time::Duration;
 
@@ -8,6 +7,7 @@ use crate::{config::Config, db::DbConnection, prelude::*};
 pub(crate) mod cmd;
 pub(crate) mod harvest;
 pub(crate) mod stats;
+pub(crate) mod text;
 mod client;
 mod status;
 
@@ -39,14 +39,6 @@ pub(crate) async fn check_compatibility(client: &OcClient) -> Result<()> {
 
 #[derive(Debug, confique::Config)]
 pub(crate) struct SyncConfig {
-    /// Username of the user used to communicate with Opencast for data syncing.
-    /// This user has to have access to all events and series. Currently, that
-    /// user has to be admin.
-    user: String,
-
-    /// Password of the user used to communicate with Opencast.
-    password: Secret<String>,
-
     /// A rough estimate of how many items (events & series) are transferred in
     /// each HTTP request while harvesting (syncing) with the Opencast
     /// instance.
@@ -69,8 +61,18 @@ pub(crate) struct SyncConfig {
     /// relevant in `--daemon` mode.
     #[config(default = "30s", deserialize_with = crate::config::deserialize_duration)]
     pub(crate) poll_period: Duration,
-}
 
+    /// Whether SHA1-hashed series passwords (as assignable by ETH's admin UI
+    /// build) are interpreted in Tobira.
+    #[config(default = false)]
+    pub(crate) interpret_eth_passwords: bool,
+
+    /// Number of concurrent tasks with which Tobira downloads assets from
+    /// Opencast. The default should be a good sweet spot. Decrease to reduce
+    /// load on Opencast, increase to speed up downloads a bit.
+    #[config(default = 8)]
+    concurrent_download_tasks: u8,
+}
 
 /// Version of the Tobira-module API in Opencast.
 struct ApiVersion {
diff --git a/backend/src/sync/stats.rs b/backend/src/sync/stats.rs
index 6a160aa6a..4d1f42441 100644
--- a/backend/src/sync/stats.rs
+++ b/backend/src/sync/stats.rs
@@ -86,8 +86,6 @@ struct ConfigStats {
     logout_link_overridden: bool,
     /// Value of `auth.pre_auth_external_links`.
     uses_pre_auth: bool,
-    /// Whether `theme.logo.small` is set.
-    has_narrow_logo: bool,
 }
 
@@ -118,7 +116,6 @@ impl Stats {
             login_link_overridden: config.auth.login_link.is_some(),
             logout_link_overridden: config.auth.logout_link.is_some(),
             uses_pre_auth: config.auth.pre_auth_external_links,
-            has_narrow_logo: config.theme.logo.small.is_some(),
         },
     })
 }
diff --git a/backend/src/sync/text/mod.rs b/backend/src/sync/text/mod.rs
new file mode 100644
index 000000000..9a54746a9
--- /dev/null
+++ b/backend/src/sync/text/mod.rs
@@ -0,0 +1,531 @@
+use std::{sync::Arc, time::Duration};
+
+use chrono::{DateTime, Utc};
+use futures::{pin_mut, StreamExt};
+use hyper::StatusCode;
+use secrecy::ExposeSecret as _;
+use url::Url;
+
+use crate::{
+    config::Config,
+    db::{
+        self,
+        types::{EventCaption, EventTextsQueueRecord, TextAssetType, TimespanText},
+        util::{collect_rows_mapped, select},
+        DbConnection,
+    },
+    model::Key,
+    dbargs,
+    prelude::*,
+};
+
+mod mpeg7;
+
+
+/// How many queue entries are loaded and processed in one chunk.
+const CHUNK_SIZE: i64 = 100;
+
+/// Number of times Tobira attempts to fetch an asset in case of unexpected
+/// errors before giving up.
+const MAX_RETRIES: i32 = 12;
+
+/// How often the DB is checked for new entries in the queue.
+const DB_POLL_PERIOD: Duration = Duration::from_secs(30);
+
+/// How long Tobira will wait initially when backing off (waiting with requests
+/// to Opencast).
+const INITIAL_BACKOFF: Duration = Duration::from_secs(30);
+
+/// The maximum time waited during a backoff.
+const MAX_BACKOFF: Duration = Duration::from_secs(60 * 30);
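For a feel of the waiting strategy these constants define: the global backoff (used in `fetch_update` below) doubles from `INITIAL_BACKOFF` until it hits `MAX_BACKOFF`, while per-entry retries wait `2^retry_count` minutes, as the SQL in `fetch_update_chunk` shows. A small illustration of the doubling (illustrative only, not part of this patch):

```rust
use std::time::Duration;

fn main() {
    let initial = Duration::from_secs(30);    // INITIAL_BACKOFF
    let max = Duration::from_secs(60 * 30);   // MAX_BACKOFF
    let mut backoff = initial;
    let mut schedule = Vec::new();
    for _ in 0..8 {
        schedule.push(backoff);
        backoff = std::cmp::min(max, backoff * 2);
    }
    // 30s, 1m, 2m, 4m, 8m, 16m, 30m, 30m: capped after six doublings.
    assert_eq!(schedule[6], max);
}
```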
+
+
+pub(crate) async fn fetch_update(
+    mut db: DbConnection,
+    config: &Config,
+    daemon: bool,
+) -> Result<()> {
+    let ctx = Context::new(config)?;
+
+    if daemon {
+        info!("Starting text fetching daemon");
+    } else {
+        info!("Starting to fetch text assets of all queued events");
+    }
+
+    let mut backoff = INITIAL_BACKOFF;
+    let mut did_work_last = false;
+    loop {
+        // Check whether there are entries in the queue and whether they are
+        // ready yet.
+        let sql = "\
+            select now() > fetch_after \
+            from event_texts_queue \
+            order by fetch_after asc \
+            limit 1";
+        let queue_state = db.query_opt(sql, &[]).await
+            .context("failed to check event text queue")?
+            .map(|row| row.get::<_, bool>(0));
+
+        if queue_state == Some(true) {
+            // There are queue entries that are ready
+
+            // We simply propagate the error upwards as this function only
+            // errors on DB errors.
+            let outcome = fetch_update_chunk(&ctx, config.sync.concurrent_download_tasks, &mut db)
+                .await
+                .context("failed to fetch chunk of asset texts")?;
+            did_work_last = true;
+
+            match outcome {
+                SingleUpdateOutcome::Continue => {
+                    // Stuff worked, reset backoff duration.
+                    backoff = INITIAL_BACKOFF;
+                }
+                SingleUpdateOutcome::Backoff => {
+                    info!("Some error during text fetching indicates Tobira should back off \
+                        for now. Waiting {backoff:.2?}");
+                    tokio::time::sleep(backoff).await;
+                    backoff = std::cmp::min(MAX_BACKOFF, backoff * 2);
+                }
+            }
+        } else {
+            // Queue is empty or there are queue entries, but none is ready yet.
+            //
+            // TODO: we could also listen for changes on the table by using
+            // PostgreSQL's LISTEN/NOTIFY. And in addition to that, we would
+            // sleep until the `fetch_after` timestamp in the DB. But for now,
+            // this is easier.
+            if queue_state == Some(false) {
+                let msg = format!("no queue entries ready to be processed, \
+                    waiting {DB_POLL_PERIOD:?}");
+                if did_work_last {
+                    debug!("{msg}");
+                } else {
+                    trace!("{msg}");
+                }
+            } else if daemon {
+                trace!("text fetch queue is empty, waiting {DB_POLL_PERIOD:?}");
+            } else {
+                info!("No events queued for text fetching, exiting");
+                return Ok(());
+            }
+
+            did_work_last = false;
+            tokio::time::sleep(DB_POLL_PERIOD).await;
+        }
+    }
+}
+
+
+enum SingleUpdateOutcome {
+    Continue,
+    /// Stop sending requests to Opencast for some time.
+    Backoff,
+}
+
+async fn fetch_update_chunk(
+    ctx: &Context,
+    concurrent_tasks: u8,
+    db: &mut DbConnection,
+) -> Result<SingleUpdateOutcome> {
+    let tx = db.build_transaction()
+        .isolation_level(tokio_postgres::IsolationLevel::Serializable)
+        .start()
+        .await?;
+
+    // ----- Load some entries from the queue --------------------------------
+    let (selection, mapping) = select!(
+        captions: "events.captions",
+        slide_text: "events.slide_text",
+        event_id,
+        opencast_event_id: "events.opencast_id",
+        retry_count,
+        fetch_after,
+    );
+    let sql = format!("select {selection}
+        from event_texts_queue
+        inner join events on events.id = event_id
+        where now() > fetch_after
+        order by fetch_after asc
+        limit $1");
+    let entries = collect_rows_mapped(tx.query_raw(&sql, dbargs![&CHUNK_SIZE]), |row| Entry {
+        captions: mapping.captions.of(&row),
+        slide_text: mapping.slide_text.of(&row),
+        opencast_event_id: mapping.opencast_event_id.of(&row),
+        queue: EventTextsQueueRecord {
+            retry_count: mapping.retry_count.of(&row),
+            fetch_after: mapping.fetch_after.of(&row),
+            event_id: mapping.event_id.of(&row),
+        },
+    }).await?;
+
+
+    // ----- Fetch assets --------------------------------
+    //
+    // We want to fetch assets in parallel, but with a limited number of
+    // parallel fetch tasks. To do that, we first create a stream of futures.
+    let future_iter = entries.into_iter().map(|entry| {
+        let ctx = ctx.clone();
+        async move {
+            let uris = entry.captions.into_iter()
+                .map(|caption| (caption.uri, TextAssetType::Caption))
+                .chain(entry.slide_text.map(|s| (s, TextAssetType::SlideText)));
+
+            let mut texts = Vec::new();
+            for (uri, ty) in uris {
+                match process_asset(ty, &uri, &entry.opencast_event_id, &ctx).await {
+                    Outcome::Ignore => {},
+                    Outcome::Retry => {
+                        return (QueueAction::BumpRetryCount, entry.queue, texts);
+                    },
+                    Outcome::Backoff => {
+                        return (QueueAction::Backoff, entry.queue, texts);
+                    },
+                    Outcome::Success(t) => {
+                        texts.push(EventTextEntry {
+                            uri,
+                            event_id: entry.queue.event_id,
+                            texts: t,
+                            ty,
+                            fetch_time: Utc::now(),
+                        });
+                    },
+                }
+            }
+
+            (QueueAction::Remove, entry.queue, texts)
+        }
+    });
+    let mut stream = futures::stream::iter(future_iter).buffer_unordered(concurrent_tasks.into());
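The comment above describes the standard `futures` pattern of turning an iterator of futures into a stream with bounded concurrency. A minimal, self-contained sketch of the same pattern (illustrative only, not part of this patch; assumes a `tokio` runtime):

```rust
use futures::StreamExt;

#[tokio::main]
async fn main() {
    // Ten cheap "downloads", at most three running at the same time:
    let results: Vec<u32> = futures::stream::iter((1..=10u32).map(|n| async move { n * 2 }))
        .buffer_unordered(3)
        .collect()
        .await;
    assert_eq!(results.len(), 10); // completion order is not guaranteed
}
```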
+
+    // Iterate over the `buffer_unordered` stream and read the results.
+    let mut texts_to_be_inserted = Vec::new();
+    let mut queue_entries_to_be_deleted = Vec::new();
+    let mut queue_entries_to_be_bumped = Vec::new();
+    let mut out = SingleUpdateOutcome::Continue;
+    while let Some((queue_action, queue_entry, event_texts)) = stream.next().await {
+        trace!(
+            event = ?queue_entry.event_id,
+            ?queue_action,
+            "fetched {} text assets",
+            event_texts.len(),
+        );
+
+        texts_to_be_inserted.extend(event_texts);
+        match queue_action {
+            QueueAction::Remove => queue_entries_to_be_deleted.push(queue_entry),
+            QueueAction::BumpRetryCount => {
+                if queue_entry.retry_count >= MAX_RETRIES {
+                    warn!(
+                        event = ?queue_entry.event_id,
+                        "Giving up fetching texts for event after already trying {} times",
+                        queue_entry.retry_count,
+                    );
+                    queue_entries_to_be_deleted.push(queue_entry);
+                } else {
+                    queue_entries_to_be_bumped.push(queue_entry);
+                }
+            }
+            QueueAction::Backoff => {
+                out = SingleUpdateOutcome::Backoff;
+                break;
+            }
+        }
+    }
+
+
+    // ----- Write all changes to DB ------------------------------------------
+
+    // Bump retry counts of some queue entries. We use a simple exponential
+    // backoff.
+    let sql = "update event_texts_queue \
+        set retry_count = retry_count + 1,
+            fetch_after = now() + interval '1 minute' * pow(2, retry_count)
+        where event_texts_queue.* = any($1::event_texts_queue[])";
+    let bumped_queue_entries = tx.execute(sql, &[&queue_entries_to_be_bumped]).await
+        .context("failed to update queue entries")?;
+
+    // Write fetched texts to DB and clear old ones for these events.
+    let event_ids = texts_to_be_inserted.iter().map(|t| t.event_id).collect::<Vec<_>>();
+    tx.execute("delete from event_texts where event_id = any($1)", &[&event_ids]).await
+        .context("failed to delete from event_texts")?;
+    let columns = ["uri", "event_id", "ty", "texts", "fetch_time"];
+    let writer = db::util::bulk_insert("event_texts", &columns, &tx).await?;
+    pin_mut!(writer);
+    for t in &texts_to_be_inserted {
+        writer.as_mut()
+            .write_raw(dbargs![&t.uri, &t.event_id, &t.ty, &t.texts, &t.fetch_time])
+            .await?;
+    }
+    writer.finish().await?;
+
+    // Remove entries from queue
+    let sql = "delete from event_texts_queue \
+        where event_texts_queue.* = any($1::event_texts_queue[])";
+    let removed_queue_entries = tx.execute(sql, &[&queue_entries_to_be_deleted]).await
+        .context("failed to remove entries from queue")?;
+
+    tx.commit().await.context("failed to commit DB transaction")?;
+    debug!(
+        bumped_queue_entries,
+        removed_queue_entries,
+        upserted_event_texts = texts_to_be_inserted.len(),
+        "Persisted event text fetching to DB",
+    );
+
+    Ok(out)
+}
+
+#[derive(Debug)]
+struct Entry {
+    captions: Vec<EventCaption>,
+    slide_text: Option<String>,
+    opencast_event_id: String,
+    queue: EventTextsQueueRecord,
+}
+
+#[derive(Debug)]
+struct EventTextEntry {
+    uri: String,
+    event_id: Key,
+    texts: Vec<TimespanText>,
+    ty: TextAssetType,
+    fetch_time: DateTime<Utc>,
+}
+
+#[derive(Clone)]
+struct Context {
+    http_client: reqwest::Client,
+    is_uri_allowed: Arc<dyn Fn(&Url) -> bool + Send + Sync>,
+}
+
+impl Context {
+    fn new(config: &Config) -> Result<Self> {
+        let is_uri_allowed = {
+            let oc = &config.opencast;
+            let allowed_hosts = [&oc.host, &oc.sync_node, &oc.upload_node]
+                .into_iter()
+                .cloned()
+                .flatten()
+                .chain(config.opencast.other_hosts.iter().cloned())
+                .collect::<Vec<_>>();
+            Arc::new(move |url: &Url| {
+                allowed_hosts.iter().any(|allowed| {
+                    url.scheme() == &allowed.scheme
+                        && url.authority() == &allowed.authority
+                })
+            })
+        };
+
+        let http_client = {
+            use reqwest::header;
+
+            let mut headers = header::HeaderMap::new();
+            let mut header_value = header::HeaderValue::try_from(
+                config.opencast.basic_auth_header().expose_secret()
+            ).unwrap();
+            header_value.set_sensitive(true);
+            headers.insert(header::AUTHORIZATION, header_value);
+
+            let is_uri_allowed = is_uri_allowed.clone();
+            reqwest::Client::builder()
+                .user_agent("Tobira")
+                .default_headers(headers)
+                .redirect(reqwest::redirect::Policy::custom(move |attempt| {
+                    if attempt.previous().len() > 10 {
+                        attempt.error("too many redirects")
+                    } else if is_uri_allowed(attempt.url()) {
+                        attempt.follow()
+                    } else {
+                        attempt.error("redirect to non-trusted host")
+                    }
+                }))
+                .build()
+                .context("failed to build HTTP client")?
+        };
+
+        Ok(Self { http_client, is_uri_allowed })
+    }
+}
+
+#[derive(Debug)]
+enum QueueAction {
+    /// Remove entry from queue -> we are done with it.
+    Remove,
+
+    /// Keep entry in queue, but bump the retry count and adjust the
+    /// `fetch_after` timestamp.
+    BumpRetryCount,
+
+    /// Tobira should temporarily pause fetching. The queue entry remains in the
+    /// queue as before.
+    Backoff,
+}
+
+/// Possible outcomes when operating on a single text asset.
+enum Outcome<T> {
+    /// Operation for this asset failed, but we will ignore it because retrying
+    /// does not seem worthwhile. Other fetched assets of the same event are
+    /// written to the DB, though. To retry, the admin has to manually queue
+    /// the events again.
+    Ignore,
+
+    /// Operation for this asset failed, likely in a temporary fashion. The
+    /// event should remain queued so that it can be tried again later.
+    Retry,
+
+    /// Operation for this asset failed in a way that indicates Opencast is
+    /// currently not operational. The whole fetching process should pause for
+    /// some time. The operation will then be tried again.
+    Backoff,
+
+    /// Operation successful.
+    Success(T),
+}
+
+
+
+/// Downloads & parses a single text asset.
+#[tracing::instrument(level = "trace", skip(ctx))]
+async fn process_asset(
+    ty: TextAssetType,
+    uri: &str,
+    oc_event_id: &str,
+    ctx: &Context,
+) -> Outcome<Vec<TimespanText>> {
+    let text = match download_asset(&uri, &oc_event_id, &ctx).await {
+        Outcome::Ignore => return Outcome::Ignore,
+        Outcome::Retry => return Outcome::Retry,
+        Outcome::Backoff => return Outcome::Backoff,
+        Outcome::Success(text) => text,
+    };
+
+    let texts = if uri.ends_with(".vtt") || ty == TextAssetType::Caption {
+        parse_vtt(text)
+    } else if uri.ends_with(".xml") || ty == TextAssetType::SlideText {
+        mpeg7::parse(&text)
+    } else {
+        warn!(oc_event_id, uri, "unknown file type of text -> ignoring");
+        return Outcome::Ignore;
+    };
+
+    match texts {
+        Ok(t) => Outcome::Success(t),
+        Err(e) => {
+            warn!(oc_event_id, uri, "failed to parse file ({e}) -> ignoring");
+            Outcome::Ignore
+        }
+    }
+}
+
+
+async fn download_asset(
+    uri: &str,
+    event_oc_id: &str,
+    ctx: &Context,
+) -> Outcome<String> {
+    trace!(uri, event = event_oc_id, "downloading text asset...");
+
warn { + ($($t:tt)*) => { + tracing::warn!(uri, event = event_oc_id, $($t)*); + }; + } + + let url = match Url::parse(uri) { + Ok(url) => url, + Err(e) => { + warn!("Asset URL is not a valid URL ({e}) -> ignoring"); + return Outcome::Ignore; + } + }; + + if !(ctx.is_uri_allowed)(&url) { + warn!("Host of asset URI does not match any configured Opencast node -> ignoring"); + return Outcome::Ignore; + }; + + let resp = match ctx.http_client.get(uri).send().await { + Ok(r) => r, + Err(e) => { + warn!("Requesting asset failed due to network error ({e}) -> backing off"); + return Outcome::Backoff; + } + }; + + + match resp.status() { + // Expected: all good + StatusCode::OK => {} + + // 500 -> no idea. Will retry a few times later. + StatusCode::INTERNAL_SERVER_ERROR => { + warn!("Requesting asset returned 500 -> trying again later"); + return Outcome::Retry; + } + + // Indications that Opencast is down or is requesting fewer requests. + StatusCode::BAD_GATEWAY + | StatusCode::SERVICE_UNAVAILABLE + | StatusCode::GATEWAY_TIMEOUT + | StatusCode::TOO_MANY_REQUESTS => { + warn!("Requesting asset returned status {} -> backing off", resp.status()); + return Outcome::Backoff; + } + + // TODO: we might want to use caching header in the future + //StatusCode::NOT_MODIFIED + + // All other codes shouldn't happen. We also treat them as temporary + // - 1xx + // - 2xx >= 201 + // - 300 + // - 307, 308 + // - 400, 402, 404, 405 - 428, 431, 451 + // - 401 & 403 -> we use admin user, this shouldn't happen + // - 501, 505, 506, 507, 508, 510, 511 + _ => { + warn!("Requesting asset returned unexpected status {} -> ignoring", resp.status()); + return Outcome::Ignore; + } + } + + match resp.text().await { + Ok(text) => Outcome::Success(text), + Err(e) => { + warn!("Failed to download asset due to network error ({e}) -> backing off"); + Outcome::Backoff + } + } +} + +fn parse_vtt(mut src: String) -> Result> { + fn to_millis(vtt: &subtp::vtt::VttTimestamp) -> i64 { + vtt.milliseconds as i64 + + vtt.seconds as i64 * 1000 + + vtt.minutes as i64 * 1000 * 60 + + vtt.hours as i64 * 1000 * 60 * 60 + } + + // The VTT parser requires a trailing newline, but some files do not have + // that. So we simply push one. + src.push('\n'); + + let vtt = subtp::vtt::WebVtt::parse(&src)?; + let out = vtt.blocks.iter() + .filter_map(|b| match b { + subtp::vtt::VttBlock::Que(cue) => Some(cue), + _ => None, + }) + .map(|cue| { + TimespanText { + span_start: to_millis(&cue.timings.start), + span_end: to_millis(&cue.timings.end), + t: cue.payload.join("\n"), + } + }) + .collect(); + + + Ok(out) +} diff --git a/backend/src/sync/text/mpeg7.rs b/backend/src/sync/text/mpeg7.rs new file mode 100644 index 000000000..432192169 --- /dev/null +++ b/backend/src/sync/text/mpeg7.rs @@ -0,0 +1,325 @@ +use xmlparser::{ElementEnd, Token, Tokenizer}; +use anyhow::{anyhow, bail, Result}; + +use crate::db::types::TimespanText; + + +/// Helper iterator which just skips tokens we are never interested in. +struct Iter<'a>(Tokenizer<'a>); + +impl<'a> Iterator for Iter<'a> { + type Item = as Iterator>::Item; + + fn next(&mut self) -> Option { + self.0.find(|token| { + match token { + Err(_) => true, + Ok(Token::ElementStart { .. }) => true, + Ok(Token::Attribute { .. }) => true, + Ok(Token::ElementEnd { .. }) => true, + Ok(Token::Text { .. }) => true, + Ok(Token::Cdata { .. }) => true, + + // All the stuff we are not interested in + Ok(Token::Declaration { .. }) => false, + Ok(Token::ProcessingInstruction { .. }) => false, + Ok(Token::Comment { .. 
diff --git a/backend/src/sync/text/mpeg7.rs b/backend/src/sync/text/mpeg7.rs
new file mode 100644
index 000000000..432192169
--- /dev/null
+++ b/backend/src/sync/text/mpeg7.rs
@@ -0,0 +1,325 @@
+use xmlparser::{ElementEnd, Token, Tokenizer};
+use anyhow::{anyhow, bail, Result};
+
+use crate::db::types::TimespanText;
+
+
+/// Helper iterator which just skips tokens we are never interested in.
+struct Iter<'a>(Tokenizer<'a>);
+
+impl<'a> Iterator for Iter<'a> {
+    type Item = <Tokenizer<'a> as Iterator>::Item;
+
+    fn next(&mut self) -> Option<Self::Item> {
+        self.0.find(|token| {
+            match token {
+                Err(_) => true,
+                Ok(Token::ElementStart { .. }) => true,
+                Ok(Token::Attribute { .. }) => true,
+                Ok(Token::ElementEnd { .. }) => true,
+                Ok(Token::Text { .. }) => true,
+                Ok(Token::Cdata { .. }) => true,
+
+                // All the stuff we are not interested in
+                Ok(Token::Declaration { .. }) => false,
+                Ok(Token::ProcessingInstruction { .. }) => false,
+                Ok(Token::Comment { .. }) => false,
+                Ok(Token::DtdStart { .. }) => false,
+                Ok(Token::EmptyDtd { .. }) => false,
+                Ok(Token::EntityDeclaration { .. }) => false,
+                Ok(Token::DtdEnd { .. }) => false,
+            }
+        })
+    }
+}
+
+/// The expected parents of `<VideoSegment>`.
+const EXPECTED_STACK: &[&str]
+    = &["Mpeg7", "Description", "MultimediaContent", "Video", "TemporalDecomposition"];
+
+/// Somewhat leniently parses an MPEG7 XML document as it occurs in Opencast to
+/// specify slide texts.
+///
+/// This is not a general spec-compliant MPEG7 parser. Said spec is huge and not
+/// even public or easily accessible. There is no existing parser for this in
+/// the Rust ecosystem and writing one ourselves is absolutely overkill. So
+/// while this doesn't feel super clean, this is a best effort parser to
+/// quickly extract the data we are interested in. It is somewhat lenient and
+/// tries to ignore extra elements and stuff like that.
+pub fn parse(src: &str) -> Result<Vec<TimespanText>> {
+    let mut it = Iter(Tokenizer::from(src));
+
+    let mut out = Vec::new();
+    let mut stack = Vec::new();
+    while let Some(token) = it.next() {
+        match token? {
+            Token::ElementStart { span, .. } => {
+                let name = &span.as_str()[1..];
+                if name == "VideoSegment" && stack == EXPECTED_STACK {
+                    parse_video_segment(&mut it, &mut out)?;
+                } else {
+                    stack.push(name);
+                }
+            },
+            Token::ElementEnd { end, .. } => {
+                if end != ElementEnd::Open {
+                    stack.pop();
+                }
+            }
+            _ => {},
+        }
+    }
+
+    Ok(out)
+}
+
+/// Parses the `<VideoSegment>` element, assuming the `ElementStart` is already
+/// yielded.
+fn parse_video_segment(it: &mut Iter, timespans: &mut Vec<TimespanText>) -> Result<()> {
+    let mut media_time = None;
+    let mut spatio_td = None;
+
+    parse_children(it, "</VideoSegment>", |name, it| {
+        match name {
+            "MediaTime" => {
+                media_time = Some(parse_media_time(it)?);
+            }
+            "SpatioTemporalDecomposition" => {
+                spatio_td = Some(parse_spatio_td(it)?);
+            }
+            _ => {},
+        };
+        Ok(())
+    })?;
+
+
+    let media_time = media_time.ok_or_else(|| {
+        anyhow!("missing <MediaTime> element in <VideoSegment>")
+    })?;
+
+    // If that element does not exist, there are no texts in this segment.
+    let Some(spatio_td) = spatio_td else {
+        return Ok(());
+    };
+
+    let span_start = media_time.start as i64;
+    let span_end = (media_time.start + media_time.duration) as i64;
+    timespans.extend(spatio_td.texts.into_iter().map(|t| TimespanText {
+        span_start,
+        span_end,
+        t: t.into(),
+    }));
+    Ok(())
+}
+
+/// Both fields in ms
+#[derive(Debug)]
+struct MediaTime {
+    start: u64,
+    duration: u64,
+}
+
+/// Parses the `<MediaTime>` element, assuming the `ElementStart` is already
+/// yielded.
+fn parse_media_time(it: &mut Iter) -> Result<MediaTime> {
+    let mut start = None;
+    let mut duration = None;
+
+    parse_children(it, "</MediaTime>", |name, it| {
+        match name {
+            "MediaRelTimePoint" => {
+                let text = parse_text_content_element(it)?;
+                start = Some(parse_media_rel_time_point(text)?);
+            }
+            "MediaDuration" => {
+                let text = parse_text_content_element(it)?;
+                duration = Some(parse_media_duration(text)?);
+            }
+            _ => {}
+        }
+        Ok(())
+    })?;
+
+    Ok(MediaTime {
+        // The start point might be missing, meaning it starts at the beginning
+        start: start.unwrap_or(0),
+        duration: duration
+            .ok_or_else(|| anyhow!("missing <MediaDuration> element in <MediaTime>"))?,
+    })
+}
+
+/// Parses the string inside `<MediaDuration>` which is close to an ISO 8601
+/// duration, but can contain a fractional seconds suffix. An input looks like
+/// this: `PT01M22S920N1000F`.
+fn parse_media_duration(s: &str) -> Result<u64> {
+    /// Parses strings like `680N1000F`, generally `\d+N\d+F`. Returns the number of
+    /// milliseconds this represents or `None` if it couldn't be parsed.
+    fn parse_fractional_seconds(s: &str) -> Option<u64> {
+        let (count, unit) = s.split_once('N')?;
+        let count: u64 = count.parse().ok()?;
+        let divisor: u64 = unit.strip_suffix('F')?.parse().ok()?;
+        Some((count * 1000) / divisor)
+    }
+
+    let (tail, duration) = iso8601::parsers::parse_duration(s.as_bytes())
+        .map_err(|e| anyhow!("failed to parse duration '{s}': {e}"))?;
+
+    match duration {
+        iso8601::Duration::YMDHMS { year, month, day, hour, minute, second, millisecond } => {
+            anyhow::ensure!(year == 0 && month == 0);
+            let tail = &s[s.len() - tail.len()..];
+            let fractional = parse_fractional_seconds(tail)
+                .ok_or_else(|| anyhow!("failed to parse fractional seconds '{tail}'"))?;
+            let out = fractional
+                + millisecond as u64
+                + second as u64 * 1000
+                + minute as u64 * 1000 * 60
+                + hour as u64 * 1000 * 60 * 60
+                + day as u64 * 1000 * 60 * 60 * 24;
+            Ok(out)
+        },
+        iso8601::Duration::Weeks(weeks) => {
+            anyhow::ensure!(tail.is_empty());
+            Ok(weeks as u64 * 7 * 24 * 60 * 60 * 1000)
+        },
+    }
+}
+
+/// Parses the string inside of `<MediaRelTimePoint>` which is similar to an ISO
+/// 8601 time, but can contain fractional seconds. Input looks like
+/// `T00:12:30:440F1000`.
+fn parse_media_rel_time_point(s: &str) -> Result<u64> {
+    /// Parses strings like `360F1000`. Returns the number of milliseconds this
+    /// represents or `None` if it couldn't be parsed.
+    fn parse_fractional_seconds(s: &str) -> Option<u64> {
+        let (count, unit) = s.split_once('F')?;
+        let count: u64 = count.parse().ok()?;
+        let divisor: u64 = unit.parse().ok()?;
+        Some((count * 1000) / divisor)
+    }
+
+    let s = s.strip_prefix("T").ok_or_else(|| anyhow!("rel time point does not start with T"))?;
+    let (tail, time) = iso8601::parsers::parse_time(s.as_bytes())
+        .map_err(|e| anyhow!("failed to parse media rel time '{s}': {e}"))?;
+
+    let tail = &s[s.len() - tail.len() + 1..];
+    let fractional = parse_fractional_seconds(tail)
+        .ok_or_else(|| anyhow!("failed to parse fractional seconds '{tail}'"))?;
+
+    let out = fractional
+        + time.millisecond as u64
+        + time.second as u64 * 1000
+        + time.minute as u64 * 1000 * 60
+        + time.hour as u64 * 1000 * 60 * 60;
+    Ok(out)
+}
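A hedged, test-style illustration of the two timestamp formats documented above, using the example inputs from the doc comments (it assumes it lives in this module, and that the `iso8601` parser leaves the fractional suffix as unparsed tail, exactly as the code above relies on):

```rust
#[cfg(test)]
mod timestamp_examples {
    use super::*;

    #[test]
    fn documented_inputs() {
        // PT01M22S plus 920/1000 s -> 1 * 60_000 + 22 * 1000 + 920 = 82_920 ms
        assert_eq!(parse_media_duration("PT01M22S920N1000F").unwrap(), 82_920);
        // T00:12:30 plus 440/1000 s -> 12 * 60_000 + 30 * 1000 + 440 = 750_440 ms
        assert_eq!(parse_media_rel_time_point("T00:12:30:440F1000").unwrap(), 750_440);
    }
}
```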
+
+
+#[derive(Debug)]
+struct SpatioTD<'a> {
+    texts: Vec<&'a str>,
+}
+
+/// Parses the `<SpatioTemporalDecomposition>` element, assuming the
+/// `ElementStart` is already yielded.
+fn parse_spatio_td<'a>(it: &mut Iter<'a>) -> Result<SpatioTD<'a>> {
+    let mut texts = Vec::new();
+
+    parse_children(it, "</SpatioTemporalDecomposition>", |name, it| {
+        if name == "VideoText" {
+            texts.extend(parse_video_text(it)?);
+        }
+        Ok(())
+    })?;
+
+    Ok(SpatioTD { texts })
+}
+
+/// Parses the `<VideoText>` element, assuming the `ElementStart` is already
+/// yielded. Ignores the `<MediaTime>` and just looks at the `<Text>`
+/// child.
+fn parse_video_text<'a>(it: &mut Iter<'a>) -> Result<Option<&'a str>> {
+    let mut text = None;
+
+    parse_children(it, "</VideoText>", |name, it| {
+        if name == "Text" {
+            text = Some(parse_text_content_element(it)?);
+        }
+        Ok(())
+    })?;
+
+    Ok(text)
+}
+
+/// Parses a simple element that has only text as its content. Attributes are
+/// skipped (if any) and end element is eaten.
+fn parse_text_content_element<'a>(it: &mut Iter<'a>) -> Result<&'a str> {
+    match skip_attrs(it)? {
+        ElementEnd::Open => {},
+        ElementEnd::Close(_, _) => bail!("unexpected element close tag"),
+        ElementEnd::Empty => return Ok(""),
+    }
+
+    let Token::Text { text } = it.next().ok_or_else(unexpected_eof)?? else {
+        bail!("expected text token");
+    };
+    let Token::ElementEnd { .. } = it.next().ok_or_else(unexpected_eof)?? else {
+        bail!("expected element end token");
+    };
+
+    Ok(text.as_str())
+}
+
+/// Helper to parse element with children elements in any order. Attributes are
+/// just ignored and the end tag is consumed.
+fn parse_children<'a>(
+    it: &mut Iter<'a>,
+    end_tag: &str,
+    mut on_child: impl FnMut(&str, &mut Iter<'a>) -> Result<()>,
+) -> Result<()> {
+    match skip_attrs(it)? {
+        ElementEnd::Open => {},
+        ElementEnd::Close(_, _) => bail!("unexpected element close tag"),
+        ElementEnd::Empty => return Ok(()),
+    }
+
+    let mut depth = 0;
+    loop {
+        match it.next().ok_or_else(unexpected_eof)?? {
+            Token::ElementStart { span, .. } => {
+                let name = &span[1..];
+                on_child(name, it)?;
+            },
+            Token::ElementEnd { end, span } => {
+                if span == end_tag && depth == 0 {
+                    break;
+                }
+                if end == ElementEnd::Open {
+                    depth += 1;
+                } else {
+                    depth -= 1;
+                }
+            },
+            _ => {}
+        }
+    }
+
+    Ok(())
+}
+
+
+/// Can be called after an `ElementStart` token, skipping all its attributes and
+/// returns once an `ElementEnd` token is found, which is returned.
+fn skip_attrs<'a>(it: &mut Iter<'a>) -> Result<ElementEnd<'a>> {
+    loop {
+        match it.next().ok_or_else(unexpected_eof)?? {
+            Token::Attribute { .. } => {}
+            Token::ElementEnd { end, .. } => return Ok(end),
+            other => bail!("unexpected {other:?}"),
+        }
+    }
+}
+
+fn unexpected_eof() -> anyhow::Error {
+    anyhow::anyhow!("Unexpected EOF")
+}
diff --git a/backend/src/util.rs b/backend/src/util.rs
index 093046aca..4d476e4a8 100644
--- a/backend/src/util.rs
+++ b/backend/src/util.rs
@@ -4,11 +4,43 @@
 use hyper_rustls::{HttpsConnector, HttpsConnectorBuilder};
 use hyper_util::client::legacy::{connect::HttpConnector, Client};
 use hyperlocal::UnixConnector;
 use rand::{RngCore, CryptoRng};
-use secrecy::Secret;
+use secrecy::SecretBox;
 
 use crate::{http::Response, prelude::*};
 
 
+/// The URL-safe base64 alphabet.
+pub(crate) const BASE64_DIGITS: &[u8; 64] =
+    b"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-_";
+
+pub(crate) fn base64_decode(ascii: u8) -> Option<u8> {
+    /// The reverse lookup table to `BASE64_DIGITS`. If you index by an ASCII value, you
+    /// either get the corresponding digit value OR `0xFF`, signalling that the
+    /// character is not a valid base64 character.
+    const DECODE_TABLE: [u8; 256] = create_decode_table();
+
+    const fn create_decode_table() -> [u8; 256] {
+        let mut out = [0xFF; 256];
+
+        // If you wonder why we are using `while` instead of a more idiomatic loop:
+        // const fns are still somewhat limited and do not allow `for`.
+        let mut i = 0;
+        while i < BASE64_DIGITS.len() {
+            out[BASE64_DIGITS[i] as usize] = i as u8;
+            i += 1;
+        }
+
+        out
+    }
+
+    let raw = DECODE_TABLE[ascii as usize];
+    if raw == 0xFF {
+        return None;
+    }
+
+    Some(raw)
+}
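A quick illustration of the mapping `base64_decode` implements (test-style sketch, assuming it runs in this module):

```rust
#[cfg(test)]
mod base64_example {
    use super::*;

    #[test]
    fn digit_values() {
        assert_eq!(base64_decode(b'A'), Some(0));
        assert_eq!(base64_decode(b'a'), Some(26));
        assert_eq!(base64_decode(b'-'), Some(62));
        assert_eq!(base64_decode(b'_'), Some(63));
        // '+' belongs to the standard alphabet, not the URL-safe one:
        assert_eq!(base64_decode(b'+'), None);
    }
}
```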
+
+
 /// An empty `enum` for signaling the fact that a function (potentially) never returns.
 /// Note that you can't construct a value of this type, so a function returning it
 /// can never return. A function returning `Result<Never>` never returns
@@ -16,7 +48,7 @@ use crate::{http::Response, prelude::*};
 pub(crate) enum Never {}
 
 /// Generate random bytes with a cryptographically secure RNG.
-pub(crate) fn gen_random_bytes_crypto<const N: usize>() -> Secret<[u8; N]> {
+pub(crate) fn gen_random_bytes_crypto<const N: usize>() -> SecretBox<[u8; N]> {
     // We use this extra function here to make sure we use a
     // cryptographically secure RNG, even after updating to newer `rand`
     // versions. Right now, we use `thread_rng` and it is cryptographically
@@ -32,7 +64,7 @@ pub(crate) fn gen_random_bytes_crypto<const N: usize>() -> Secret<[u8; N]> {
         bytes
     }
 
-    Secret::new(imp(rand::thread_rng()))
+    SecretBox::new(Box::new(imp(rand::thread_rng())))
 }
 
 pub(crate) type HttpsClient<B> = Client<HttpsConnector<HttpConnector>, B>;
diff --git a/backend/vendor/README.md b/backend/vendor/README.md
new file mode 100644
index 000000000..4ded501e1
--- /dev/null
+++ b/backend/vendor/README.md
@@ -0,0 +1,17 @@
+# Vendored dependencies
+
+For the backend, we mainly use crates.io dependencies, which can be considered immutable.
+Sometimes, we send patches to some of those libraries, which we want to use in Tobira before they are merged/published.
+We could use Cargo git-dependencies, but this is problematic for reproducible builds, especially when a fork might be deleted in the future.
+In those cases, we vendor these dependencies.
+But this is always just temporary: we want to switch back to upstream versions of these dependencies as soon as possible.
+
+We also document the exact version used in vendored dependencies in this document:
+
+## `meilisearch-sdk`
+
+Base is `40f94024cda09a90b2784121d3237585c7eb8513` with these two PRs applied on top:
+- https://github.com/meilisearch/meilisearch-rust/pull/625 (head `40518902db64436778dee233125ebccc9b442bad`)
+- https://github.com/meilisearch/meilisearch-rust/pull/632 (head `737b519ddc10561bb4905c706f7b1a8d6d509857`)
+
+I removed `examples/`, `.git`, `Cargo.lock` (not used anyway when used as a library) and a bunch of Rust-unrelated files to shrink the size of this.
diff --git a/backend/vendor/meilisearch-sdk/.gitignore b/backend/vendor/meilisearch-sdk/.gitignore
new file mode 100644
index 000000000..066c9a27e
--- /dev/null
+++ b/backend/vendor/meilisearch-sdk/.gitignore
@@ -0,0 +1,4 @@
+/target
+Cargo.lock
+examples/web_app/target/*
+.vscode
diff --git a/backend/vendor/meilisearch-sdk/CONTRIBUTING.md b/backend/vendor/meilisearch-sdk/CONTRIBUTING.md
new file mode 100644
index 000000000..498f8b232
--- /dev/null
+++ b/backend/vendor/meilisearch-sdk/CONTRIBUTING.md
@@ -0,0 +1,215 @@
+# Contributing
+
+First of all, thank you for contributing to Meilisearch! The goal of this document is to provide everything you need to know in order to contribute to Meilisearch and its different integrations.
+
+- [Assumptions](#assumptions)
+- [How to Contribute](#how-to-contribute)
+- [Development Workflow](#development-workflow)
+- [Git Guidelines](#git-guidelines)
+- [Release Process (for internal team only)](#release-process-for-internal-team-only)
+
+
+## Assumptions
+
+1. **You're familiar with [GitHub](https://github.com) and the [Pull Request](https://help.github.com/en/github/collaborating-with-issues-and-pull-requests/about-pull-requests)(PR) workflow.**
+2. **You've read the Meilisearch [documentation](https://www.meilisearch.com/docs) and the [README](/README.md).**
+3. **You know about the [Meilisearch community](https://discord.com/invite/meilisearch). Please use this for help.**
+
+## How to Contribute
+
+1. Make sure that the contribution you want to make is explained or detailed in a GitHub issue! Find an [existing issue](https://github.com/meilisearch/meilisearch-rust/issues/) or [open a new one](https://github.com/meilisearch/meilisearch-rust/issues/new).
+2. Once done, [fork the meilisearch-rust repository](https://help.github.com/en/github/getting-started-with-github/fork-a-repo) in your own GitHub account. Ask a maintainer if you want your issue to be checked before making a PR.
+3. [Create a new Git branch](https://help.github.com/en/github/collaborating-with-issues-and-pull-requests/creating-and-deleting-branches-within-your-repository). +4. Review the [Development Workflow](#development-workflow) section that describes the steps to maintain the repository. +5. Make the changes on your branch. +6. [Submit the branch as a PR](https://help.github.com/en/github/collaborating-with-issues-and-pull-requests/creating-a-pull-request-from-a-fork) pointing to the `main` branch of the main meilisearch-rust repository. A maintainer should comment and/or review your Pull Request within a few days. Although depending on the circumstances, it may take longer.
+ We do not enforce a naming convention for the PRs, but **please use something descriptive of your changes**, having in mind that the title of your PR will be automatically added to the next [release changelog](https://github.com/meilisearch/meilisearch-rust/releases/). + +## Development Workflow + +You can set up your local environment natively or using `docker`, check out the [`docker-compose.yml`](/docker-compose.yml). + +Example of running all the checks with docker: +```bash +docker-compose run --rm package bash -c "cargo test" +``` + +To install dependencies: + +```bash +cargo build --release +``` + +To ensure the same dependency versions in all environments, for example the CI, update the dependencies by running: `cargo update`. + +### Tests + +To run the tests, run: + +```bash +# Tests +curl -L https://install.meilisearch.com | sh # download Meilisearch +./meilisearch --master-key=masterKey --no-analytics # run Meilisearch +cargo test +``` + +There are two kinds of tests, documentation tests and unit tests. +If you need to write or read the unit tests you should consider reading this +[readme](meilisearch-test-macro/README.md) about our custom testing macro. + +Also, the WASM example compilation should be checked: + +```bash +rustup target add wasm32-unknown-unknown +cargo check -p web_app --target wasm32-unknown-unknown +``` + +Each PR should pass the tests to be accepted. + +### Clippy + +Each PR should pass [`clippy`](https://github.com/rust-lang/rust-clippy) (the linter) to be accepted. + +```bash +cargo clippy -- -D warnings +``` + +If you don't have `clippy` installed on your machine yet, run: + +```bash +rustup update +rustup component add clippy +``` + +⚠️ Also, if you have installed `clippy` a long time ago, you might need to update it: + +```bash +rustup update +``` + +### Fmt + +Each PR should pass the format test to be accepted. + +Run the following to fix the formatting errors: + +``` +cargo fmt +``` + +and the following to test if the formatting is correct: +``` +cargo fmt --all -- --check +``` + +### Update the README + +The README is generated. Please do not update manually the `README.md` file. + +Instead, update the `README.tpl` and `src/lib.rs` files, and run: + +```sh +sh scripts/update-readme.sh +``` + +Then, push the changed files. + +You can check the current `README.md` is up-to-date by running: + +```sh +sh scripts/check-readme.sh +# To see the diff +sh scripts/check-readme.sh --diff +``` + +If it's not, the CI will fail on your PR. + +### Yaml lint + +To check if your `yaml` files are correctly formatted, you need to [install yamllint](https://yamllint.readthedocs.io/en/stable/quickstart.html#installing-yamllint) and then run `yamllint .` + +## Git Guidelines + +### Git Branches + +All changes must be made in a branch and submitted as PR. +We do not enforce any branch naming style, but please use something descriptive of your changes. + +### Git Commits + +As minimal requirements, your commit message should: +- be capitalized +- not finished by a dot or any other punctuation character (!,?) +- start with a verb so that we can read your commit message this way: "This commit will ...", where "..." is the commit message. + e.g.: "Fix the home page button" or "Add more tests for create_index method" + +We don't follow any other convention, but if you want to use one, we recommend [this one](https://chris.beams.io/posts/git-commit/). 
+ +### GitHub Pull Requests + +Some notes on GitHub PRs: + +- [Convert your PR as a draft](https://help.github.com/en/github/collaborating-with-issues-and-pull-requests/changing-the-stage-of-a-pull-request) if your changes are a work in progress: no one will review it until you pass your PR as ready for review.
+ The draft PR can be very useful if you want to show that you are working on something and make your work visible. +- The branch related to the PR must be **up-to-date with `main`** before merging. Fortunately, this project [integrates a bot](https://github.com/meilisearch/integration-guides/blob/main/resources/bors.md) to automatically enforce this requirement without the PR author having to do it manually. +- All PRs must be reviewed and approved by at least one maintainer. +- The PR title should be accurate and descriptive of the changes. The title of the PR will be indeed automatically added to the next [release changelogs](https://github.com/meilisearch/meilisearch-rust/releases/). + +## Release Process (for the internal team only) + +Meilisearch tools follow the [Semantic Versioning Convention](https://semver.org/). + +### Automation to Rebase and Merge the PRs + +This project integrates a bot that helps us manage pull requests merging.
+_[Read more about this](https://github.com/meilisearch/integration-guides/blob/main/resources/bors.md)._ + +### Automated Changelogs + +This project integrates a tool to create automated changelogs.
+_[Read more about this](https://github.com/meilisearch/integration-guides/blob/main/resources/release-drafter.md)._ + +### How to Publish the Release + +⚠️ Before doing anything, make sure you get through the guide about [Releasing an Integration](https://github.com/meilisearch/integration-guides/blob/main/resources/integration-release.md). + +Make a PR modifying the file [`Cargo.toml`](/Cargo.toml): + +```toml +version = "X.X.X" +``` + +the [`README.tpl`](/README.tpl): + +```rust +//! meilisearch-sdk = "X.X.X" +``` + +and the [code-samples file](/.code-samples.meilisearch.yaml): + +```yml + meilisearch-sdk = "X.X.X" +``` + +with the right version. + + +After the changes on `Cargo.toml`, run the following command: + +``` +sh scripts/update_macro_versions.sh +``` + +After the changes on `lib.rs`, run the following command: + +```bash +sh scripts/update-readme.sh +``` + +Once the changes are merged on `main`, you can publish the current draft release via the [GitHub interface](https://github.com/meilisearch/meilisearch-rust/releases): on this page, click on `Edit` (related to the draft release) > update the description (be sure you apply [these recommendations](https://github.com/meilisearch/integration-guides/blob/main/resources/integration-release.md#writting-the-release-description)) > when you are ready, click on `Publish release`. + +GitHub Actions will be triggered and push the package to [crates.io](https://crates.io/crates/meilisearch-sdk). + +
+ +Thank you again for reading this through. We cannot wait to begin to work with you if you make your way through this contributing guide ❤️ diff --git a/backend/vendor/meilisearch-sdk/Cargo.toml b/backend/vendor/meilisearch-sdk/Cargo.toml new file mode 100644 index 000000000..b100a1225 --- /dev/null +++ b/backend/vendor/meilisearch-sdk/Cargo.toml @@ -0,0 +1,61 @@ +[package] +name = "meilisearch-sdk" +version = "0.27.1" +authors = ["Mubelotix "] +edition = "2018" +description = "Rust wrapper for the Meilisearch API. Meilisearch is a powerful, fast, open-source, easy to use and deploy search engine." +license = "MIT" +readme = "README.md" +repository = "https://github.com/meilisearch/meilisearch-sdk" +resolver = "2" + +[workspace] +members = ["examples/*"] + +[dependencies] +async-trait = "0.1.51" +iso8601 = "0.6.1" +log = "0.4" +serde = { version = "1.0", features = ["derive"] } +serde_json = "1.0" +time = { version = "0.3.7", features = ["serde-well-known", "formatting", "parsing"] } +yaup = "0.3.1" +either = { version = "1.8.0", features = ["serde"] } +thiserror = "1.0.37" +meilisearch-index-setting-macro = { path = "meilisearch-index-setting-macro", version = "0.27.1" } +pin-project-lite = { version = "0.2.13", optional = true } +reqwest = { version = "0.12.3", optional = true, default-features = false, features = ["rustls-tls", "http2", "stream"] } +bytes = { version = "1.6", optional = true } +uuid = { version = "1.1.2", features = ["v4"] } +futures-io = "0.3.30" +futures = "0.3" + +[target.'cfg(not(target_arch = "wasm32"))'.dependencies] +jsonwebtoken = { version = "9", default-features = false } + +[target.'cfg(target_arch = "wasm32")'.dependencies] +uuid = { version = "1.8.0", default-features = false, features = ["v4", "js"] } +web-sys = "0.3" +wasm-bindgen-futures = "0.4" + +[features] +default = ["reqwest"] +reqwest = ["dep:reqwest", "pin-project-lite", "bytes"] +futures-unsend = [] + +[dev-dependencies] +futures-await-test = "0.3" +futures = "0.3" +mockito = "1.0.0" +meilisearch-test-macro = { path = "meilisearch-test-macro" } +tokio = { version = "1", features = ["rt", "macros"] } + +# The following dependencies are required for examples +wasm-bindgen = "0.2" +wasm-bindgen-futures = "0.4" +yew = "0.21" +lazy_static = "1.4" +web-sys = "0.3" +console_error_panic_hook = "0.1" +big_s = "1.0.2" +insta = "1.38.0" diff --git a/backend/vendor/meilisearch-sdk/LICENSE b/backend/vendor/meilisearch-sdk/LICENSE new file mode 100644 index 000000000..1b9f856e3 --- /dev/null +++ b/backend/vendor/meilisearch-sdk/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2020-2025 Meili SAS + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/backend/vendor/meilisearch-sdk/README.md b/backend/vendor/meilisearch-sdk/README.md new file mode 100644 index 000000000..f70c3f907 --- /dev/null +++ b/backend/vendor/meilisearch-sdk/README.md @@ -0,0 +1,258 @@ + + + + +

+Meilisearch Rust SDK
+
+Meilisearch | Meilisearch Cloud | Documentation | Discord | Roadmap | Website | FAQ
+
+crates.io | Tests | License | Bors enabled
+
+⚡ The Meilisearch API client written for Rust 🦀
+
+**Meilisearch Rust** is the Meilisearch API client for Rust developers.
+
+**Meilisearch** is an open-source search engine. [Learn more about Meilisearch.](https://github.com/meilisearch/meilisearch)
+
+## Table of Contents
+
+- [📖 Documentation](#-documentation)
+- [🔧 Installation](#-installation)
+- [🚀 Getting started](#-getting-started)
+- [🌐 Running in the Browser with WASM](#-running-in-the-browser-with-wasm)
+- [🤖 Compatibility with Meilisearch](#-compatibility-with-meilisearch)
+- [⚙️ Contributing](#️-contributing)
+
+## 📖 Documentation
+
+This readme contains all the documentation you need to start using this Meilisearch SDK.
+
+For general information on how to use Meilisearch—such as our API reference, tutorials, guides, and in-depth articles—refer to our [main documentation website](https://www.meilisearch.com/docs).
+
+## 🔧 Installation
+
+To use `meilisearch-sdk`, add this to your `Cargo.toml`:
+
+```toml
+[dependencies]
+meilisearch-sdk = "0.27.1"
+```
+
+The following optional dependencies may also be useful:
+
+```toml
+futures = "0.3" # To be able to block on async functions if you are not using an async runtime
+serde = { version = "1.0", features = ["derive"] }
+```
+
+This crate is `async`, but you can choose to use an async runtime like [tokio](https://crates.io/crates/tokio) or just [block on futures](https://docs.rs/futures/latest/futures/executor/fn.block_on.html).
+You can enable the `sync` feature to make most structs `Sync`. It may be a bit slower.
+
+Using this crate is possible without [serde](https://crates.io/crates/serde), but a lot of features require serde.
+
+### Run Meilisearch
+
+⚡️ **Launch, scale, and streamline in minutes with Meilisearch Cloud**—no maintenance, no commitment, cancel anytime. [Try it free now](https://cloud.meilisearch.com/login?utm_campaign=oss&utm_source=github&utm_medium=meilisearch-rust).
+
+🪨 Prefer to self-host? [Download and deploy](https://www.meilisearch.com/docs/learn/self_hosted/getting_started_with_self_hosted_meilisearch?utm_campaign=oss&utm_source=github&utm_medium=meilisearch-rust) our fast, open-source search engine on your own infrastructure.
+
+## 🚀 Getting started
+
+#### Add Documents
+
+```rust
+use meilisearch_sdk::client::*;
+use serde::{Serialize, Deserialize};
+use futures::executor::block_on;
+
+#[derive(Serialize, Deserialize, Debug)]
+struct Movie {
+    id: usize,
+    title: String,
+    genres: Vec<String>,
+}
+
+
+#[tokio::main(flavor = "current_thread")]
+async fn main() {
+    // Create a client (this does not send any request, so it cannot fail)
+    let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap();
+
+    // An index is where the documents are stored.
+    let movies = client.index("movies");
+
+    // Add some movies to the index. If the index 'movies' does not exist, Meilisearch creates it when you first add the documents.
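+    // The second argument of `add_documents` below names the primary key
+    // ("id" here); with `None`, Meilisearch tries to infer it from the
+    // documents instead.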
+    movies.add_documents(&[
+        Movie { id: 1, title: String::from("Carol"), genres: vec!["Romance".to_string(), "Drama".to_string()] },
+        Movie { id: 2, title: String::from("Wonder Woman"), genres: vec!["Action".to_string(), "Adventure".to_string()] },
+        Movie { id: 3, title: String::from("Life of Pi"), genres: vec!["Adventure".to_string(), "Drama".to_string()] },
+        Movie { id: 4, title: String::from("Mad Max"), genres: vec!["Adventure".to_string(), "Science Fiction".to_string()] },
+        Movie { id: 5, title: String::from("Moana"), genres: vec!["Fantasy".to_string(), "Action".to_string()] },
+        Movie { id: 6, title: String::from("Philadelphia"), genres: vec!["Drama".to_string()] },
+    ], Some("id")).await.unwrap();
+}
+```
+
+With the returned `uid`, you can check the status (`enqueued`, `canceled`, `processing`, `succeeded` or `failed`) of your document addition using the [task endpoint](https://www.meilisearch.com/docs/reference/api/tasks#get-task).
+
+#### Basic Search
+
+```rust
+// Meilisearch is typo-tolerant:
+println!("{:?}", client.index("movies_2").search().with_query("caorl").execute::<Movie>().await.unwrap().hits);
+```
+
+Output:
+```
+[Movie { id: 1, title: String::from("Carol"), genres: vec!["Romance", "Drama"] }]
+```
+
+JSON output:
+```json
+{
+  "hits": [{
+    "id": 1,
+    "title": "Carol",
+    "genres": ["Romance", "Drama"]
+  }],
+  "offset": 0,
+  "limit": 10,
+  "processingTimeMs": 1,
+  "query": "caorl"
+}
+```
+
+#### Custom Search
+
+```rust
+let search_result = client.index("movies_3")
+    .search()
+    .with_query("phil")
+    .with_attributes_to_highlight(Selectors::Some(&["*"]))
+    .execute::<Movie>()
+    .await
+    .unwrap();
+println!("{:?}", search_result.hits);
+```
+
+JSON output:
+```json
+{
+  "hits": [
+    {
+      "id": 6,
+      "title": "Philadelphia",
+      "_formatted": {
+        "id": 6,
+        "title": "Philadelphia",
+        "genre": ["Drama"]
+      }
+    }
+  ],
+  "offset": 0,
+  "limit": 20,
+  "processingTimeMs": 0,
+  "query": "phil"
+}
+```
+
+#### Custom Search With Filters
+
+If you want to enable filtering, you must add your attributes to the `filterableAttributes`
+index setting.
+
+```rust
+let filterable_attributes = [
+    "id",
+    "genres",
+];
+client.index("movies_4").set_filterable_attributes(&filterable_attributes).await.unwrap();
+```
+
+You only need to perform this operation once.
+
+Note that Meilisearch will rebuild your index whenever you update `filterableAttributes`. Depending on the size of your dataset, this might take time. You can track the process using the [tasks API](https://www.meilisearch.com/docs/reference/api/tasks#get-task).
+
+Then, you can perform the search:
+
+```rust
+let search_result = client.index("movies_5")
+    .search()
+    .with_query("wonder")
+    .with_filter("id > 1 AND genres = Action")
+    .execute::<Movie>()
+    .await
+    .unwrap();
+println!("{:?}", search_result.hits);
+```
+
+JSON output:
+```json
+{
+  "hits": [
+    {
+      "id": 2,
+      "title": "Wonder Woman",
+      "genres": ["Action", "Adventure"]
+    }
+  ],
+  "offset": 0,
+  "limit": 20,
+  "estimatedTotalHits": 1,
+  "processingTimeMs": 0,
+  "query": "wonder"
+}
+```
+
+#### Customize the `HttpClient`
+
+By default, the SDK uses [`reqwest`](https://docs.rs/reqwest/latest/reqwest/) to make HTTP calls.
+The SDK lets you customize the HTTP client by implementing the `HttpClient` trait yourself and
+initializing the `Client` with the `new_with_client` method.
+You may be interested in the `futures-unsend` feature, which lets you specify a non-Send HTTP client.
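+A minimal sketch of that call shape, assuming a hypothetical `MyHttpClient` type that you have already written as an implementation of the SDK's `HttpClient` trait:
+
+```rust
+// `MyHttpClient` is a placeholder for your own `HttpClient` implementation.
+let http_client = MyHttpClient::default();
+let client = Client::new_with_client(
+    "http://localhost:7700", // host, same as for `Client::new`
+    Some("masterKey"),       // API key
+    http_client,             // your custom `HttpClient`
+);
+```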
+#### Wasm support
+
+The SDK supports wasm through reqwest. You'll need to enable the `futures-unsend` feature while importing it, though.
+
+## 🌐 Running in the Browser with WASM
+
+This crate fully supports WASM.
+
+The only difference between the WASM and the native version is that the native version has one extra variant (`Error::Http`) in the `Error` enum. That should not matter much, but we could add this variant in WASM too.
+
+However, a program intended to run in a web browser requires a **very** different design than a CLI program. To see an example of a simple Rust web app using Meilisearch, see [our demo](./examples/web_app).
+
+WARNING: `meilisearch-sdk` will panic if no Window is available (e.g. in a Web Extension).
+
+## 🤖 Compatibility with Meilisearch
+
+This package guarantees compatibility with [version v1.x of Meilisearch](https://github.com/meilisearch/meilisearch/releases/latest), but some features may not be present. Please check the [issues](https://github.com/meilisearch/meilisearch-rust/issues?q=is%3Aissue+is%3Aopen+label%3A%22good+first+issue%22+label%3Aenhancement) for more info.
+
+## ⚙️ Contributing
+
+Any new contribution is more than welcome in this project!
+
+If you want to know more about the development workflow or want to contribute, please visit our [contributing guidelines](/CONTRIBUTING.md) for detailed instructions!
+
+<!-- Centered footer: -->
+**Meilisearch** provides and maintains many **SDKs and Integration tools** like this one. We want to provide everyone with an **amazing search experience for any kind of project**. If you want to contribute, make suggestions, or just know what's going on right now, visit us in the [integration-guides](https://github.com/meilisearch/integration-guides) repository.
diff --git a/backend/vendor/meilisearch-sdk/README.tpl b/backend/vendor/meilisearch-sdk/README.tpl
new file mode 100644
index 000000000..6fec810f9
--- /dev/null
+++ b/backend/vendor/meilisearch-sdk/README.tpl
@@ -0,0 +1,101 @@
+<!-- Centered header, identical to the one in README.md: Meilisearch-Rust
+     logo; title "Meilisearch Rust SDK"; links: Meilisearch | Meilisearch
+     Cloud | Documentation | Discord | Roadmap | Website | FAQ; badges:
+     crates.io, Tests, License, Bors enabled; followed by the tagline: -->
+⚡ The Meilisearch API client written for Rust 🦀
+
+**Meilisearch Rust** is the Meilisearch API client for Rust developers.
+
+**Meilisearch** is an open-source search engine. [Learn more about Meilisearch.](https://github.com/meilisearch/meilisearch)
+
+## Table of Contents
+
+- [📖 Documentation](#-documentation)
+- [🔧 Installation](#-installation)
+- [🚀 Getting started](#-getting-started)
+- [🌐 Running in the Browser with WASM](#-running-in-the-browser-with-wasm)
+- [🤖 Compatibility with Meilisearch](#-compatibility-with-meilisearch)
+- [⚙️ Contributing](#️-contributing)
+
+## 📖 Documentation
+
+This readme contains all the documentation you need to start using this Meilisearch SDK.
+
+For general information on how to use Meilisearch—such as our API reference, tutorials, guides, and in-depth articles—refer to our [main documentation website](https://www.meilisearch.com/docs).
+
+## 🔧 Installation
+
+To use `meilisearch-sdk`, add this to your `Cargo.toml`:
+
+```toml
+[dependencies]
+meilisearch-sdk = "0.27.1"
+```
+
+The following optional dependencies may also be useful:
+
+```toml
+futures = "0.3" # To be able to block on async functions if you are not using an async runtime
+serde = { version = "1.0", features = ["derive"] }
+```
+
+This crate is `async`, but you can choose to use an async runtime like [tokio](https://crates.io/crates/tokio) or just [block on futures](https://docs.rs/futures/latest/futures/executor/fn.block_on.html).
+You can enable the `sync` feature to make most structs `Sync`. It may be a bit slower.
+
+Using this crate is possible without [serde](https://crates.io/crates/serde), but a lot of features require serde.
+
+### Run Meilisearch
+
+⚡️ **Launch, scale, and streamline in minutes with Meilisearch Cloud**—no maintenance, no commitment, cancel anytime. [Try it free now](https://cloud.meilisearch.com/login?utm_campaign=oss&utm_source=github&utm_medium=meilisearch-rust).
+
+🪨 Prefer to self-host? [Download and deploy](https://www.meilisearch.com/docs/learn/self_hosted/getting_started_with_self_hosted_meilisearch?utm_campaign=oss&utm_source=github&utm_medium=meilisearch-rust) our fast, open-source search engine on your own infrastructure.
+
+{{readme}}
+
+## 🌐 Running in the Browser with WASM
+
+This crate fully supports WASM.
+
+The only difference between the WASM and the native version is that the native version has one extra variant (`Error::Http`) in the `Error` enum. That should not matter much, but we could add this variant in WASM too.
+
+However, a program intended to run in a web browser requires a **very** different design than a CLI program. To see an example of a simple Rust web app using Meilisearch, see [our demo](./examples/web_app).
+
+WARNING: `meilisearch-sdk` will panic if no Window is available (e.g. in a Web Extension).
+
+## 🤖 Compatibility with Meilisearch
+
+This package guarantees compatibility with [version v1.x of Meilisearch](https://github.com/meilisearch/meilisearch/releases/latest), but some features may not be present. Please check the [issues](https://github.com/meilisearch/meilisearch-rust/issues?q=is%3Aissue+is%3Aopen+label%3A%22good+first+issue%22+label%3Aenhancement) for more info.
+
+## ⚙️ Contributing
+
+Any new contribution is more than welcome in this project!
+
+If you want to know more about the development workflow or want to contribute, please visit our [contributing guidelines](/CONTRIBUTING.md) for detailed instructions!
+
+<!-- Centered footer: -->
+ +**Meilisearch** provides and maintains many **SDKs and Integration tools** like this one. We want to provide everyone with an **amazing search experience for any kind of project**. If you want to contribute, make suggestions, or just know what's going on right now, visit us in the [integration-guides](https://github.com/meilisearch/integration-guides) repository. diff --git a/backend/vendor/meilisearch-sdk/bors.toml b/backend/vendor/meilisearch-sdk/bors.toml new file mode 100644 index 000000000..83ffb5041 --- /dev/null +++ b/backend/vendor/meilisearch-sdk/bors.toml @@ -0,0 +1,10 @@ +status = [ + 'integration-tests', + 'clippy-check', + 'rust-format', + 'readme-check', + 'wasm-build', + 'Yaml linting check' +] +# 1 hour timeout +timeout-sec = 3600 diff --git a/backend/vendor/meilisearch-sdk/docker-compose.yml b/backend/vendor/meilisearch-sdk/docker-compose.yml new file mode 100644 index 000000000..edd0bcf3c --- /dev/null +++ b/backend/vendor/meilisearch-sdk/docker-compose.yml @@ -0,0 +1,31 @@ +version: "3.8" + +# remove this line if you don't need a volume to map your dependencies +# Check how to cache the build +volumes: + cargo: + +services: + package: + image: rust:1 + tty: true + stdin_open: true + working_dir: /home/package + environment: + - MEILISEARCH_URL=http://meilisearch:7700 + - CARGO_HOME=/vendor/cargo + depends_on: + - meilisearch + links: + - meilisearch + volumes: + - ./:/home/package + - cargo:/vendor/cargo + + meilisearch: + image: getmeili/meilisearch:latest + ports: + - "7700" + environment: + - MEILI_MASTER_KEY=masterKey + - MEILI_NO_ANALYTICS=true diff --git a/backend/vendor/meilisearch-sdk/meilisearch-index-setting-macro/Cargo.toml b/backend/vendor/meilisearch-sdk/meilisearch-index-setting-macro/Cargo.toml new file mode 100644 index 000000000..4f921389a --- /dev/null +++ b/backend/vendor/meilisearch-sdk/meilisearch-index-setting-macro/Cargo.toml @@ -0,0 +1,19 @@ +[package] +name = "meilisearch-index-setting-macro" +version = "0.27.1" +description = "Helper tool to generate settings of a Meilisearch index" +edition = "2021" +license = "MIT" +repository = "https://github.com/meilisearch/meilisearch-rust" + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html + +[lib] +proc-macro = true + +[dependencies] +syn = { version = "2.0.48", features = ["extra-traits"] } +quote = "1.0.21" +proc-macro2 = "1.0.46" +convert_case = "0.6.0" +structmeta = "0.3" diff --git a/backend/vendor/meilisearch-sdk/meilisearch-index-setting-macro/src/lib.rs b/backend/vendor/meilisearch-sdk/meilisearch-index-setting-macro/src/lib.rs new file mode 100644 index 000000000..23d89f065 --- /dev/null +++ b/backend/vendor/meilisearch-sdk/meilisearch-index-setting-macro/src/lib.rs @@ -0,0 +1,191 @@ +use convert_case::{Case, Casing}; +use proc_macro2::Ident; +use quote::quote; +use structmeta::{Flag, StructMeta}; +use syn::{parse_macro_input, spanned::Spanned}; + +#[derive(Clone, StructMeta, Default)] +struct FieldAttrs { + primary_key: Flag, + displayed: Flag, + searchable: Flag, + distinct: Flag, + filterable: Flag, + sortable: Flag, +} + +#[proc_macro_derive(IndexConfig, attributes(index_config))] +pub fn generate_index_settings(input: proc_macro::TokenStream) -> proc_macro::TokenStream { + let ast = parse_macro_input!(input as syn::DeriveInput); + + let fields: &syn::Fields = match ast.data { + syn::Data::Struct(ref data) => &data.fields, + _ => { + return proc_macro::TokenStream::from( + syn::Error::new(ast.ident.span(), "Applicable only to 
struct").to_compile_error(), + ); + } + }; + + let struct_ident = &ast.ident; + + let index_config_implementation = get_index_config_implementation(struct_ident, fields); + proc_macro::TokenStream::from(quote! { + #index_config_implementation + }) +} + +fn get_index_config_implementation( + struct_ident: &Ident, + fields: &syn::Fields, +) -> proc_macro2::TokenStream { + let mut primary_key_attribute = String::new(); + let mut distinct_key_attribute = String::new(); + let mut displayed_attributes = vec![]; + let mut searchable_attributes = vec![]; + let mut filterable_attributes = vec![]; + let mut sortable_attributes = vec![]; + + let index_name = struct_ident + .to_string() + .from_case(Case::UpperCamel) + .to_case(Case::Snake); + + let mut primary_key_found = false; + let mut distinct_found = false; + + for field in fields { + let attrs = field + .attrs + .iter() + .filter(|attr| attr.path().is_ident("index_config")) + .map(|attr| attr.parse_args::().unwrap()) + .collect::>() + .first() + .cloned() + .unwrap_or_default(); + + // Check if the primary key field is unique + if attrs.primary_key.value() { + if primary_key_found { + return syn::Error::new( + field.span(), + "Only one field can be marked as primary key", + ) + .to_compile_error(); + } + primary_key_attribute = field.ident.clone().unwrap().to_string(); + primary_key_found = true; + } + + // Check if the distinct field is unique + if attrs.distinct.value() { + if distinct_found { + return syn::Error::new(field.span(), "Only one field can be marked as distinct") + .to_compile_error(); + } + distinct_key_attribute = field.ident.clone().unwrap().to_string(); + distinct_found = true; + } + + if attrs.displayed.value() { + displayed_attributes.push(field.ident.clone().unwrap().to_string()); + } + + if attrs.searchable.value() { + searchable_attributes.push(field.ident.clone().unwrap().to_string()); + } + + if attrs.filterable.value() { + filterable_attributes.push(field.ident.clone().unwrap().to_string()); + } + + if attrs.sortable.value() { + sortable_attributes.push(field.ident.clone().unwrap().to_string()); + } + } + + let primary_key_token: proc_macro2::TokenStream = if primary_key_attribute.is_empty() { + quote! { + ::std::option::Option::None + } + } else { + quote! { + ::std::option::Option::Some(#primary_key_attribute) + } + }; + + let display_attr_tokens = + get_settings_token_for_list(&displayed_attributes, "with_displayed_attributes"); + let sortable_attr_tokens = + get_settings_token_for_list(&sortable_attributes, "with_sortable_attributes"); + let filterable_attr_tokens = + get_settings_token_for_list(&filterable_attributes, "with_filterable_attributes"); + let searchable_attr_tokens = + get_settings_token_for_list(&searchable_attributes, "with_searchable_attributes"); + let distinct_attr_token = get_settings_token_for_string_for_some_string( + &distinct_key_attribute, + "with_distinct_attribute", + ); + + quote! 
+        #[::meilisearch_sdk::macro_helper::async_trait(?Send)]
+        impl ::meilisearch_sdk::documents::IndexConfig for #struct_ident {
+            const INDEX_STR: &'static str = #index_name;
+
+            fn generate_settings() -> ::meilisearch_sdk::settings::Settings {
+                ::meilisearch_sdk::settings::Settings::new()
+                #display_attr_tokens
+                #sortable_attr_tokens
+                #filterable_attr_tokens
+                #searchable_attr_tokens
+                #distinct_attr_token
+            }
+
+            async fn generate_index(client: &::meilisearch_sdk::client::Client) -> std::result::Result<::meilisearch_sdk::indexes::Index, ::meilisearch_sdk::tasks::Task> {
+                return client.create_index(#index_name, #primary_key_token)
+                    .await.unwrap()
+                    .wait_for_completion(&client, ::std::option::Option::None, ::std::option::Option::None)
+                    .await.unwrap()
+                    .try_make_index(&client);
+            }
+        }
+    }
+}
+
+fn get_settings_token_for_list(
+    field_name_list: &[String],
+    method_name: &str,
+) -> proc_macro2::TokenStream {
+    let string_attributes = field_name_list.iter().map(|attr| {
+        quote! {
+            #attr
+        }
+    });
+    let method_ident = Ident::new(method_name, proc_macro2::Span::call_site());
+
+    if field_name_list.is_empty() {
+        quote! {
+            .#method_ident(::std::iter::empty::<&str>())
+        }
+    } else {
+        quote! {
+            .#method_ident([#(#string_attributes),*])
+        }
+    }
+}
+
+fn get_settings_token_for_string_for_some_string(
+    field_name: &String,
+    method_name: &str,
+) -> proc_macro2::TokenStream {
+    let method_ident = Ident::new(method_name, proc_macro2::Span::call_site());
+
+    if field_name.is_empty() {
+        proc_macro2::TokenStream::new()
+    } else {
+        quote! {
+            .#method_ident(::std::option::Option::Some(#field_name))
+        }
+    }
+}
diff --git a/backend/vendor/meilisearch-sdk/meilisearch-test-macro/Cargo.toml b/backend/vendor/meilisearch-sdk/meilisearch-test-macro/Cargo.toml
new file mode 100644
index 000000000..08098d5d3
--- /dev/null
+++ b/backend/vendor/meilisearch-sdk/meilisearch-test-macro/Cargo.toml
@@ -0,0 +1,15 @@
+[package]
+name = "meilisearch-test-macro"
+version = "0.0.0"
+edition = "2021"
+publish = false
+
+[lib]
+proc-macro = true
+
+# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
+
+[dependencies]
+proc-macro2 = "1.0.0"
+quote = "1.0.0"
+syn = { version = "2.0.48", features = ["clone-impls", "full", "parsing", "printing", "proc-macro"], default-features = false }
diff --git a/backend/vendor/meilisearch-sdk/meilisearch-test-macro/README.md b/backend/vendor/meilisearch-sdk/meilisearch-test-macro/README.md
new file mode 100644
index 000000000..1d794b690
--- /dev/null
+++ b/backend/vendor/meilisearch-sdk/meilisearch-test-macro/README.md
@@ -0,0 +1,78 @@
+# Meilisearch test macro
+
+This crate defines the `meilisearch_test` macro.
+
+Since the code is a little hard to read, here is a complete explanation of how to use it.
+The macro aims to ease the writing of tests by:
+
+1. Reducing the amount of code you need to write and maintain for each test.
+2. Ensuring all your indexes have a unique name, so the tests can all run in parallel.
+3. Ensuring you never forget to delete an index a test created.
+
+Before explaining its usage, we're going to see a simple test _before_ this macro:
+
+```rust
+#[async_test]
+async fn test_get_tasks() -> Result<(), Error> {
+    let client = Client::new(MEILISEARCH_URL, MEILISEARCH_API_KEY);
+
+    let index = client
+        .create_index("test_get_tasks", None)
+        .await?
+        .wait_for_completion(&client, None, None)
+        .await?
+        .try_make_index(&client)
+        .unwrap();
+
+    let tasks = index.get_tasks().await?;
+    // The only task is the creation of the index
+    assert_eq!(tasks.results.len(), 1);
+
+    index.delete()
+        .await?
+        .wait_for_completion(&client, None, None)
+        .await?;
+    Ok(())
+}
+```
+
+I have multiple problems with this test:
+
+- `let client = Client::new(MEILISEARCH_URL, MEILISEARCH_API_KEY);`: This line is always the same in every test.
+  And if you make a typo in the HTTP address or the master key, you'll get an error.
+- `let index = client.create_index("test_get_tasks", None)...`: Each test needs to have a unique name.
+  This means we currently need to write the name of the test everywhere; it's not practical.
+- There are 11 lines dedicated to the creation and deletion of the index; this is once again something that'll never change,
+  whatever the test is. But if you ever forget to delete the index at the end, you'll have trouble re-running
+  the tests.
+
+---
+
+With this macro, all these problems are solved. See a rewrite of this test:
+
+```rust
+#[meilisearch_test]
+async fn test_get_tasks(index: Index, client: Client) -> Result<(), Error> {
+    let tasks = index.get_tasks().await?;
+    // The only task is the creation of the index
+    assert_eq!(tasks.results.len(), 1);
+    Ok(())
+}
+```
+
+So now you're probably seeing what happened. By using an index and a client in the parameters of
+the test, the macro automatically did the same thing we've seen before.
+There are a few rules, though:
+
+1. The macro only handles three types of arguments:
+
+- `String`: It returns the name of the test.
+- `Client`: It creates a client like this: `Client::new("http://localhost:7700", Some("masterKey")).unwrap()`.
+- `Index`: It creates and deletes an index, as we've seen before.
+
+2. You only get what you asked for. That means if you don't ask for an index, no index will be created in Meilisearch.
+   So, if you are testing the creation of indexes, you can ask for a `Client` and a `String` and then create the index yourself.
+   The index won't already be present in Meilisearch.
+3. You can put your parameters in any order; it won't change anything.
+4. Everything you use **must** be in scope directly. If you're using an `Index`, you must write `Index` in the parameters,
+   not `meilisearch_rust::Index` or `crate::Index`.
+5.
And I think that's all, use and abuse it 🎉 diff --git a/backend/vendor/meilisearch-sdk/meilisearch-test-macro/src/lib.rs b/backend/vendor/meilisearch-sdk/meilisearch-test-macro/src/lib.rs new file mode 100644 index 000000000..28d4a440e --- /dev/null +++ b/backend/vendor/meilisearch-sdk/meilisearch-test-macro/src/lib.rs @@ -0,0 +1,184 @@ +#![recursion_limit = "4096"] + +extern crate proc_macro; + +use proc_macro::TokenStream; +use proc_macro2::Span; +use quote::quote; +use syn::{ + parse_macro_input, parse_quote, Expr, FnArg, Ident, Item, PatType, Path, Stmt, Type, TypePath, + Visibility, +}; + +#[proc_macro_attribute] +pub fn meilisearch_test(params: TokenStream, input: TokenStream) -> TokenStream { + assert!( + params.is_empty(), + "the #[async_test] attribute currently does not take parameters" + ); + + let mut inner = parse_macro_input!(input as Item); + let mut outer = inner.clone(); + if let (&mut Item::Fn(ref mut inner_fn), &mut Item::Fn(ref mut outer_fn)) = + (&mut inner, &mut outer) + { + #[derive(Debug, PartialEq, Eq)] + enum Param { + Client, + Index, + String, + } + + inner_fn.sig.ident = Ident::new( + &("_inner_meilisearch_test_macro_".to_string() + &inner_fn.sig.ident.to_string()), + Span::call_site(), + ); + let inner_ident = &inner_fn.sig.ident; + inner_fn.vis = Visibility::Inherited; + inner_fn.attrs.clear(); + assert!( + outer_fn.sig.asyncness.take().is_some(), + "#[meilisearch_test] can only be applied to async functions" + ); + + let mut params = Vec::new(); + + let parameters = &inner_fn.sig.inputs; + for param in parameters { + match param { + FnArg::Typed(PatType { ty, .. }) => match &**ty { + Type::Path(TypePath { path: Path { segments, .. }, .. } ) if segments.last().unwrap().ident == "String" => { + params.push(Param::String); + } + Type::Path(TypePath { path: Path { segments, .. }, .. } ) if segments.last().unwrap().ident == "Index" => { + params.push(Param::Index); + } + Type::Path(TypePath { path: Path { segments, .. }, .. } ) if segments.last().unwrap().ident == "Client" => { + params.push(Param::Client); + } + // TODO: throw this error while pointing to the specific token + ty => panic!( + "#[meilisearch_test] can only receive Client, Index or String as parameters but received {ty:?}" + ), + }, + // TODO: throw this error while pointing to the specific token + // Used `self` as a parameter + FnArg::Receiver(_) => panic!( + "#[meilisearch_test] can only receive Client, Index or String as parameters" + ), + } + } + + // if a `Client` or an `Index` was asked for the test we must create a meilisearch `Client`. + let use_client = params + .iter() + .any(|param| matches!(param, Param::Client | Param::Index)); + // if a `String` or an `Index` was asked then we need to extract the name of the test function. 
+        let use_name = params
+            .iter()
+            .any(|param| matches!(param, Param::String | Param::Index));
+        let use_index = params.contains(&Param::Index);
+
+        // Now we are going to build the body of the outer function
+        let mut outer_block: Vec<Stmt> = Vec::new();
+
+        // First, check whether a client will be used and, if so, create it
+        if use_client {
+            outer_block.push(parse_quote!(
+                let meilisearch_url = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700");
+            ));
+            outer_block.push(parse_quote!(
+                let meilisearch_api_key = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey");
+            ));
+            outer_block.push(parse_quote!(
+                let client = Client::new(meilisearch_url, Some(meilisearch_api_key)).unwrap();
+            ));
+        }
+
+        // Now we do the same for the index name
+        if use_name {
+            let fn_name = &outer_fn.sig.ident;
+            // the name we're going to return is the complete path to the function, i.e. something like
+            // `indexes::tests::test_fetch_info`, but since `::` is not allowed by meilisearch in an index
+            // name, we rename that to `indexes-tests-test_fetch_info`.
+            outer_block.push(parse_quote!(
+                let name = format!("{}::{}", std::module_path!(), stringify!(#fn_name)).replace("::", "-");
+            ));
+        }
+
+        // And finally, if an index was asked for, we delete it, then (re)create it and wait until meilisearch confirms its creation.
+        if use_index {
+            outer_block.push(parse_quote!({
+                let res = client
+                    .delete_index(&name)
+                    .await
+                    .expect("Network issue while sending the delete index task")
+                    .wait_for_completion(&client, None, None)
+                    .await
+                    .expect("Network issue while waiting for the index deletion");
+                if res.is_failure() {
+                    let error = res.unwrap_failure();
+                    assert_eq!(
+                        error.error_code,
+                        crate::errors::ErrorCode::IndexNotFound,
+                        "{:?}",
+                        error
+                    );
+                }
+            }));
+
+            outer_block.push(parse_quote!(
+                let index = client
+                    .create_index(&name, None)
+                    .await
+                    .expect("Network issue while sending the create index task")
+                    .wait_for_completion(&client, None, None)
+                    .await
+                    .expect("Network issue while waiting for the index creation")
+                    .try_make_index(&client)
+                    .expect("Could not create the index out of the create index task");
+            ));
+        }
+
+        // Create a comma-separated list of params with the names we defined previously.
+        let params: Vec<Expr> = params
+            .into_iter()
+            .map(|param| match param {
+                Param::Client => parse_quote!(client),
+                Param::Index => parse_quote!(index),
+                Param::String => parse_quote!(name),
+            })
+            .collect();
+
+        // Now we can call the user code with our parameters :tada:
+        outer_block.push(parse_quote!(
+            let result = #inner_ident(#(#params.clone()),*).await;
+        ));
+
+        // And right before the end, if an index was created and the test executed successfully, we delete it.
+        if use_index {
+            outer_block.push(parse_quote!(
+                index
+                    .delete()
+                    .await
+                    .expect("Network issue while sending the last delete index task");
+                // we early exit the test here and let meilisearch handle the deletion asynchronously
+            ));
+        }
+
+        // Finally, we just return the result the user gave us.
+        outer_block.push(parse_quote!(return result;));
+
+        outer_fn.sig.inputs.clear();
+        outer_fn.sig.asyncness = inner_fn.sig.asyncness;
+        outer_fn.attrs.push(parse_quote!(#[tokio::test]));
+        outer_fn.block.stmts = outer_block;
+    } else {
+        panic!("#[meilisearch_test] can only be applied to async functions")
+    }
+    quote!(
+        #inner
+        #outer
+    )
+    .into()
+}
diff --git a/backend/vendor/meilisearch-sdk/scripts/check-readme.sh b/backend/vendor/meilisearch-sdk/scripts/check-readme.sh
new file mode 100644
index 000000000..bce41d4cd
--- /dev/null
+++ b/backend/vendor/meilisearch-sdk/scripts/check-readme.sh
@@ -0,0 +1,42 @@
+#!/bin/sh
+
+# Checking that cargo is installed
+command -v cargo > /dev/null 2>&1
+if [ "$?" -ne 0 ]; then
+    echo 'You must install cargo to make this script work.'
+    echo 'See https://doc.rust-lang.org/cargo/getting-started/installation.html'
+    exit 1
+fi
+
+# Installing cargo-readme if it's not installed yet
+cargo install cargo-readme
+
+# Comparing the generated README and the current one
+current_readme="README.md"
+generated_readme="README.md_tmp"
+cargo readme > "$generated_readme"
+
+# Exiting with the right message
+echo ''
+diff "$current_readme" "$generated_readme" > /dev/null 2>&1
+if [ "$?" = 0 ]; then
+    echo "OK"
+    rm -f "$generated_readme"
+    exit 0
+else
+    echo "The current README.md is not up-to-date with the template."
+
+    # Displaying the diff if the --diff flag is activated
+    if [ "$1" = '--diff' ]; then
+        echo 'Diff found:'
+        diff "$current_readme" "$generated_readme"
+    else
+        echo 'To see the diff, run:'
+        echo '  $ sh scripts/check-readme.sh --diff'
+        echo 'To update the README, run:'
+        echo '  $ sh scripts/update-readme.sh'
+    fi
+
+    rm -f "$generated_readme"
+    exit 1
+fi
diff --git a/backend/vendor/meilisearch-sdk/scripts/update-readme.sh b/backend/vendor/meilisearch-sdk/scripts/update-readme.sh
new file mode 100644
index 000000000..e2f6dd18e
--- /dev/null
+++ b/backend/vendor/meilisearch-sdk/scripts/update-readme.sh
@@ -0,0 +1,15 @@
+#!/bin/sh
+
+# Checking that cargo is installed
+command -v cargo > /dev/null 2>&1
+if [ "$?" -ne 0 ]; then
+    echo 'You must install cargo to make this script work.'
+    echo 'See https://doc.rust-lang.org/cargo/getting-started/installation.html'
+    exit
+fi
+
+# Installing cargo-readme if it's not installed yet
+cargo install cargo-readme
+
+# Generating the README.md file
+cargo readme > README.md
diff --git a/backend/vendor/meilisearch-sdk/scripts/update_macro_versions.sh b/backend/vendor/meilisearch-sdk/scripts/update_macro_versions.sh
new file mode 100644
index 000000000..1c14eaba3
--- /dev/null
+++ b/backend/vendor/meilisearch-sdk/scripts/update_macro_versions.sh
@@ -0,0 +1,10 @@
+#!/bin/sh
+new_version=$(grep '^version = ' Cargo.toml)
+
+# Update the version of meilisearch-index-setting-macro, and the SDK's dependency on it, to the latest meilisearch-rust version.
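+# (Usage sketch: run from the repository root after bumping `version` in the
+# top-level Cargo.toml, e.g. `sh scripts/update_macro_versions.sh`.)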
+ +old_index_macro_version=$(grep '^version = ' ./meilisearch-index-setting-macro/Cargo.toml) +old_macro_in_sdk_version=$(grep '{ path = "meilisearch-index-setting-macro", version =' ./Cargo.toml) + +sed -i '' -e "s/^$old_index_macro_version/$new_version/g" './meilisearch-index-setting-macro/Cargo.toml' +sed -i '' -e "s/$old_macro_in_sdk_version/meilisearch-index-setting-macro = { path = \"meilisearch-index-setting-macro\", $new_version }/g" './Cargo.toml' diff --git a/backend/vendor/meilisearch-sdk/src/.client.rs.pending-snap b/backend/vendor/meilisearch-sdk/src/.client.rs.pending-snap new file mode 100644 index 000000000..6a7b7874e --- /dev/null +++ b/backend/vendor/meilisearch-sdk/src/.client.rs.pending-snap @@ -0,0 +1 @@ +{"run_id":"1736847111-898738061","line":1346,"new":{"module_name":"meilisearch_sdk__client__tests","snapshot_name":"_inner_meilisearch_test_macro_test_error_delete_key","metadata":{"source":"src/client.rs","assertion_line":1346,"expression":"error","snapshot_kind":"text"},"snapshot":"Meilisearch auth: invalid_api_key: The provided API key is invalid.. https://docs.meilisearch.com/errors#invalid_api_key"},"old":{"module_name":"meilisearch_sdk__client__tests","metadata":{"snapshot_kind":"text"},"snapshot":"Meilisearch invalid_request: api_key_not_found: API key `invalid_key` not found.. https://docs.meilisearch.com/errors#api_key_not_found"}} diff --git a/backend/vendor/meilisearch-sdk/src/client.rs b/backend/vendor/meilisearch-sdk/src/client.rs new file mode 100644 index 000000000..3bb578d1e --- /dev/null +++ b/backend/vendor/meilisearch-sdk/src/client.rs @@ -0,0 +1,1622 @@ +use serde::de::Error as SerdeError; +use serde::{de::DeserializeOwned, Deserialize, Serialize}; +use serde_json::{json, Value}; +use std::{collections::HashMap, time::Duration}; +use time::OffsetDateTime; + +use crate::{ + errors::*, + indexes::*, + key::{Key, KeyBuilder, KeyUpdater, KeysQuery, KeysResults}, + request::*, + search::*, + task_info::TaskInfo, + tasks::{Task, TasksCancelQuery, TasksDeleteQuery, TasksResults, TasksSearchQuery}, + utils::async_sleep, + DefaultHttpClient, +}; + +/// The top-level struct of the SDK, representing a client containing [indexes](../indexes/struct.Index.html). +#[derive(Debug, Clone)] +pub struct Client { + pub(crate) host: String, + pub(crate) api_key: Option, + pub(crate) http_client: Http, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct SwapIndexes { + pub indexes: (String, String), +} + +#[cfg(feature = "reqwest")] +impl Client { + /// Create a client using the specified server. + /// + /// Don't put a '/' at the end of the host. + /// + /// In production mode, see [the documentation about authentication](https://www.meilisearch.com/docs/learn/security/master_api_keys#authentication). 
+ /// + /// # Example + /// + /// ``` + /// # use meilisearch_sdk::{client::*, indexes::*}; + /// # + /// let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700"); + /// let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey"); + /// + /// let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap(); + /// ``` + pub fn new( + host: impl Into, + api_key: Option>, + ) -> Result { + let api_key = api_key.map(|key| key.into()); + let http_client = crate::reqwest::ReqwestClient::new(api_key.as_deref())?; + + Ok(Client { + host: host.into(), + api_key, + http_client, + }) + } +} + +impl Client { + // Create a client with a custom http client + pub fn new_with_client( + host: impl Into, + api_key: Option>, + http_client: Http, + ) -> Client { + Client { + host: host.into(), + api_key: api_key.map(|key| key.into()), + http_client, + } + } + + fn parse_indexes_results_from_value( + &self, + value: &Value, + ) -> Result, Error> { + let raw_indexes = value["results"] + .as_array() + .ok_or_else(|| serde_json::Error::custom("Missing or invalid 'results' field")) + .map_err(Error::ParseError)?; + + let limit = value["limit"] + .as_u64() + .ok_or_else(|| serde_json::Error::custom("Missing or invalid 'limit' field")) + .map_err(Error::ParseError)? as u32; + + let offset = value["offset"] + .as_u64() + .ok_or_else(|| serde_json::Error::custom("Missing or invalid 'offset' field")) + .map_err(Error::ParseError)? as u32; + + let total = value["total"] + .as_u64() + .ok_or_else(|| serde_json::Error::custom("Missing or invalid 'total' field")) + .map_err(Error::ParseError)? as u32; + + let results = raw_indexes + .iter() + .map(|raw_index| Index::from_value(raw_index.clone(), self.clone())) + .collect::>()?; + + let indexes_results = IndexesResults { + limit, + offset, + total, + results, + }; + + Ok(indexes_results) + } + + pub async fn execute_multi_search_query( + &self, + body: &MultiSearchQuery<'_, '_, Http>, + ) -> Result, Error> { + self.http_client + .request::<(), &MultiSearchQuery, MultiSearchResponse>( + &format!("{}/multi-search", &self.host), + Method::Post { body, query: () }, + 200, + ) + .await + } + + pub async fn execute_federated_multi_search_query< + T: 'static + DeserializeOwned + Send + Sync, + >( + &self, + body: &FederatedMultiSearchQuery<'_, '_, Http>, + ) -> Result, Error> { + self.http_client + .request::<(), &FederatedMultiSearchQuery, FederatedMultiSearchResponse>( + &format!("{}/multi-search", &self.host), + Method::Post { body, query: () }, + 200, + ) + .await + } + + /// Make multiple search requests. 
+ /// + /// # Example + /// + /// ``` + /// # use serde::{Serialize, Deserialize}; + /// # use meilisearch_sdk::{client::*, indexes::*, search::*}; + /// # + /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700"); + /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey"); + /// # + /// #[derive(Serialize, Deserialize, Debug)] + /// struct Movie { + /// name: String, + /// description: String, + /// } + /// + /// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async { + /// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap(); + /// let mut movies = client.index("search"); + /// # // add some documents + /// # movies.add_or_replace(&[Movie{name:String::from("Interstellar"), description:String::from("Interstellar chronicles the adventures of a group of explorers who make use of a newly discovered wormhole to surpass the limitations on human space travel and conquer the vast distances involved in an interstellar voyage.")},Movie{name:String::from("Unknown"), description:String::from("Unknown")}], Some("name")).await.unwrap().wait_for_completion(&client, None, None).await.unwrap(); + /// + /// let search_query_1 = SearchQuery::new(&movies) + /// .with_query("Interstellar") + /// .build(); + /// let search_query_2 = SearchQuery::new(&movies) + /// .with_query("") + /// .build(); + /// + /// let response = client + /// .multi_search() + /// .with_search_query(search_query_1) + /// .with_search_query(search_query_2) + /// .execute::() + /// .await + /// .unwrap(); + /// + /// assert_eq!(response.results.len(), 2); + /// # movies.delete().await.unwrap().wait_for_completion(&client, None, None).await.unwrap(); + /// # }); + /// ``` + /// + /// # Federated Search + /// + /// You can use [`MultiSearchQuery::with_federation`] to perform a [federated + /// search][1] where results from different indexes are merged and returned as + /// one list. + /// + /// When executing a federated query, the type parameter `T` is less clear, + /// as the documents in the different indexes potentially have different + /// fields and you might have one Rust type per index. In most cases, you + /// either want to create an enum with one variant per index and `#[serde + /// (untagged)]` attribute, or if you need more control, just pass + /// `serde_json::Map` and then deserialize that + /// into the appropriate target types later. + /// + /// [1]: https://www.meilisearch.com/docs/learn/multi_search/multi_search_vs_federated_search#what-is-federated-search + #[must_use] + pub fn multi_search(&self) -> MultiSearchQuery { + MultiSearchQuery::new(self) + } + + /// Return the host associated with this index. + /// + /// # Example + /// + /// ``` + /// # use meilisearch_sdk::{client::*}; + /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey"); + /// let client = Client::new("http://doggo.dog", Some(MEILISEARCH_API_KEY)).unwrap(); + /// + /// assert_eq!(client.get_host(), "http://doggo.dog"); + /// ``` + #[must_use] + pub fn get_host(&self) -> &str { + &self.host + } + + /// Return the api key associated with this index. 
+ /// + /// # Example + /// + /// ``` + /// # use meilisearch_sdk::{client::*}; + /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700"); + /// let client = Client::new(MEILISEARCH_URL, Some("doggo")).unwrap(); + /// + /// assert_eq!(client.get_api_key(), Some("doggo")); + /// ``` + #[must_use] + pub fn get_api_key(&self) -> Option<&str> { + self.api_key.as_deref() + } + + /// List all [Indexes](Index) with query parameters and return values as instances of [Index]. + /// + /// # Example + /// + /// ``` + /// # use meilisearch_sdk::{client::*, indexes::*}; + /// # + /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700"); + /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey"); + /// # + /// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async { + /// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap(); + /// let indexes: IndexesResults = client.list_all_indexes().await.unwrap(); + /// + /// let indexes: IndexesResults = client.list_all_indexes().await.unwrap(); + /// println!("{:?}", indexes); + /// # }); + /// ``` + pub async fn list_all_indexes(&self) -> Result, Error> { + let value = self.list_all_indexes_raw().await?; + let indexes_results = self.parse_indexes_results_from_value(&value)?; + Ok(indexes_results) + } + + /// List all [Indexes](Index) and returns values as instances of [Index]. + /// + /// # Example + /// + /// ``` + /// # use meilisearch_sdk::{client::*, indexes::*}; + /// # + /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700"); + /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey"); + /// # + /// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async { + /// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap(); + /// let mut query = IndexesQuery::new(&client); + /// query.with_limit(1); + /// + /// let indexes: IndexesResults = client.list_all_indexes_with(&query).await.unwrap(); + /// + /// assert_eq!(indexes.limit, 1); + /// # }); + /// ``` + pub async fn list_all_indexes_with( + &self, + indexes_query: &IndexesQuery<'_, Http>, + ) -> Result, Error> { + let value = self.list_all_indexes_raw_with(indexes_query).await?; + let indexes_results = self.parse_indexes_results_from_value(&value)?; + + Ok(indexes_results) + } + + /// List all [Indexes](Index) and returns as Json. + /// + /// # Example + /// + /// ``` + /// # use meilisearch_sdk::{client::*, indexes::*}; + /// # + /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700"); + /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey"); + /// # + /// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async { + /// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap(); + /// let json_indexes = client.list_all_indexes_raw().await.unwrap(); + /// + /// println!("{:?}", json_indexes); + /// # }); + /// ``` + pub async fn list_all_indexes_raw(&self) -> Result { + let json_indexes = self + .http_client + .request::<(), (), Value>( + &format!("{}/indexes", self.host), + Method::Get { query: () }, + 200, + ) + .await?; + + Ok(json_indexes) + } + + /// List all [Indexes](Index) with query parameters and returns as Json. 
+ /// + /// # Example + /// + /// ``` + /// # use meilisearch_sdk::{client::*, indexes::*}; + /// # + /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700"); + /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey"); + /// # + /// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async { + /// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap(); + /// let mut query = IndexesQuery::new(&client); + /// query.with_limit(1); + /// + /// let json_indexes = client.list_all_indexes_raw_with(&query).await.unwrap(); + /// + /// println!("{:?}", json_indexes); + /// # }); + /// ``` + pub async fn list_all_indexes_raw_with( + &self, + indexes_query: &IndexesQuery<'_, Http>, + ) -> Result { + let json_indexes = self + .http_client + .request::<&IndexesQuery, (), Value>( + &format!("{}/indexes", self.host), + Method::Get { + query: indexes_query, + }, + 200, + ) + .await?; + + Ok(json_indexes) + } + + /// Get an [Index], this index should already exist. + /// + /// # Example + /// + /// ``` + /// # use meilisearch_sdk::{client::*, indexes::*}; + /// # + /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700"); + /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey"); + /// # + /// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async { + /// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap(); + /// # let index = client.create_index("get_index", None).await.unwrap().wait_for_completion(&client, None, None).await.unwrap().try_make_index(&client).unwrap(); + /// let index = client.get_index("get_index").await.unwrap(); + /// + /// assert_eq!(index.as_ref(), "get_index"); + /// # index.delete().await.unwrap().wait_for_completion(&client, None, None).await.unwrap(); + /// # }); + /// ``` + pub async fn get_index(&self, uid: impl AsRef) -> Result, Error> { + let mut idx = self.index(uid.as_ref()); + idx.fetch_info().await?; + Ok(idx) + } + + /// Get a raw JSON [Index], this index should already exist. + /// + /// If you use it directly from an [Index], you can use the method [`Index::fetch_info`], which is the equivalent method from an index. 
+ /// + /// # Example + /// + /// ``` + /// # use meilisearch_sdk::{client::*, indexes::*}; + /// # + /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700"); + /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey"); + /// # + /// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async { + /// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap(); + /// # let index = client.create_index("get_raw_index", None).await.unwrap().wait_for_completion(&client, None, None).await.unwrap().try_make_index(&client).unwrap(); + /// let raw_index = client.get_raw_index("get_raw_index").await.unwrap(); + /// + /// assert_eq!(raw_index.get("uid").unwrap().as_str().unwrap(), "get_raw_index"); + /// # index.delete().await.unwrap().wait_for_completion(&client, None, None).await.unwrap(); + /// # }); + /// ``` + pub async fn get_raw_index(&self, uid: impl AsRef) -> Result { + self.http_client + .request::<(), (), Value>( + &format!("{}/indexes/{}", self.host, uid.as_ref()), + Method::Get { query: () }, + 200, + ) + .await + } + + /// Create a corresponding object of an [Index] without any check or doing an HTTP call. + pub fn index(&self, uid: impl Into) -> Index { + Index::new(uid, self.clone()) + } + + /// Create an [Index]. + /// + /// The second parameter will be used as the primary key of the new index. + /// If it is not specified, Meilisearch will **try** to infer the primary key. + /// + /// # Example + /// + /// ``` + /// # use meilisearch_sdk::{client::*, indexes::*}; + /// # + /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700"); + /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey"); + /// # + /// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async { + /// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap(); + /// // Create a new index called movies and access it + /// let task = client.create_index("create_index", None).await.unwrap(); + /// + /// // Wait for the task to complete + /// let task = task.wait_for_completion(&client, None, None).await.unwrap(); + /// + /// // Try to get the inner index if the task succeeded + /// let index = task.try_make_index(&client).unwrap(); + /// + /// assert_eq!(index.as_ref(), "create_index"); + /// # index.delete().await.unwrap().wait_for_completion(&client, None, None).await.unwrap(); + /// # }); + /// ``` + pub async fn create_index( + &self, + uid: impl AsRef, + primary_key: Option<&str>, + ) -> Result { + self.http_client + .request::<(), Value, TaskInfo>( + &format!("{}/indexes", self.host), + Method::Post { + query: (), + body: json!({ + "uid": uid.as_ref(), + "primaryKey": primary_key, + }), + }, + 202, + ) + .await + } + + /// Delete an index from its UID. + /// + /// To delete an [Index], use the [`Index::delete`] method. + pub async fn delete_index(&self, uid: impl AsRef) -> Result { + self.http_client + .request::<(), (), TaskInfo>( + &format!("{}/indexes/{}", self.host, uid.as_ref()), + Method::Delete { query: () }, + 202, + ) + .await + } + + /// Alias for [`Client::list_all_indexes`]. + pub async fn get_indexes(&self) -> Result, Error> { + self.list_all_indexes().await + } + + /// Alias for [`Client::list_all_indexes_with`]. 
+ pub async fn get_indexes_with( + &self, + indexes_query: &IndexesQuery<'_, Http>, + ) -> Result, Error> { + self.list_all_indexes_with(indexes_query).await + } + + /// Alias for [`Client::list_all_indexes_raw`]. + pub async fn get_indexes_raw(&self) -> Result { + self.list_all_indexes_raw().await + } + + /// Alias for [`Client::list_all_indexes_raw_with`]. + pub async fn get_indexes_raw_with( + &self, + indexes_query: &IndexesQuery<'_, Http>, + ) -> Result { + self.list_all_indexes_raw_with(indexes_query).await + } + + /// Swaps a list of two [Indexes](Index). + /// + /// # Example + /// + /// ``` + /// # use meilisearch_sdk::{client::*, indexes::*}; + /// # + /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700"); + /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey"); + /// # + /// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async { + /// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap(); + /// let task_index_1 = client.create_index("swap_index_1", None).await.unwrap(); + /// let task_index_2 = client.create_index("swap_index_2", None).await.unwrap(); + /// + /// // Wait for the task to complete + /// task_index_2.wait_for_completion(&client, None, None).await.unwrap(); + /// + /// let task = client + /// .swap_indexes([&SwapIndexes { + /// indexes: ( + /// "swap_index_1".to_string(), + /// "swap_index_2".to_string(), + /// ), + /// }]) + /// .await + /// .unwrap(); + /// + /// client.index("swap_index_1").delete().await.unwrap().wait_for_completion(&client, None, None).await.unwrap(); + /// client.index("swap_index_2").delete().await.unwrap().wait_for_completion(&client, None, None).await.unwrap(); + /// # }); + /// ``` + pub async fn swap_indexes( + &self, + indexes: impl IntoIterator, + ) -> Result { + self.http_client + .request::<(), Vec<&SwapIndexes>, TaskInfo>( + &format!("{}/swap-indexes", self.host), + Method::Post { + query: (), + body: indexes.into_iter().collect(), + }, + 202, + ) + .await + } + + /// Get stats of all [Indexes](Index). + /// + /// # Example + /// + /// ``` + /// # use meilisearch_sdk::{client::*, indexes::*}; + /// # + /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700"); + /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey"); + /// # + /// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async { + /// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap(); + /// let stats = client.get_stats().await.unwrap(); + /// # }); + /// ``` + pub async fn get_stats(&self) -> Result { + self.http_client + .request::<(), (), ClientStats>( + &format!("{}/stats", self.host), + Method::Get { query: () }, + 200, + ) + .await + } + + /// Get health of Meilisearch server. 
+ /// + /// # Example + /// + /// ``` + /// # use meilisearch_sdk::{client::*, errors::*}; + /// # + /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700"); + /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey"); + /// # + /// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async { + /// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap(); + /// let health = client.health().await.unwrap(); + /// + /// assert_eq!(health.status, "available"); + /// # }); + /// ``` + pub async fn health(&self) -> Result { + self.http_client + .request::<(), (), Health>( + &format!("{}/health", self.host), + Method::Get { query: () }, + 200, + ) + .await + } + + /// Get health of Meilisearch server. + /// + /// # Example + /// + /// ``` + /// # use meilisearch_sdk::client::*; + /// # + /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700"); + /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey"); + /// # + /// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async { + /// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap(); + /// let health = client.is_healthy().await; + /// + /// assert_eq!(health, true); + /// # }); + /// ``` + pub async fn is_healthy(&self) -> bool { + if let Ok(health) = self.health().await { + health.status.as_str() == "available" + } else { + false + } + } + + /// Get the API [Keys](Key) from Meilisearch with parameters. + /// + /// See [`Client::create_key`], [`Client::get_key`], and the [meilisearch documentation](https://www.meilisearch.com/docs/reference/api/keys#get-all-keys). + /// + /// # Example + /// + /// ``` + /// # use meilisearch_sdk::{client::*, errors::Error, key::KeysQuery}; + /// # + /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700"); + /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey"); + /// # + /// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async { + /// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap(); + /// let mut query = KeysQuery::new(); + /// query.with_limit(1); + /// + /// let keys = client.get_keys_with(&query).await.unwrap(); + /// + /// assert_eq!(keys.results.len(), 1); + /// # }); + /// ``` + pub async fn get_keys_with(&self, keys_query: &KeysQuery) -> Result { + let keys = self + .http_client + .request::<&KeysQuery, (), KeysResults>( + &format!("{}/keys", self.host), + Method::Get { query: keys_query }, + 200, + ) + .await?; + + Ok(keys) + } + + /// Get the API [Keys](Key) from Meilisearch. + /// + /// See [`Client::create_key`], [`Client::get_key`], and the [meilisearch documentation](https://www.meilisearch.com/docs/reference/api/keys#get-all-keys). 
+    ///
+    /// # Example
+    ///
+    /// ```
+    /// # use meilisearch_sdk::{client::*, errors::Error, key::KeyBuilder};
+    /// #
+    /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700");
+    /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey");
+    /// #
+    /// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async {
+    /// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap();
+    /// let keys = client.get_keys().await.unwrap();
+    ///
+    /// assert_eq!(keys.limit, 20);
+    /// # });
+    /// ```
+    pub async fn get_keys(&self) -> Result<KeysResults, Error> {
+        let keys = self
+            .http_client
+            .request::<(), (), KeysResults>(
+                &format!("{}/keys", self.host),
+                Method::Get { query: () },
+                200,
+            )
+            .await?;
+
+        Ok(keys)
+    }
+
+    /// Get one API [Key] from Meilisearch.
+    ///
+    /// See also [`Client::create_key`], [`Client::get_keys`], and the [meilisearch documentation](https://www.meilisearch.com/docs/reference/api/keys#get-one-key).
+    ///
+    /// # Example
+    ///
+    /// ```
+    /// # use meilisearch_sdk::{client::*, errors::Error, key::KeyBuilder};
+    /// #
+    /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700");
+    /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey");
+    /// #
+    /// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async {
+    /// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap();
+    /// # let key = client.get_keys().await.unwrap().results.into_iter()
+    /// #     .find(|k| k.name.as_ref().map_or(false, |name| name.starts_with("Default Search API Key")))
+    /// #     .expect("No default search key");
+    /// let key = client.get_key(key).await.expect("Invalid key");
+    ///
+    /// assert_eq!(key.name, Some("Default Search API Key".to_string()));
+    /// # });
+    /// ```
+    pub async fn get_key(&self, key: impl AsRef<str>) -> Result<Key, Error> {
+        self.http_client
+            .request::<(), (), Key>(
+                &format!("{}/keys/{}", self.host, key.as_ref()),
+                Method::Get { query: () },
+                200,
+            )
+            .await
+    }
+
+    /// Delete an API [Key] from Meilisearch.
+    ///
+    /// See also [`Client::create_key`], [`Client::update_key`], [`Client::get_key`], and the [meilisearch documentation](https://www.meilisearch.com/docs/reference/api/keys#delete-a-key).
+    ///
+    /// # Example
+    ///
+    /// ```
+    /// # use meilisearch_sdk::{client::*, errors::Error, key::KeyBuilder};
+    /// #
+    /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700");
+    /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey");
+    /// #
+    /// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async {
+    /// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap();
+    /// let key = KeyBuilder::new();
+    /// let key = client.create_key(key).await.unwrap();
+    /// let inner_key = key.key.clone();
+    ///
+    /// client.delete_key(key).await.unwrap();
+    ///
+    /// let keys = client.get_keys().await.unwrap();
+    ///
+    /// assert!(keys.results.iter().all(|key| key.key != inner_key));
+    /// # });
+    /// ```
+    pub async fn delete_key(&self, key: impl AsRef<str>) -> Result<(), Error> {
+        self.http_client
+            .request::<(), (), ()>(
+                &format!("{}/keys/{}", self.host, key.as_ref()),
+                Method::Delete { query: () },
+                204,
+            )
+            .await
+    }
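+
+    // Illustrative sketch (not part of the upstream SDK): a simple
+    // rotate-and-revoke flow composed from the methods above, assuming a
+    // previously created `old_key`:
+    //
+    //     let new_key = client.create_key(KeyBuilder::new()).await?;
+    //     client.delete_key(old_key).await?;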
+
+    /// Create an API [Key] in Meilisearch.
+    ///
+    /// See also [`Client::update_key`], [`Client::delete_key`], [`Client::get_key`], and the [meilisearch documentation](https://www.meilisearch.com/docs/reference/api/keys#create-a-key).
+    ///
+    /// # Example
+    ///
+    /// ```
+    /// # use meilisearch_sdk::{client::*, errors::Error, key::*};
+    /// #
+    /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700");
+    /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey");
+    /// #
+    /// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async {
+    /// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap();
+    /// let name = "create_key".to_string();
+    /// let mut key = KeyBuilder::new();
+    /// key.with_name(&name);
+    ///
+    /// let key = client.create_key(key).await.unwrap();
+    ///
+    /// assert_eq!(key.name, Some(name));
+    /// # client.delete_key(key).await.unwrap();
+    /// # });
+    /// ```
+    pub async fn create_key(&self, key: impl AsRef<KeyBuilder>) -> Result<Key, Error> {
+        self.http_client
+            .request::<(), &KeyBuilder, Key>(
+                &format!("{}/keys", self.host),
+                Method::Post {
+                    query: (),
+                    body: key.as_ref(),
+                },
+                201,
+            )
+            .await
+    }
+
+    /// Update an API [Key] in Meilisearch.
+    ///
+    /// See also [`Client::create_key`], [`Client::delete_key`], [`Client::get_key`], and the [meilisearch documentation](https://www.meilisearch.com/docs/reference/api/keys#update-a-key).
+    ///
+    /// # Example
+    ///
+    /// ```
+    /// # use meilisearch_sdk::{client::*, errors::Error, key::*};
+    /// #
+    /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700");
+    /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey");
+    /// #
+    /// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async {
+    /// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap();
+    /// let new_key = KeyBuilder::new();
+    /// let mut new_key = client.create_key(new_key).await.unwrap();
+    /// let mut key_update = KeyUpdater::new(new_key);
+    ///
+    /// let name = "my name".to_string();
+    /// key_update.with_name(&name);
+    ///
+    /// let key = client.update_key(key_update).await.unwrap();
+    ///
+    /// assert_eq!(key.name, Some(name));
+    /// # client.delete_key(key).await.unwrap();
+    /// # });
+    /// ```
+    pub async fn update_key(&self, key: impl AsRef<KeyUpdater>) -> Result<Key, Error> {
+        self.http_client
+            .request::<(), &KeyUpdater, Key>(
+                &format!("{}/keys/{}", self.host, key.as_ref().key),
+                Method::Patch {
+                    body: key.as_ref(),
+                    query: (),
+                },
+                200,
+            )
+            .await
+    }
+
+    /// Get version of the Meilisearch server.
+    ///
+    /// # Example
+    ///
+    /// ```
+    /// # use meilisearch_sdk::client::*;
+    /// #
+    /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700");
+    /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey");
+    /// #
+    /// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async {
+    /// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap();
+    /// let version = client.get_version().await.unwrap();
+    /// # });
+    /// ```
+    pub async fn get_version(&self) -> Result<Version, Error> {
+        self.http_client
+            .request::<(), (), Version>(
+                &format!("{}/version", self.host),
+                Method::Get { query: () },
+                200,
+            )
+            .await
+    }
+
+    /// Wait until Meilisearch processes a [Task], and get its status.
+    ///
+    /// `interval` = The frequency at which the server should be polled. **Default = 50ms**
+    ///
+    /// `timeout` = The maximum time to wait for processing to complete. **Default = 5000ms**
+    ///
+    /// If the waited time exceeds `timeout` then an [`Error::Timeout`] will be returned.
+    ///
+    /// See also [`Index::wait_for_task`], [`Task::wait_for_completion`], and [`TaskInfo::wait_for_completion`].
+    ///
+    /// # Example
+    ///
+    /// ```
+    /// # use meilisearch_sdk::{client::*, indexes::*, tasks::*};
+    /// # use serde::{Serialize, Deserialize};
+    /// #
+    /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700");
+    /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey");
+    /// #
+    /// # #[derive(Debug, Serialize, Deserialize, PartialEq)]
+    /// # struct Document {
+    /// #     id: usize,
+    /// #     value: String,
+    /// #     kind: String,
+    /// # }
+    /// #
+    /// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async {
+    /// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap();
+    /// let movies = client.index("movies_client_wait_for_task");
+    ///
+    /// let task = movies.add_documents(&[
+    ///     Document { id: 0, kind: "title".into(), value: "The Social Network".to_string() },
+    ///     Document { id: 1, kind: "title".into(), value: "Harry Potter and the Sorcerer's Stone".to_string() },
+    /// ], None).await.unwrap();
+    ///
+    /// let status = client.wait_for_task(task, None, None).await.unwrap();
+    ///
+    /// assert!(matches!(status, Task::Succeeded { .. }));
+    /// # movies.delete().await.unwrap().wait_for_completion(&client, None, None).await.unwrap();
+    /// # });
+    /// ```
+    pub async fn wait_for_task(
+        &self,
+        task_id: impl AsRef<u32>,
+        interval: Option<Duration>,
+        timeout: Option<Duration>,
+    ) -> Result<Task, Error> {
+        let interval = interval.unwrap_or_else(|| Duration::from_millis(50));
+        let timeout = timeout.unwrap_or_else(|| Duration::from_millis(5000));
+
+        let mut elapsed_time = Duration::new(0, 0);
+        let mut task_result: Result<Task, Error>;
+
+        while timeout > elapsed_time {
+            task_result = self.get_task(&task_id).await;
+            match task_result {
+                Ok(status) => match status {
+                    Task::Failed { .. } | Task::Succeeded { .. } => {
+                        return self.get_task(task_id).await;
+                    }
+                    Task::Enqueued { .. } | Task::Processing { .. } => {
+                        elapsed_time += interval;
+                        async_sleep(interval).await;
+                    }
+                },
+                Err(error) => return Err(error),
+            };
+        }
+
+        Err(Error::Timeout)
+    }
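+
+    // Illustrative sketch (not part of the upstream SDK): the polling loop
+    // above trades request volume against latency, so a long-running operation
+    // can be waited on with a coarser interval and a larger budget than the
+    // 50ms/5000ms defaults. Assuming `task_info` came from a previous call:
+    //
+    //     use std::time::Duration;
+    //
+    //     let task = client
+    //         .wait_for_task(task_info, Some(Duration::from_millis(500)), Some(Duration::from_secs(60)))
+    //         .await?;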
+
+    /// Get a task from the server given a task id.
+    ///
+    /// # Example
+    ///
+    /// ```
+    /// # use meilisearch_sdk::{client::*, tasks::*};
+    /// #
+    /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700");
+    /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey");
+    /// #
+    /// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async {
+    /// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap();
+    /// # let index = client.create_index("movies_get_task", None).await.unwrap().wait_for_completion(&client, None, None).await.unwrap().try_make_index(&client).unwrap();
+    /// let task = index.delete_all_documents().await.unwrap();
+    ///
+    /// let task = client.get_task(task).await.unwrap();
+    /// # index.delete().await.unwrap().wait_for_completion(&client, None, None).await.unwrap();
+    /// # });
+    /// ```
+    pub async fn get_task(&self, task_id: impl AsRef<u32>) -> Result<Task, Error> {
+        self.http_client
+            .request::<(), (), Task>(
+                &format!("{}/tasks/{}", self.host, task_id.as_ref()),
+                Method::Get { query: () },
+                200,
+            )
+            .await
+    }
+
+    /// Get all tasks with query parameters from the server.
+    ///
+    /// # Example
+    ///
+    /// ```
+    /// # use meilisearch_sdk::{client::*, tasks::*};
+    /// #
+    /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700");
+    /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey");
+    /// #
+    /// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async {
+    /// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap();
+    /// let mut query = TasksSearchQuery::new(&client);
+    /// query.with_index_uids(["get_tasks_with"]);
+    ///
+    /// let tasks = client.get_tasks_with(&query).await.unwrap();
+    /// # });
+    /// ```
+    pub async fn get_tasks_with(
+        &self,
+        tasks_query: &TasksSearchQuery<'_, Http>,
+    ) -> Result<TasksResults, Error> {
+        let tasks = self
+            .http_client
+            .request::<&TasksSearchQuery, (), TasksResults>(
+                &format!("{}/tasks", self.host),
+                Method::Get { query: tasks_query },
+                200,
+            )
+            .await?;
+
+        Ok(tasks)
+    }
+
+    /// Cancel tasks with filters [`TasksCancelQuery`].
+    ///
+    /// # Example
+    ///
+    /// ```
+    /// # use meilisearch_sdk::{client::*, tasks::*};
+    /// #
+    /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700");
+    /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey");
+    /// #
+    /// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async {
+    /// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap();
+    /// let mut query = TasksCancelQuery::new(&client);
+    /// query.with_index_uids(["movies"]);
+    ///
+    /// let res = client.cancel_tasks_with(&query).await.unwrap();
+    /// # });
+    /// ```
+    pub async fn cancel_tasks_with(
+        &self,
+        filters: &TasksCancelQuery<'_, Http>,
+    ) -> Result<TaskInfo, Error> {
+        let tasks = self
+            .http_client
+            .request::<&TasksCancelQuery, (), TaskInfo>(
+                &format!("{}/tasks/cancel", self.host),
+                Method::Post {
+                    query: filters,
+                    body: (),
+                },
+                200,
+            )
+            .await?;
+
+        Ok(tasks)
+    }
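+
+    // Illustrative sketch (not part of the upstream SDK): cancellation is
+    // itself an asynchronous task, so the returned `TaskInfo` can be awaited
+    // like any other task before asserting on the outcome:
+    //
+    //     let info = client.cancel_tasks_with(&query).await?;
+    //     let task = info.wait_for_completion(&client, None, None).await?;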
+
+    /// Delete tasks with filters [`TasksDeleteQuery`].
+    ///
+    /// # Example
+    ///
+    /// ```
+    /// # use meilisearch_sdk::{client::*, tasks::*};
+    /// #
+    /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700");
+    /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey");
+    /// #
+    /// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async {
+    /// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap();
+    /// let mut query = TasksDeleteQuery::new(&client);
+    /// query.with_index_uids(["movies"]);
+    ///
+    /// let res = client.delete_tasks_with(&query).await.unwrap();
+    /// # });
+    /// ```
+    pub async fn delete_tasks_with(
+        &self,
+        filters: &TasksDeleteQuery<'_, Http>,
+    ) -> Result<TaskInfo, Error> {
+        let tasks = self
+            .http_client
+            .request::<&TasksDeleteQuery, (), TaskInfo>(
+                &format!("{}/tasks", self.host),
+                Method::Delete { query: filters },
+                200,
+            )
+            .await?;
+
+        Ok(tasks)
+    }
+
+    /// Get all tasks from the server.
+    ///
+    /// # Example
+    ///
+    /// ```
+    /// # use meilisearch_sdk::{client::*, tasks::*};
+    /// #
+    /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700");
+    /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey");
+    /// #
+    /// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async {
+    /// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap();
+    /// let tasks = client.get_tasks().await.unwrap();
+    ///
+    /// assert!(tasks.results.len() > 0);
+    /// # });
+    /// ```
+    pub async fn get_tasks(&self) -> Result<TasksResults, Error> {
+        let tasks = self
+            .http_client
+            .request::<(), (), TasksResults>(
+                &format!("{}/tasks", self.host),
+                Method::Get { query: () },
+                200,
+            )
+            .await?;
+
+        Ok(tasks)
+    }
+
+    /// Generates a new tenant token.
+    ///
+    /// # Example
+    ///
+    /// ```
+    /// # use meilisearch_sdk::client::Client;
+    /// #
+    /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700");
+    /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey");
+    /// #
+    /// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async {
+    /// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap();
+    /// let api_key_uid = "76cf8b87-fd12-4688-ad34-260d930ca4f4".to_string();
+    /// let token = client.generate_tenant_token(api_key_uid, serde_json::json!(["*"]), None, None).unwrap();
+    ///
+    /// let client = Client::new(MEILISEARCH_URL, Some(token)).unwrap();
+    /// # });
+    /// ```
+    #[cfg(not(target_arch = "wasm32"))]
+    pub fn generate_tenant_token(
+        &self,
+        api_key_uid: String,
+        search_rules: Value,
+        api_key: Option<&str>,
+        expires_at: Option<OffsetDateTime>,
+    ) -> Result<String, Error> {
+        let api_key = match self.get_api_key() {
+            Some(key) => api_key.unwrap_or(key),
+            None => {
+                return Err(Error::CantUseWithoutApiKey(
+                    "generate_tenant_token".to_string(),
+                ))
+            }
+        };
+
+        crate::tenant_tokens::generate_tenant_token(api_key_uid, search_rules, api_key, expires_at)
+    }
+}
+
+#[derive(Debug, Clone, Deserialize)]
+#[serde(rename_all = "camelCase")]
+pub struct ClientStats {
+    pub database_size: usize,
+    #[serde(with = "time::serde::rfc3339::option")]
+    pub last_update: Option<OffsetDateTime>,
+    pub indexes: HashMap<String, IndexStats>,
+}
+
+/// Health of the Meilisearch server.
+/// +/// # Example +/// +/// ``` +/// # use meilisearch_sdk::{client::*, indexes::*, errors::Error}; +/// Health { +/// status: "available".to_string(), +/// }; +/// ``` +#[derive(Debug, Clone, Deserialize)] +pub struct Health { + pub status: String, +} + +/// Version of a Meilisearch server. +/// +/// # Example +/// +/// ``` +/// # use meilisearch_sdk::{client::*, indexes::*, errors::Error}; +/// Version { +/// commit_sha: "b46889b5f0f2f8b91438a08a358ba8f05fc09fc1".to_string(), +/// commit_date: "2019-11-15T09:51:54.278247+00:00".to_string(), +/// pkg_version: "0.1.1".to_string(), +/// }; +/// ``` +#[derive(Debug, Clone, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct Version { + pub commit_sha: String, + pub commit_date: String, + pub pkg_version: String, +} + +#[cfg(test)] +mod tests { + use big_s::S; + use time::OffsetDateTime; + + use meilisearch_test_macro::meilisearch_test; + + use crate::{client::*, key::Action, reqwest::qualified_version}; + + #[derive(Debug, Serialize, Deserialize, PartialEq)] + struct Document { + id: String, + } + + #[meilisearch_test] + async fn test_swapping_two_indexes(client: Client) { + let index_1 = client.index("test_swapping_two_indexes_1"); + let index_2 = client.index("test_swapping_two_indexes_2"); + + let t0 = index_1 + .add_documents( + &[Document { + id: "1".to_string(), + }], + None, + ) + .await + .unwrap(); + + index_2 + .add_documents( + &[Document { + id: "2".to_string(), + }], + None, + ) + .await + .unwrap(); + + t0.wait_for_completion(&client, None, None).await.unwrap(); + + let task = client + .swap_indexes([&SwapIndexes { + indexes: ( + "test_swapping_two_indexes_1".to_string(), + "test_swapping_two_indexes_2".to_string(), + ), + }]) + .await + .unwrap(); + task.wait_for_completion(&client, None, None).await.unwrap(); + + let document = index_1.get_document("2").await.unwrap(); + + assert_eq!( + Document { + id: "2".to_string() + }, + document + ); + } + + #[meilisearch_test] + async fn test_methods_has_qualified_version_as_header() { + let mut s = mockito::Server::new_async().await; + let mock_server_url = s.url(); + let path = "/hello"; + let address = &format!("{mock_server_url}{path}"); + let user_agent = &*qualified_version(); + let client = Client::new(mock_server_url, None::).unwrap(); + + let assertions = vec![ + ( + s.mock("GET", path) + .match_header("User-Agent", user_agent) + .create_async() + .await, + client + .http_client + .request::<(), (), ()>(address, Method::Get { query: () }, 200), + ), + ( + s.mock("POST", path) + .match_header("User-Agent", user_agent) + .create_async() + .await, + client.http_client.request::<(), (), ()>( + address, + Method::Post { + query: (), + body: {}, + }, + 200, + ), + ), + ( + s.mock("DELETE", path) + .match_header("User-Agent", user_agent) + .create_async() + .await, + client.http_client.request::<(), (), ()>( + address, + Method::Delete { query: () }, + 200, + ), + ), + ( + s.mock("PUT", path) + .match_header("User-Agent", user_agent) + .create_async() + .await, + client.http_client.request::<(), (), ()>( + address, + Method::Put { + query: (), + body: (), + }, + 200, + ), + ), + ( + s.mock("PATCH", path) + .match_header("User-Agent", user_agent) + .create_async() + .await, + client.http_client.request::<(), (), ()>( + address, + Method::Patch { + query: (), + body: (), + }, + 200, + ), + ), + ]; + + for (m, req) in assertions { + let _ = req.await; + + m.assert_async().await; + } + } + + #[meilisearch_test] + async fn test_get_tasks(client: Client) { + let tasks = 
client.get_tasks().await.unwrap(); + assert_eq!(tasks.limit, 20); + } + + #[meilisearch_test] + async fn test_get_tasks_with_params(client: Client) { + let query = TasksSearchQuery::new(&client); + let tasks = client.get_tasks_with(&query).await.unwrap(); + + assert_eq!(tasks.limit, 20); + } + + #[meilisearch_test] + async fn test_get_keys(client: Client) { + let keys = client.get_keys().await.unwrap(); + + assert!(keys.results.len() >= 2); + } + + #[meilisearch_test] + async fn test_delete_key(client: Client, name: String) { + let mut key = KeyBuilder::new(); + key.with_name(&name); + let key = client.create_key(key).await.unwrap(); + + client.delete_key(&key).await.unwrap(); + let keys = KeysQuery::new() + .with_limit(10000) + .execute(&client) + .await + .unwrap(); + + assert!(keys.results.iter().all(|k| k.key != key.key)); + } + + #[meilisearch_test] + async fn test_error_delete_key(client: Client, name: String) { + // ==> accessing a key that does not exist + let error = client.delete_key("invalid_key").await.unwrap_err(); + insta::assert_snapshot!(error, @"Meilisearch invalid_request: api_key_not_found: API key `invalid_key` not found.. https://docs.meilisearch.com/errors#api_key_not_found"); + + // ==> executing the action without enough right + let mut key = KeyBuilder::new(); + + key.with_name(&name); + let key = client.create_key(key).await.unwrap(); + let master_key = client.api_key.clone(); + + // create a new client with no right + let client = Client::new(client.host, Some(key.key.clone())).unwrap(); + // with a wrong key + let error = client.delete_key("invalid_key").await.unwrap_err(); + insta::assert_snapshot!(error, @"Meilisearch auth: invalid_api_key: The provided API key is invalid.. https://docs.meilisearch.com/errors#invalid_api_key"); + assert!(matches!( + error, + Error::Meilisearch(MeilisearchError { + error_code: ErrorCode::InvalidApiKey, + error_type: ErrorType::Auth, + .. + }) + )); + // with a good key + let error = client.delete_key(&key.key).await.unwrap_err(); + insta::assert_snapshot!(error, @"Meilisearch auth: invalid_api_key: The provided API key is invalid.. https://docs.meilisearch.com/errors#invalid_api_key"); + assert!(matches!( + error, + Error::Meilisearch(MeilisearchError { + error_code: ErrorCode::InvalidApiKey, + error_type: ErrorType::Auth, + .. 
+ }) + )); + + // cleanup + let client = Client::new(client.host, master_key).unwrap(); + client.delete_key(key).await.unwrap(); + } + + #[meilisearch_test] + async fn test_create_key(client: Client, name: String) { + let expires_at = OffsetDateTime::now_utc() + time::Duration::HOUR; + let mut key = KeyBuilder::new(); + key.with_action(Action::DocumentsAdd) + .with_name(&name) + .with_expires_at(expires_at) + .with_description("a description") + .with_index("*"); + let key = client.create_key(key).await.unwrap(); + + assert_eq!(key.actions, vec![Action::DocumentsAdd]); + assert_eq!(&key.name, &Some(name)); + // We can't compare the two timestamps directly because of some nanoseconds imprecision with the floats + assert_eq!( + key.expires_at.unwrap().unix_timestamp(), + expires_at.unix_timestamp() + ); + assert_eq!(key.indexes, vec![S("*")]); + + client.delete_key(key).await.unwrap(); + } + + #[meilisearch_test] + async fn test_error_create_key(client: Client, name: String) { + // ==> Invalid index name + /* TODO: uncomment once meilisearch fix this bug: https://github.com/meilisearch/meilisearch/issues/2158 + let mut key = KeyBuilder::new(); + key.with_index("invalid index # / \\name with spaces"); + let error = client.create_key(key).await.unwrap_err(); + + assert!(matches!( + error, + Error::MeilisearchError { + error_code: ErrorCode::InvalidApiKeyIndexes, + error_type: ErrorType::InvalidRequest, + .. + } + )); + */ + // ==> executing the action without enough right + let mut no_right_key = KeyBuilder::new(); + no_right_key.with_name(format!("{name}_1")); + let no_right_key = client.create_key(no_right_key).await.unwrap(); + + // backup the master key for cleanup at the end of the test + let master_client = client.clone(); + let client = Client::new(&master_client.host, Some(no_right_key.key.clone())).unwrap(); + + let mut key = KeyBuilder::new(); + key.with_name(format!("{name}_2")); + let error = client.create_key(key).await.unwrap_err(); + + assert!(matches!( + error, + Error::Meilisearch(MeilisearchError { + error_code: ErrorCode::InvalidApiKey, + error_type: ErrorType::Auth, + .. + }) + )); + + // cleanup + master_client + .delete_key(client.api_key.unwrap()) + .await + .unwrap(); + } + + #[meilisearch_test] + async fn test_update_key(client: Client, description: String) { + let mut key = KeyBuilder::new(); + key.with_name("test_update_key"); + let mut key = client.create_key(key).await.unwrap(); + + let name = S("new name"); + key.with_description(&description); + key.with_name(&name); + + let key = key.update(&client).await.unwrap(); + + assert_eq!(key.description, Some(description)); + assert_eq!(key.name, Some(name)); + + client.delete_key(key).await.unwrap(); + } + + #[meilisearch_test] + async fn test_get_index(client: Client, index_uid: String) -> Result<(), Error> { + let task = client.create_index(&index_uid, None).await?; + let index = client + .wait_for_task(task, None, None) + .await? + .try_make_index(&client) + .unwrap(); + + assert_eq!(index.uid, index_uid); + index + .delete() + .await? + .wait_for_completion(&client, None, None) + .await?; + Ok(()) + } + + #[meilisearch_test] + async fn test_error_create_index(client: Client, index: Index) -> Result<(), Error> { + let error = client + .create_index("Wrong index name", None) + .await + .unwrap_err(); + + assert!(matches!( + error, + Error::Meilisearch(MeilisearchError { + error_code: ErrorCode::InvalidIndexUid, + error_type: ErrorType::InvalidRequest, + .. 
+ }) + )); + + // we try to create an index with the same uid of an already existing index + let error = client + .create_index(&*index.uid, None) + .await? + .wait_for_completion(&client, None, None) + .await? + .unwrap_failure(); + + assert!(matches!( + error, + MeilisearchError { + error_code: ErrorCode::IndexAlreadyExists, + error_type: ErrorType::InvalidRequest, + .. + } + )); + Ok(()) + } + + #[meilisearch_test] + async fn test_list_all_indexes(client: Client) { + let all_indexes = client.list_all_indexes().await.unwrap(); + + assert_eq!(all_indexes.limit, 20); + assert_eq!(all_indexes.offset, 0); + } + + #[meilisearch_test] + async fn test_list_all_indexes_with_params(client: Client) { + let mut query = IndexesQuery::new(&client); + query.with_limit(1); + let all_indexes = client.list_all_indexes_with(&query).await.unwrap(); + + assert_eq!(all_indexes.limit, 1); + assert_eq!(all_indexes.offset, 0); + } + + #[meilisearch_test] + async fn test_list_all_indexes_raw(client: Client) { + let all_indexes_raw = client.list_all_indexes_raw().await.unwrap(); + + assert_eq!(all_indexes_raw["limit"], json!(20)); + assert_eq!(all_indexes_raw["offset"], json!(0)); + } + + #[meilisearch_test] + async fn test_list_all_indexes_raw_with_params(client: Client) { + let mut query = IndexesQuery::new(&client); + query.with_limit(1); + let all_indexes_raw = client.list_all_indexes_raw_with(&query).await.unwrap(); + + assert_eq!(all_indexes_raw["limit"], json!(1)); + assert_eq!(all_indexes_raw["offset"], json!(0)); + } + + #[meilisearch_test] + async fn test_get_primary_key_is_none(mut index: Index) { + let primary_key = index.get_primary_key().await; + + assert!(primary_key.is_ok()); + assert!(primary_key.unwrap().is_none()); + } + + #[meilisearch_test] + async fn test_get_primary_key(client: Client, index_uid: String) -> Result<(), Error> { + let mut index = client + .create_index(index_uid, Some("primary_key")) + .await? + .wait_for_completion(&client, None, None) + .await? + .try_make_index(&client) + .unwrap(); + + let primary_key = index.get_primary_key().await; + assert!(primary_key.is_ok()); + assert_eq!(primary_key?.unwrap(), "primary_key"); + + index + .delete() + .await? + .wait_for_completion(&client, None, None) + .await?; + + Ok(()) + } +} diff --git a/backend/vendor/meilisearch-sdk/src/documents.rs b/backend/vendor/meilisearch-sdk/src/documents.rs new file mode 100644 index 000000000..17e2e6102 --- /dev/null +++ b/backend/vendor/meilisearch-sdk/src/documents.rs @@ -0,0 +1,689 @@ +use async_trait::async_trait; +use serde::{de::DeserializeOwned, Deserialize, Serialize}; + +/// Derive the [`IndexConfig`](crate::documents::IndexConfig) trait. +/// +/// ## Field attribute +/// Use the `#[index_config(..)]` field attribute to generate the correct settings +/// for each field. The available parameters are: +/// - `primary_key` (can only be used once) +/// - `distinct` (can only be used once) +/// - `searchable` +/// - `displayed` +/// - `filterable` +/// - `sortable` +/// +/// ## Index name +/// The name of the index will be the name of the struct converted to snake case. 
+///
+/// ## Sample usage:
+/// ```
+/// use serde::{Serialize, Deserialize};
+/// use meilisearch_sdk::documents::IndexConfig;
+/// use meilisearch_sdk::settings::Settings;
+/// use meilisearch_sdk::indexes::Index;
+/// use meilisearch_sdk::client::Client;
+///
+/// #[derive(Serialize, Deserialize, IndexConfig)]
+/// struct Movie {
+///     #[index_config(primary_key)]
+///     movie_id: u64,
+///     #[index_config(displayed, searchable)]
+///     title: String,
+///     #[index_config(displayed)]
+///     description: String,
+///     #[index_config(filterable, sortable, displayed)]
+///     release_date: String,
+///     #[index_config(filterable, displayed)]
+///     genres: Vec<String>,
+/// }
+///
+/// async fn usage(client: Client) {
+///     // Default settings with the distinct, searchable, displayed, filterable, and sortable fields set correctly.
+///     let settings: Settings = Movie::generate_settings();
+///     // Index created with the name `movie` and the primary key set to `movie_id`
+///     let index: Index = Movie::generate_index(&client).await.unwrap();
+/// }
+/// ```
+pub use meilisearch_index_setting_macro::IndexConfig;
+
+use crate::client::Client;
+use crate::request::HttpClient;
+use crate::settings::Settings;
+use crate::task_info::TaskInfo;
+use crate::tasks::Task;
+use crate::{errors::Error, indexes::Index};
+
+#[async_trait(?Send)]
+pub trait IndexConfig<Http: HttpClient> {
+    const INDEX_STR: &'static str;
+
+    #[must_use]
+    fn index(client: &Client<Http>) -> Index<Http> {
+        client.index(Self::INDEX_STR)
+    }
+    fn generate_settings() -> Settings;
+    async fn generate_index(client: &Client<Http>) -> Result<Index<Http>, Task>;
+}
+
+#[derive(Debug, Clone, Deserialize)]
+pub struct DocumentsResults<T> {
+    pub results: Vec<T>,
+    pub limit: u32,
+    pub offset: u32,
+    pub total: u32,
+}
+
+#[derive(Debug, Clone, Serialize)]
+pub struct DocumentQuery<'a, Http: HttpClient> {
+    #[serde(skip_serializing)]
+    pub index: &'a Index<Http>,
+
+    /// The fields that should appear in the documents. By default, all of the fields are present.
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub fields: Option<Vec<&'a str>>,
+}
+
+impl<'a, Http: HttpClient> DocumentQuery<'a, Http> {
+    #[must_use]
+    pub fn new(index: &Index<Http>) -> DocumentQuery<Http> {
+        DocumentQuery {
+            index,
+            fields: None,
+        }
+    }
+
+    /// Specify the fields to return in the document.
+    ///
+    /// # Example
+    ///
+    /// ```
+    /// # use meilisearch_sdk::{client::*, indexes::*, documents::*};
+    /// #
+    /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700");
+    /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey");
+    /// #
+    /// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap();
+    /// let index = client.index("document_query_with_fields");
+    /// let mut document_query = DocumentQuery::new(&index);
+    ///
+    /// document_query.with_fields(["title"]);
+    /// ```
+    pub fn with_fields(
+        &mut self,
+        fields: impl IntoIterator<Item = &'a str>,
+    ) -> &mut DocumentQuery<'a, Http> {
+        self.fields = Some(fields.into_iter().collect());
+        self
+    }
+
+    /// Execute the get document query.
+    ///
+    /// # Example
+    ///
+    /// ```
+    /// # use meilisearch_sdk::{client::*, indexes::*, documents::*};
+    /// # use serde::{Deserialize, Serialize};
+    /// #
+    /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700");
+    /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey");
+    /// #
+    /// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap();
+    /// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async {
+    /// #[derive(Debug, Serialize, Deserialize, PartialEq)]
+    /// struct MyObject {
+    ///     id: String,
+    ///     kind: String,
+    /// }
+    ///
+    /// #[derive(Debug, Serialize, Deserialize, PartialEq)]
+    /// struct MyObjectReduced {
+    ///     id: String,
+    /// }
+    /// # let index = client.index("document_query_execute");
+    /// # index.add_or_replace(&[MyObject{id:"1".to_string(), kind:String::from("a kind")},MyObject{id:"2".to_string(), kind:String::from("some kind")}], None).await.unwrap().wait_for_completion(&client, None, None).await.unwrap();
+    ///
+    /// let document = DocumentQuery::new(&index).with_fields(["id"])
+    ///     .execute::<MyObjectReduced>("1")
+    ///     .await
+    ///     .unwrap();
+    ///
+    /// assert_eq!(
+    ///     document,
+    ///     MyObjectReduced { id: "1".to_string() }
+    /// );
+    /// # index.delete().await.unwrap().wait_for_completion(&client, None, None).await.unwrap();
+    /// # });
+    /// ```
+    pub async fn execute<T: DeserializeOwned + 'static>(
+        &self,
+        document_id: &str,
+    ) -> Result<T, Error> {
+        self.index.get_document_with::<T>(document_id, self).await
+    }
+}
+
+#[derive(Debug, Clone, Serialize)]
+pub struct DocumentsQuery<'a, Http: HttpClient> {
+    #[serde(skip_serializing)]
+    pub index: &'a Index<Http>,
+
+    /// The number of documents to skip.
+    ///
+    /// If the value of the parameter `offset` is `n`, the `n` first documents will not be returned.
+    /// This is helpful for pagination.
+    ///
+    /// Example: If you want to skip the first document, set offset to `1`.
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub offset: Option<usize>,
+
+    /// The maximum number of documents returned.
+    ///
+    /// If the value of the parameter `limit` is `n`, there will never be more than `n` documents in the response.
+    /// This is helpful for pagination.
+    ///
+    /// Example: If you don't want to get more than two documents, set limit to `2`.
+    ///
+    /// **Default: `20`**
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub limit: Option<usize>,
+
+    /// The fields that should appear in the documents. By default, all of the fields are present.
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub fields: Option<Vec<&'a str>>,
+
+    /// Filters to apply.
+    ///
+    /// Available since v1.2 of Meilisearch.
+    /// Read the [dedicated guide](https://www.meilisearch.com/docs/learn/fine_tuning_results/filtering#filter-basics) to learn the syntax.
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub filter: Option<&'a str>,
+}
+
+impl<'a, Http: HttpClient> DocumentsQuery<'a, Http> {
+    #[must_use]
+    pub fn new(index: &Index<Http>) -> DocumentsQuery<Http> {
+        DocumentsQuery {
+            index,
+            offset: None,
+            limit: None,
+            fields: None,
+            filter: None,
+        }
+    }
+
+    /// Specify the offset.
+    ///
+    /// # Example
+    ///
+    /// ```
+    /// # use meilisearch_sdk::{client::*, indexes::*, documents::*};
+    /// #
+    /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700");
+    /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey");
+    /// #
+    /// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap();
+    /// let index = client.index("my_index");
+    ///
+    /// let mut documents_query = DocumentsQuery::new(&index).with_offset(1);
+    /// ```
+    pub fn with_offset(&mut self, offset: usize) -> &mut DocumentsQuery<'a, Http> {
+        self.offset = Some(offset);
+        self
+    }
+
+    /// Specify the limit.
+    ///
+    /// # Example
+    ///
+    /// ```
+    /// # use meilisearch_sdk::{client::*, indexes::*, documents::*};
+    /// #
+    /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700");
+    /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey");
+    /// #
+    /// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap();
+    /// let index = client.index("my_index");
+    ///
+    /// let mut documents_query = DocumentsQuery::new(&index);
+    ///
+    /// documents_query.with_limit(1);
+    /// ```
+    pub fn with_limit(&mut self, limit: usize) -> &mut DocumentsQuery<'a, Http> {
+        self.limit = Some(limit);
+        self
+    }
+
+    /// Specify the fields to return in the documents.
+    ///
+    /// # Example
+    ///
+    /// ```
+    /// # use meilisearch_sdk::{client::*, indexes::*, documents::*};
+    /// #
+    /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700");
+    /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey");
+    /// #
+    /// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap();
+    /// let index = client.index("my_index");
+    ///
+    /// let mut documents_query = DocumentsQuery::new(&index);
+    ///
+    /// documents_query.with_fields(["title"]);
+    /// ```
+    pub fn with_fields(
+        &mut self,
+        fields: impl IntoIterator<Item = &'a str>,
+    ) -> &mut DocumentsQuery<'a, Http> {
+        self.fields = Some(fields.into_iter().collect());
+        self
+    }
+
+    pub fn with_filter<'b>(&'b mut self, filter: &'a str) -> &'b mut DocumentsQuery<'a, Http> {
+        self.filter = Some(filter);
+        self
+    }
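+
+    // Illustrative sketch (not part of the upstream SDK): the builder methods
+    // above chain, so a filtered, paginated projection reads as one expression.
+    // Assumes a deserializable `Movie` type and that `genres` was made
+    // filterable via the index settings first:
+    //
+    //     let page = DocumentsQuery::new(&index)
+    //         .with_filter("genres = horror")
+    //         .with_offset(20)
+    //         .with_limit(10)
+    //         .with_fields(["id", "title"])
+    //         .execute::<Movie>()
+    //         .await?;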
+
+    /// Execute the get documents query.
+    ///
+    /// # Example
+    ///
+    /// ```
+    /// # use meilisearch_sdk::{client::*, indexes::*, documents::*};
+    /// # use serde::{Deserialize, Serialize};
+    /// #
+    /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700");
+    /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey");
+    /// #
+    /// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap();
+    /// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async {
+    /// # let index = client.create_index("documents_query_execute", None).await.unwrap().wait_for_completion(&client, None, None).await.unwrap().try_make_index(&client).unwrap();
+    /// #[derive(Debug, Serialize, Deserialize, PartialEq)]
+    /// struct MyObject {
+    ///     id: Option<usize>,
+    ///     kind: String,
+    /// }
+    /// let index = client.index("documents_query_execute");
+    ///
+    /// let document = DocumentsQuery::new(&index)
+    ///     .with_offset(1)
+    ///     .execute::<MyObject>()
+    ///     .await
+    ///     .unwrap();
+    ///
+    /// # index.delete().await.unwrap().wait_for_completion(&client, None, None).await.unwrap();
+    /// # });
+    /// ```
+    pub async fn execute<T: DeserializeOwned + 'static>(
+        &self,
+    ) -> Result<DocumentsResults<T>, Error> {
+        self.index.get_documents_with::<T>(self).await
+    }
+}
+
+#[derive(Debug, Clone, Serialize)]
+pub struct DocumentDeletionQuery<'a, Http: HttpClient> {
+    #[serde(skip_serializing)]
+    pub index: &'a Index<Http>,
+
+    /// Filters to apply.
+    ///
+    /// Read the [dedicated guide](https://www.meilisearch.com/docs/learn/fine_tuning_results/filtering#filter-basics) to learn the syntax.
+    pub filter: Option<&'a str>,
+}
+
+impl<'a, Http: HttpClient> DocumentDeletionQuery<'a, Http> {
+    #[must_use]
+    pub fn new(index: &Index<Http>) -> DocumentDeletionQuery<Http> {
+        DocumentDeletionQuery {
+            index,
+            filter: None,
+        }
+    }
+
+    pub fn with_filter<'b>(
+        &'b mut self,
+        filter: &'a str,
+    ) -> &'b mut DocumentDeletionQuery<'a, Http> {
+        self.filter = Some(filter);
+        self
+    }
+
+    pub async fn execute(&self) -> Result<TaskInfo, Error> {
+        self.index.delete_documents_with(self).await
+    }
+}
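+
+// Illustrative sketch (not part of the upstream SDK): deleting by filter
+// mirrors the tests below and assumes `id` was made filterable first:
+//
+//     let mut query = DocumentDeletionQuery::new(&index);
+//     query.with_filter("id = 1");
+//     query
+//         .execute()
+//         .await?
+//         .wait_for_completion(&client, None, None)
+//         .await?;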
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+    use crate::{client::Client, errors::*, indexes::*};
+    use meilisearch_test_macro::meilisearch_test;
+    use serde::{Deserialize, Serialize};
+
+    #[derive(Debug, Serialize, Deserialize, PartialEq)]
+    struct MyObject {
+        id: Option<usize>,
+        kind: String,
+    }
+
+    #[allow(unused)]
+    #[derive(IndexConfig)]
+    struct MovieClips {
+        #[index_config(primary_key)]
+        movie_id: u64,
+        #[index_config(distinct)]
+        owner: String,
+        #[index_config(displayed, searchable)]
+        title: String,
+        #[index_config(displayed)]
+        description: String,
+        #[index_config(filterable, sortable, displayed)]
+        release_date: String,
+        #[index_config(filterable, displayed)]
+        genres: Vec<String>,
+    }
+
+    #[allow(unused)]
+    #[derive(IndexConfig)]
+    struct VideoClips {
+        video_id: u64,
+    }
+
+    async fn setup_test_index(client: &Client, index: &Index) -> Result<(), Error> {
+        let t0 = index
+            .add_documents(
+                &[
+                    MyObject {
+                        id: Some(0),
+                        kind: "text".into(),
+                    },
+                    MyObject {
+                        id: Some(1),
+                        kind: "text".into(),
+                    },
+                    MyObject {
+                        id: Some(2),
+                        kind: "title".into(),
+                    },
+                    MyObject {
+                        id: Some(3),
+                        kind: "title".into(),
+                    },
+                ],
+                None,
+            )
+            .await?;
+
+        t0.wait_for_completion(client, None, None).await?;
+
+        Ok(())
+    }
+
+    #[meilisearch_test]
+    async fn test_get_documents_with_execute(client: Client, index: Index) -> Result<(), Error> {
+        setup_test_index(&client, &index).await?;
+        let documents = DocumentsQuery::new(&index)
+            .with_limit(1)
+            .with_offset(1)
+            .with_fields(["kind"])
+            .execute::<MyObject>()
+            .await
+            .unwrap();
+
+        assert_eq!(documents.limit, 1);
+        assert_eq!(documents.offset, 1);
+        assert_eq!(documents.results.len(), 1);
+
+        Ok(())
+    }
+
+    #[meilisearch_test]
+    async fn test_delete_documents_with(client: Client, index: Index) -> Result<(), Error> {
+        setup_test_index(&client, &index).await?;
+        index
+            .set_filterable_attributes(["id"])
+            .await?
+            .wait_for_completion(&client, None, None)
+            .await?;
+
+        let mut query = DocumentDeletionQuery::new(&index);
+        query.with_filter("id = 1");
+        index
+            .delete_documents_with(&query)
+            .await?
+            .wait_for_completion(&client, None, None)
+            .await?;
+        let document_result = index.get_document::<MyObject>("1").await;
+
+        match document_result {
+            Ok(_) => panic!("The test was expecting no documents to be returned but got one."),
+            Err(e) => match e {
+                Error::Meilisearch(err) => {
+                    assert_eq!(err.error_code, ErrorCode::DocumentNotFound);
+                }
+                _ => panic!("The error was expected to be a Meilisearch error, but it was not."),
+            },
+        }
+
+        Ok(())
+    }
+
+    #[meilisearch_test]
+    async fn test_delete_documents_with_filter_not_filterable(
+        client: Client,
+        index: Index,
+    ) -> Result<(), Error> {
+        setup_test_index(&client, &index).await?;
+
+        let mut query = DocumentDeletionQuery::new(&index);
+        query.with_filter("id = 1");
+        let error = index
+            .delete_documents_with(&query)
+            .await?
+            .wait_for_completion(&client, None, None)
+            .await?;
+
+        let error = error.unwrap_failure();
+
+        assert!(matches!(
+            error,
+            MeilisearchError {
+                error_code: ErrorCode::InvalidDocumentFilter,
+                error_type: ErrorType::InvalidRequest,
+                ..
+            }
+        ));
+
+        Ok(())
+    }
+
+    #[meilisearch_test]
+    async fn test_get_documents_with_only_one_param(
+        client: Client,
+        index: Index,
+    ) -> Result<(), Error> {
+        setup_test_index(&client, &index).await?;
+        // let documents = index.get_documents(None, None, None).await.unwrap();
+        let documents = DocumentsQuery::new(&index)
+            .with_limit(1)
+            .execute::<MyObject>()
+            .await
+            .unwrap();
+
+        assert_eq!(documents.limit, 1);
+        assert_eq!(documents.offset, 0);
+        assert_eq!(documents.results.len(), 1);
+
+        Ok(())
+    }
+
+    #[meilisearch_test]
+    async fn test_get_documents_with_filter(client: Client, index: Index) -> Result<(), Error> {
+        setup_test_index(&client, &index).await?;
+
+        index
+            .set_filterable_attributes(["id"])
+            .await
+            .unwrap()
+            .wait_for_completion(&client, None, None)
+            .await
+            .unwrap();
+
+        let documents = DocumentsQuery::new(&index)
+            .with_filter("id = 1")
+            .execute::<MyObject>()
+            .await?;
+
+        assert_eq!(documents.results.len(), 1);
+
+        Ok(())
+    }
+
+    #[meilisearch_test]
+    async fn test_get_documents_with_error_hint() -> Result<(), Error> {
+        let meilisearch_url = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700");
+        let client = Client::new(format!("{meilisearch_url}/hello"), Some("masterKey")).unwrap();
+        let index = client.index("test_get_documents_with_filter_wrong_ms_version");
+
+        let documents = DocumentsQuery::new(&index)
+            .with_filter("id = 1")
+            .execute::<MyObject>()
+            .await;
+
+        let error = documents.unwrap_err();
+
+        let message = Some("Hint: It might not be working because you're not up to date with the Meilisearch version that updated the get_documents_with method.".to_string());
+        let url = format!(
+            "{meilisearch_url}/hello/indexes/test_get_documents_with_filter_wrong_ms_version/documents/fetch"
+        );
+        let status_code = 404;
+        let displayed_error = format!("MeilisearchCommunicationError: The server responded with a 404. Hint: It might not be working because you're not up to date with the Meilisearch version that updated the get_documents_with method.\nurl: {meilisearch_url}/hello/indexes/test_get_documents_with_filter_wrong_ms_version/documents/fetch");
+
+        match &error {
+            Error::MeilisearchCommunication(error) => {
+                assert_eq!(error.status_code, status_code);
+                assert_eq!(error.message, message);
+                assert_eq!(error.url, url);
+            }
+            _ => panic!("The error was expected to be a MeilisearchCommunicationError error, but it was not."),
+        };
+        assert_eq!(format!("{error}"), displayed_error);
+
+        Ok(())
+    }
+
+    #[meilisearch_test]
+    async fn test_get_documents_with_error_hint_meilisearch_api_error(
+        index: Index,
+        client: Client,
+    ) -> Result<(), Error> {
+        setup_test_index(&client, &index).await?;
+
+        let error = DocumentsQuery::new(&index)
+            .with_filter("id = 1")
+            .execute::<MyObject>()
+            .await
+            .unwrap_err();
+
+        let message = "Attribute `id` is not filterable. This index does not have configured filterable attributes.
+1:3 id = 1
+Hint: It might not be working because you're not up to date with the Meilisearch version that updated the get_documents_with method.".to_string();
+        let displayed_error = "Meilisearch invalid_request: invalid_document_filter: Attribute `id` is not filterable. This index does not have configured filterable attributes.
+1:3 id = 1
+Hint: It might not be working because you're not up to date with the Meilisearch version that updated the get_documents_with method.. https://docs.meilisearch.com/errors#invalid_document_filter";
+
+        match &error {
+            Error::Meilisearch(error) => {
+                assert_eq!(error.error_message, message);
+            }
+            _ => panic!("The error was expected to be a Meilisearch error, but it was not."),
+        };
+        assert_eq!(format!("{error}"), displayed_error);
+
+        Ok(())
+    }
+
+    #[meilisearch_test]
+    async fn test_get_documents_with_invalid_filter(
+        client: Client,
+        index: Index,
+    ) -> Result<(), Error> {
+        setup_test_index(&client, &index).await?;
+
+        // Does not work because `id` is not filterable
+        let error = DocumentsQuery::new(&index)
+            .with_filter("id = 1")
+            .execute::<MyObject>()
+            .await
+            .unwrap_err();
+
+        assert!(matches!(
+            error,
+            Error::Meilisearch(MeilisearchError {
+                error_code: ErrorCode::InvalidDocumentFilter,
+                error_type: ErrorType::InvalidRequest,
+                ..
+ }) + )); + + Ok(()) + } + + #[meilisearch_test] + async fn test_settings_generated_by_macro(client: Client, index: Index) -> Result<(), Error> { + setup_test_index(&client, &index).await?; + + let movie_settings: Settings = MovieClips::generate_settings(); + let video_settings: Settings = VideoClips::generate_settings(); + + assert_eq!(movie_settings.searchable_attributes.unwrap(), ["title"]); + assert!(video_settings.searchable_attributes.unwrap().is_empty()); + + assert_eq!( + movie_settings.displayed_attributes.unwrap(), + ["title", "description", "release_date", "genres"] + ); + assert!(video_settings.displayed_attributes.unwrap().is_empty()); + + assert_eq!( + movie_settings.filterable_attributes.unwrap(), + ["release_date", "genres"] + ); + assert!(video_settings.filterable_attributes.unwrap().is_empty()); + + assert_eq!( + movie_settings.sortable_attributes.unwrap(), + ["release_date"] + ); + assert!(video_settings.sortable_attributes.unwrap().is_empty()); + + Ok(()) + } + + #[meilisearch_test] + async fn test_generate_index(client: Client) -> Result<(), Error> { + let index: Index = MovieClips::generate_index(&client).await.unwrap(); + + assert_eq!(index.uid, "movie_clips"); + + index + .delete() + .await? + .wait_for_completion(&client, None, None) + .await?; + + Ok(()) + } + #[derive(Serialize, Deserialize, IndexConfig)] + struct Movie { + #[index_config(primary_key)] + movie_id: u64, + #[index_config(displayed, searchable)] + title: String, + #[index_config(displayed)] + description: String, + #[index_config(filterable, sortable, displayed)] + release_date: String, + #[index_config(filterable, displayed)] + genres: Vec, + } +} diff --git a/backend/vendor/meilisearch-sdk/src/dumps.rs b/backend/vendor/meilisearch-sdk/src/dumps.rs new file mode 100644 index 000000000..e9b093731 --- /dev/null +++ b/backend/vendor/meilisearch-sdk/src/dumps.rs @@ -0,0 +1,130 @@ +//! The `dumps` module allows the creation of database dumps. +//! +//! - Dumps are `.dump` files that can be used to launch Meilisearch. +//! +//! - Dumps are compatible between Meilisearch versions. +//! +//! - Creating a dump is also referred to as exporting it, whereas launching Meilisearch with a dump is referred to as importing it. +//! +//! - During a [dump export](Client::create_dump), all [indexes](crate::indexes::Index) of the current instance are exported—together with their documents and settings—and saved as a single `.dump` file. +//! +//! - During a dump import, all indexes contained in the indicated `.dump` file are imported along with their associated documents and [settings](crate::settings::Settings). +//! Any existing [index](crate::indexes::Index) with the same uid as an index in the dump file will be overwritten. +//! +//! - Dump imports are [performed at launch](https://www.meilisearch.com/docs/learn/configuration/instance_options#import-dump) using an option. +//! +//! # Example +//! +//! ``` +//! # use meilisearch_sdk::{client::*, errors::*, dumps::*, dumps::*, task_info::*, tasks::*}; +//! # use futures_await_test::async_test; +//! # use std::{thread::sleep, time::Duration}; +//! # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async { +//! # +//! # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700"); +//! # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey"); +//! # +//! # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap(); +//! +//! // Create a dump +//! 
+//! let task_info = client.create_dump().await.unwrap();
+//! assert!(matches!(
+//!     task_info,
+//!     TaskInfo {
+//!         update_type: TaskType::DumpCreation { .. },
+//!         ..
+//!     }
+//! ));
+//! # });
+//! ```
+
+use crate::{client::Client, errors::Error, request::*, task_info::TaskInfo};
+
+/// Dump related methods.
+/// See the [dumps](crate::dumps) module.
+impl<Http: HttpClient> Client<Http> {
+    /// Triggers a dump creation process.
+    ///
+    /// Once the process is complete, a dump is created in the [dumps directory](https://www.meilisearch.com/docs/learn/configuration/instance_options#dump-directory).
+    /// If the dumps directory does not exist yet, it will be created.
+    ///
+    /// # Example
+    ///
+    /// ```
+    /// # use meilisearch_sdk::{client::*, errors::*, dumps::*, task_info::*, tasks::*};
+    /// # use futures_await_test::async_test;
+    /// # use std::{thread::sleep, time::Duration};
+    /// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async {
+    /// #
+    /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700");
+    /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey");
+    /// #
+    /// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap();
+    /// #
+    /// let task_info = client.create_dump().await.unwrap();
+    ///
+    /// assert!(matches!(
+    ///     task_info,
+    ///     TaskInfo {
+    ///         update_type: TaskType::DumpCreation { .. },
+    ///         ..
+    ///     }
+    /// ));
+    /// # });
+    /// ```
+    pub async fn create_dump(&self) -> Result<TaskInfo, Error> {
+        self.http_client
+            .request::<(), (), TaskInfo>(
+                &format!("{}/dumps", self.host),
+                Method::Post {
+                    query: (),
+                    body: (),
+                },
+                202,
+            )
+            .await
+    }
+}
+
+/// Alias for [`create_dump`](Client::create_dump).
+pub async fn create_dump<Http: HttpClient>(client: &Client<Http>) -> Result<TaskInfo, Error> {
+    client.create_dump().await
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+    use crate::{client::*, tasks::*};
+    use meilisearch_test_macro::meilisearch_test;
+    use std::time::Duration;
+
+    #[meilisearch_test]
+    async fn test_dumps_success_creation(client: Client) -> Result<(), Error> {
+        let task = client
+            .create_dump()
+            .await?
+            .wait_for_completion(
+                &client,
+                Some(Duration::from_millis(1)),
+                Some(Duration::from_millis(6000)),
+            )
+            .await?;
+
+        assert!(matches!(task, Task::Succeeded { .. }));
+        Ok(())
+    }
+
+    #[meilisearch_test]
+    async fn test_dumps_correct_update_type(client: Client) -> Result<(), Error> {
+        let task_info = client.create_dump().await.unwrap();
+
+        assert!(matches!(
+            task_info,
+            TaskInfo {
+                update_type: TaskType::DumpCreation { .. },
+                ..
+            }
+        ));
+        Ok(())
+    }
+}
diff --git a/backend/vendor/meilisearch-sdk/src/errors.rs b/backend/vendor/meilisearch-sdk/src/errors.rs
new file mode 100644
index 000000000..44b853861
--- /dev/null
+++ b/backend/vendor/meilisearch-sdk/src/errors.rs
@@ -0,0 +1,421 @@
+use serde::{Deserialize, Serialize};
+use thiserror::Error;
+
+/// An enum representing the errors that can occur.
+#[derive(Debug, Error)]
+#[non_exhaustive]
+pub enum Error {
+    /// The exhaustive list of Meilisearch errors: <https://github.com/meilisearch/specifications/blob/main/text/0061-error-format-and-definitions.md>
+    ///
+    /// Also check out: <https://github.com/meilisearch/meilisearch/blob/main/meilisearch-error/src/lib.rs>
+    #[error(transparent)]
+    Meilisearch(#[from] MeilisearchError),
+
+    #[error(transparent)]
+    MeilisearchCommunication(#[from] MeilisearchCommunicationError),
+
+    /// The Meilisearch server returned an invalid JSON for a request.
+    #[error("Error parsing response JSON: {}", .0)]
+    ParseError(#[from] serde_json::Error),
+
+    /// A timeout happened while waiting for an update to complete.
+    #[error("A task did not succeed in time.")]
+    Timeout,
+
+    /// This Meilisearch SDK generated an invalid request (which was not sent).
+    ///
+    /// It probably comes from an invalid API key resulting in an invalid HTTP header.
+    #[error("Unable to generate a valid HTTP request. It probably comes from an invalid API key.")]
+    InvalidRequest,
+
+    /// Can't call this method without setting an api key in the client.
+    #[error("You need to provide an api key to use the `{0}` method.")]
+    CantUseWithoutApiKey(String),
+
+    /// It is not possible to generate a tenant token with an invalid api key.
+    ///
+    /// Empty strings or strings with fewer than 8 characters are considered invalid.
+    #[error("The provided api_key is invalid.")]
+    TenantTokensInvalidApiKey,
+
+    /// It is not possible to generate an already expired tenant token.
+    #[error("The provided expires_at is already expired.")]
+    TenantTokensExpiredSignature,
+
+    /// When jsonwebtoken cannot generate the token successfully.
+    #[cfg(not(target_arch = "wasm32"))]
+    #[error("Impossible to generate the token, jsonwebtoken encountered an error: {}", .0)]
+    InvalidTenantToken(#[from] jsonwebtoken::errors::Error),
+
+    /// The http client encountered an error.
+    #[cfg(feature = "reqwest")]
+    #[error("HTTP request failed: {}", .0)]
+    HttpError(#[from] reqwest::Error),
+
+    // The library formatting the query parameters encountered an error.
+    #[error("Internal Error: could not parse the query parameters: {}", .0)]
+    Yaup(#[from] yaup::Error),
+
+    // The library validating the format of a uuid.
+    #[cfg(not(target_arch = "wasm32"))]
+    #[error("The uid of the token is not of uuid4 format: {}", .0)]
+    Uuid(#[from] uuid::Error),
+
+    // Error thrown in case the version of the Uuid is not v4.
+    #[error("The uid provided to the token is not of version uuidv4")]
+    InvalidUuid4Version,
+
+    #[error(transparent)]
+    Other(Box<dyn std::error::Error + Send + Sync + 'static>),
+}
+
+#[derive(Debug, Clone, Deserialize, Error)]
+#[serde(rename_all = "camelCase")]
+pub struct MeilisearchCommunicationError {
+    pub status_code: u16,
+    pub message: Option<String>,
+    pub url: String,
+}
+
+impl std::fmt::Display for MeilisearchCommunicationError {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        write!(
+            f,
+            "MeilisearchCommunicationError: The server responded with a {}.",
+            self.status_code
+        )?;
+        if let Some(message) = &self.message {
+            write!(f, " {message}")?;
+        }
+        write!(f, "\nurl: {}", self.url)?;
+        Ok(())
+    }
+}
+
+#[derive(Debug, Clone, Deserialize, Error)]
+#[serde(rename_all = "camelCase")]
+#[error("Meilisearch {}: {}: {}. {}", .error_type, .error_code, .error_message, .error_link)]
+pub struct MeilisearchError {
+    /// The human readable error message
+    #[serde(rename = "message")]
+    pub error_message: String,
+    /// The error code of the error. Officially documented at
+    /// <https://www.meilisearch.com/docs/reference/errors/error_codes>.
+    #[serde(rename = "code")]
+    pub error_code: ErrorCode,
+    /// The type of error (invalid request, internal error, or authentication error)
+    #[serde(rename = "type")]
+    pub error_type: ErrorType,
+    /// A link to the Meilisearch documentation for an error.
+    #[serde(rename = "link")]
+    pub error_link: String,
+}
+
+/// The type of error that was encountered.
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
+#[serde(rename_all = "snake_case")]
+#[non_exhaustive]
+pub enum ErrorType {
+    /// The submitted request was invalid.
+    InvalidRequest,
+    /// The Meilisearch instance encountered an internal error.
+    Internal,
+    /// Authentication was either incorrect or missing.
+    Auth,
+
+    /// That's unexpected.
Please open a GitHub issue after ensuring you are + /// using the supported version of the Meilisearch server. + #[serde(other)] + Unknown, +} + +impl std::fmt::Display for ErrorType { + fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> Result<(), std::fmt::Error> { + write!( + fmt, + "{}", + // this can't fail + serde_json::to_value(self).unwrap().as_str().unwrap() + ) + } +} + +/// The error code. +/// +/// Officially documented at . +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +#[serde(rename_all = "snake_case")] +#[non_exhaustive] +pub enum ErrorCode { + IndexCreationFailed, + IndexAlreadyExists, + IndexNotFound, + InvalidIndexUid, + InvalidState, + PrimaryKeyInferenceFailed, + IndexPrimaryKeyAlreadyPresent, + InvalidStoreFile, + MaxFieldsLimitExceeded, + MissingDocumentId, + InvalidDocumentId, + BadParameter, + BadRequest, + DatabaseSizeLimitReached, + DocumentNotFound, + InternalError, + InvalidApiKey, + MissingAuthorizationHeader, + TaskNotFound, + DumpNotFound, + MissingMasterKey, + NoSpaceLeftOnDevice, + PayloadTooLarge, + UnretrievableDocument, + SearchError, + UnsupportedMediaType, + DumpAlreadyProcessing, + DumpProcessFailed, + MissingContentType, + MalformedPayload, + InvalidContentType, + MissingPayload, + InvalidApiKeyDescription, + InvalidApiKeyActions, + InvalidApiKeyIndexes, + InvalidApiKeyExpiresAt, + ApiKeyNotFound, + MissingTaskFilters, + MissingIndexUid, + InvalidIndexOffset, + InvalidIndexLimit, + InvalidIndexPrimaryKey, + InvalidDocumentFilter, + MissingDocumentFilter, + InvalidDocumentFields, + InvalidDocumentLimit, + InvalidDocumentOffset, + InvalidDocumentGeoField, + InvalidSearchQ, + InvalidSearchOffset, + InvalidSearchLimit, + InvalidSearchPage, + InvalidSearchHitsPerPage, + InvalidSearchAttributesToRetrieve, + InvalidSearchAttributesToCrop, + InvalidSearchCropLength, + InvalidSearchAttributesToHighlight, + InvalidSearchShowMatchesPosition, + InvalidSearchFilter, + InvalidSearchSort, + InvalidSearchFacets, + InvalidSearchHighlightPreTag, + InvalidSearchHighlightPostTag, + InvalidSearchCropMarker, + InvalidSearchMatchingStrategy, + ImmutableApiKeyUid, + ImmutableApiKeyActions, + ImmutableApiKeyIndexes, + ImmutableExpiresAt, + ImmutableCreatedAt, + ImmutableUpdatedAt, + InvalidSwapDuplicateIndexFound, + InvalidSwapIndexes, + MissingSwapIndexes, + InvalidTaskTypes, + InvalidTaskUids, + InvalidTaskStatuses, + InvalidTaskLimit, + InvalidTaskFrom, + InvalidTaskCanceledBy, + InvalidTaskFilters, + TooManyOpenFiles, + IoError, + InvalidTaskIndexUids, + ImmutableIndexUid, + ImmutableIndexCreatedAt, + ImmutableIndexUpdatedAt, + InvalidSettingsDisplayedAttributes, + InvalidSettingsSearchableAttributes, + InvalidSettingsFilterableAttributes, + InvalidSettingsSortableAttributes, + InvalidSettingsRankingRules, + InvalidSettingsStopWords, + InvalidSettingsSynonyms, + InvalidSettingsDistinctAttributes, + InvalidSettingsTypoTolerance, + InvalidSettingsFaceting, + InvalidSettingsDictionary, + InvalidSettingsPagination, + InvalidTaskBeforeEnqueuedAt, + InvalidTaskAfterEnqueuedAt, + InvalidTaskBeforeStartedAt, + InvalidTaskAfterStartedAt, + InvalidTaskBeforeFinishedAt, + InvalidTaskAfterFinishedAt, + MissingApiKeyActions, + MissingApiKeyIndexes, + MissingApiKeyExpiresAt, + InvalidApiKeyLimit, + InvalidApiKeyOffset, + + /// That's unexpected. Please open a GitHub issue after ensuring you are + /// using the supported version of the Meilisearch server. 
+ #[serde(other)] + Unknown, +} + +pub const MEILISEARCH_VERSION_HINT: &str = "Hint: It might not be working because you're not up to date with the Meilisearch version that updated the get_documents_with method"; + +impl std::fmt::Display for ErrorCode { + fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> Result<(), std::fmt::Error> { + write!( + fmt, + "{}", + // this can't fail + serde_json::to_value(self).unwrap().as_str().unwrap() + ) + } +} + +#[cfg(test)] +mod test { + use super::*; + + use jsonwebtoken::errors::ErrorKind::InvalidToken; + use meilisearch_test_macro::meilisearch_test; + use uuid::Uuid; + + #[meilisearch_test] + async fn test_meilisearch_error() { + let error: MeilisearchError = serde_json::from_str( + r#" +{ + "message": "The cool error message.", + "code": "index_creation_failed", + "type": "internal", + "link": "https://the best link ever" +}"#, + ) + .unwrap(); + + assert_eq!(error.error_message, "The cool error message."); + assert_eq!(error.error_code, ErrorCode::IndexCreationFailed); + assert_eq!(error.error_type, ErrorType::Internal); + assert_eq!(error.error_link, "https://the best link ever"); + + let error: MeilisearchError = serde_json::from_str( + r#" +{ + "message": "", + "code": "An unknown error", + "type": "An unknown type", + "link": "" +}"#, + ) + .unwrap(); + + assert_eq!(error.error_code, ErrorCode::Unknown); + assert_eq!(error.error_type, ErrorType::Unknown); + } + + #[meilisearch_test] + async fn test_error_message_parsing() { + let error: MeilisearchError = serde_json::from_str( + r#" +{ + "message": "The cool error message.", + "code": "index_creation_failed", + "type": "internal", + "link": "https://the best link ever" +}"#, + ) + .unwrap(); + + assert_eq!(error.to_string(), "Meilisearch internal: index_creation_failed: The cool error message.. https://the best link ever"); + + let error: MeilisearchCommunicationError = MeilisearchCommunicationError { + status_code: 404, + message: Some("Hint: something.".to_string()), + url: "http://localhost:7700/something".to_string(), + }; + + assert_eq!( + error.to_string(), + "MeilisearchCommunicationError: The server responded with a 404. Hint: something.\nurl: http://localhost:7700/something" + ); + + let error: MeilisearchCommunicationError = MeilisearchCommunicationError { + status_code: 404, + message: None, + url: "http://localhost:7700/something".to_string(), + }; + + assert_eq!( + error.to_string(), + "MeilisearchCommunicationError: The server responded with a 404.\nurl: http://localhost:7700/something" + ); + + let error = Error::Timeout; + assert_eq!(error.to_string(), "A task did not succeed in time."); + + let error = Error::InvalidRequest; + assert_eq!( + error.to_string(), + "Unable to generate a valid HTTP request. It probably comes from an invalid API key." + ); + + let error = Error::TenantTokensInvalidApiKey; + assert_eq!(error.to_string(), "The provided api_key is invalid."); + + let error = Error::TenantTokensExpiredSignature; + assert_eq!( + error.to_string(), + "The provided expires_at is already expired." 
+ ); + + let error = Error::InvalidUuid4Version; + assert_eq!( + error.to_string(), + "The uid provided to the token is not of version uuidv4" + ); + + let error = Error::Uuid(Uuid::parse_str("67e55044").unwrap_err()); + assert_eq!(error.to_string(), "The uid of the token has bit an uuid4 format: invalid length: expected length 32 for simple format, found 8"); + + let data = r#" + { + "name": "John Doe" + "age": 43, + }"#; + + let error = Error::ParseError(serde_json::from_str::(data).unwrap_err()); + assert_eq!( + error.to_string(), + "Error parsing response JSON: invalid type: map, expected a string at line 2 column 8" + ); + + let error = Error::HttpError( + reqwest::Client::new() + .execute(reqwest::Request::new( + reqwest::Method::POST, + // there will never be a `meilisearch.gouv.fr` addr since these domain name are controlled by the state of france + reqwest::Url::parse("https://meilisearch.gouv.fr").unwrap(), + )) + .await + .unwrap_err(), + ); + assert_eq!( + error.to_string(), + "HTTP request failed: error sending request for url (https://meilisearch.gouv.fr/)" + ); + + let error = Error::InvalidTenantToken(jsonwebtoken::errors::Error::from(InvalidToken)); + assert_eq!( + error.to_string(), + "Impossible to generate the token, jsonwebtoken encountered an error: InvalidToken" + ); + + let error = Error::Yaup(yaup::Error::Custom("Test yaup error".to_string())); + assert_eq!( + error.to_string(), + "Internal Error: could not parse the query parameters: Test yaup error" + ); + } +} diff --git a/backend/vendor/meilisearch-sdk/src/features.rs b/backend/vendor/meilisearch-sdk/src/features.rs new file mode 100644 index 000000000..3001beb48 --- /dev/null +++ b/backend/vendor/meilisearch-sdk/src/features.rs @@ -0,0 +1,132 @@ +use crate::{ + client::Client, + errors::Error, + request::{HttpClient, Method}, +}; +use serde::{Deserialize, Serialize}; + +/// Struct representing the experimental features result from the API. +#[derive(Clone, Debug, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct ExperimentalFeaturesResult { + pub vector_store: bool, +} + +/// Struct representing the experimental features request. +/// +/// You can build this struct using the builder pattern. 
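With `errors.rs` fully in place above, callers get typed error codes rather than opaque message strings. A minimal sketch of how application code can branch on them, outside the vendored file; it assumes a reachable instance and uses the SDK's `Client::get_index`:

```rust
use meilisearch_sdk::{
    client::Client,
    errors::{Error, ErrorCode},
};

/// Treat a missing index as a normal outcome instead of an error.
/// (Hypothetical helper, not part of the SDK.)
async fn index_exists(client: &Client, uid: &str) -> Result<bool, Error> {
    match client.get_index(uid).await {
        Ok(_) => Ok(true),
        // "index_not_found" deserializes to `ErrorCode::IndexNotFound` (see the enum above).
        Err(Error::Meilisearch(e)) if e.error_code == ErrorCode::IndexNotFound => Ok(false),
        Err(other) => Err(other),
    }
}
```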
+///
+/// # Example
+///
+/// ```
+/// # use meilisearch_sdk::{client::Client, features::ExperimentalFeatures};
+/// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700");
+/// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey");
+/// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap();
+/// let mut features = ExperimentalFeatures::new(&client);
+/// features.set_vector_store(true);
+/// ```
+#[derive(Debug, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct ExperimentalFeatures<'a, Http: HttpClient> {
+    #[serde(skip_serializing)]
+    client: &'a Client<Http>,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub vector_store: Option<bool>,
+}
+
+impl<'a, Http: HttpClient> ExperimentalFeatures<'a, Http> {
+    #[must_use]
+    pub fn new(client: &'a Client<Http>) -> Self {
+        ExperimentalFeatures {
+            client,
+            vector_store: None,
+        }
+    }
+
+    pub fn set_vector_store(&mut self, vector_store: bool) -> &mut Self {
+        self.vector_store = Some(vector_store);
+        self
+    }
+
+    /// Get all the experimental features.
+    ///
+    /// # Example
+    ///
+    /// ```
+    /// # use meilisearch_sdk::{client::Client, features::ExperimentalFeatures};
+    /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700");
+    /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey");
+    /// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap();
+    /// tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async {
+    ///     let features = ExperimentalFeatures::new(&client);
+    ///     features.get().await.unwrap();
+    /// });
+    /// ```
+    pub async fn get(&self) -> Result<ExperimentalFeaturesResult, Error> {
+        self.client
+            .http_client
+            .request::<(), (), ExperimentalFeaturesResult>(
+                &format!("{}/experimental-features", self.client.host),
+                Method::Get { query: () },
+                200,
+            )
+            .await
+    }
+
+    /// Update the experimental features.
+    ///
+    /// # Example
+    ///
+    /// ```
+    /// # use meilisearch_sdk::{client::Client, features::ExperimentalFeatures};
+    /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700");
+    /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey");
+    /// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap();
+    /// tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async {
+    ///     let mut features = ExperimentalFeatures::new(&client);
+    ///     features.set_vector_store(true);
+    ///     features.update().await.unwrap();
+    /// });
+    /// ```
+    pub async fn update(&self) -> Result<ExperimentalFeaturesResult, Error> {
+        self.client
+            .http_client
+            .request::<(), &Self, ExperimentalFeaturesResult>(
+                &format!("{}/experimental-features", self.client.host),
+                Method::Patch {
+                    query: (),
+                    body: self,
+                },
+                200,
+            )
+            .await
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+    use meilisearch_test_macro::meilisearch_test;
+
+    #[meilisearch_test]
+    async fn test_experimental_features_get(client: Client) {
+        let mut features = ExperimentalFeatures::new(&client);
+        features.set_vector_store(false);
+        let _ = features.update().await.unwrap();
+
+        let res = features.get().await.unwrap();
+
+        assert!(!res.vector_store);
+    }
+
+    #[meilisearch_test]
+    async fn test_experimental_features_enable_vector_store(client: Client) {
+        let mut features = ExperimentalFeatures::new(&client);
+        features.set_vector_store(true);
+
+        let res = features.update().await.unwrap();
+
+        assert!(res.vector_store);
+    }
+}
diff --git a/backend/vendor/meilisearch-sdk/src/indexes.rs b/backend/vendor/meilisearch-sdk/src/indexes.rs
new file mode 100644
index 000000000..05c728f1b
--- /dev/null
+++ b/backend/vendor/meilisearch-sdk/src/indexes.rs
@@ -0,0 +1,2265 @@
+use crate::{
+    client::Client,
+    documents::{DocumentDeletionQuery, DocumentQuery, DocumentsQuery, DocumentsResults},
+    errors::{Error, MeilisearchCommunicationError, MeilisearchError, MEILISEARCH_VERSION_HINT},
+    request::*,
+    search::*,
+    task_info::TaskInfo,
+    tasks::*,
+    DefaultHttpClient,
+};
+use serde::{de::DeserializeOwned, Deserialize, Serialize};
+use std::{collections::HashMap, fmt::Display, time::Duration};
+use time::OffsetDateTime;
+
+/// A Meilisearch [index](https://www.meilisearch.com/docs/learn/core_concepts/indexes).
+///
+/// # Example
+///
+/// You can create an index remotely and, if that succeeds, make an `Index` out of it.
+/// See the [`Client::create_index`] method.
+/// ```
+/// # use meilisearch_sdk::{client::*, indexes::*};
+/// #
+/// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700");
+/// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey");
+/// #
+/// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async {
+/// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap();
+///
+/// // create the index "index" (or get it if it already exists)
+/// let movies = client
+///     .create_index("index", None)
+///     .await
+///     .unwrap()
+///     // We wait for the task to execute until completion
+///     .wait_for_completion(&client, None, None)
+///     .await
+///     .unwrap()
+///     // Once the task finished, we try to create an `Index` out of it
+///     .try_make_index(&client)
+///     .unwrap();
+///
+/// assert_eq!(movies.as_ref(), "index");
+/// # movies.delete().await.unwrap().wait_for_completion(&client, None, None).await.unwrap();
+/// # });
+/// ```
+///
+/// Or, if you know the index already exists remotely, you can create an [Index] with its builder.
+/// ```
+/// # use meilisearch_sdk::{client::*, indexes::*};
+/// #
+/// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700");
+/// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey");
+/// #
+/// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async {
+/// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap();
+///
+/// // Meilisearch would be able to create the index if it does not exist during:
+/// // - the documents addition (add and update routes)
+/// // - the settings update
+/// let movies = Index::new("movies", client);
+///
+/// assert_eq!(movies.uid, "movies");
+/// # });
+/// ```
+#[derive(Debug, Serialize, Clone)]
+#[serde(rename_all = "camelCase")]
+pub struct Index<Http: HttpClient = DefaultHttpClient> {
+    #[serde(skip_serializing)]
+    pub client: Client<Http>,
+    pub uid: String,
+    #[serde(with = "time::serde::rfc3339::option")]
+    pub updated_at: Option<OffsetDateTime>,
+    #[serde(with = "time::serde::rfc3339::option")]
+    pub created_at: Option<OffsetDateTime>,
+    pub primary_key: Option<String>,
+}
+
+impl<Http: HttpClient> Index<Http> {
+    pub fn new(uid: impl Into<String>, client: Client<Http>) -> Index<Http> {
+        Index {
+            uid: uid.into(),
+            client,
+            primary_key: None,
+            created_at: None,
+            updated_at: None,
+        }
+    }
+
+    /// Internal function to create an [Index] from `serde_json::Value` and [Client].
+ pub(crate) fn from_value( + raw_index: serde_json::Value, + client: Client, + ) -> Result, Error> { + #[derive(Deserialize, Debug)] + #[allow(non_snake_case)] + struct IndexFromSerde { + uid: String, + #[serde(with = "time::serde::rfc3339::option")] + updatedAt: Option, + #[serde(with = "time::serde::rfc3339::option")] + createdAt: Option, + primaryKey: Option, + } + + let i: IndexFromSerde = serde_json::from_value(raw_index).map_err(Error::ParseError)?; + + Ok(Index { + uid: i.uid, + client, + created_at: i.createdAt, + updated_at: i.updatedAt, + primary_key: i.primaryKey, + }) + } + + /// Update an [Index]. + /// + /// # Example + /// + /// ``` + /// # use meilisearch_sdk::{client::*, indexes::*, task_info::*, tasks::*}; + /// # + /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700"); + /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey"); + /// # + /// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async { + /// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap(); + /// # let mut index = client + /// # .create_index("index_update", None) + /// # .await + /// # .unwrap() + /// # .wait_for_completion(&client, None, None) + /// # .await + /// # .unwrap() + /// # // Once the task finished, we try to create an `Index` out of it + /// # .try_make_index(&client) + /// # .unwrap(); + /// # + /// index.primary_key = Some("special_id".to_string()); + /// let task = index.update() + /// .await + /// .unwrap() + /// .wait_for_completion(&client, None, None) + /// .await + /// .unwrap(); + /// + /// let index = client.get_index("index_update").await.unwrap(); + /// + /// assert_eq!(index.primary_key, Some("special_id".to_string())); + /// # index.delete().await.unwrap().wait_for_completion(&client, None, None).await.unwrap(); + /// # }); + /// ``` + pub async fn update(&self) -> Result { + let mut index_update = IndexUpdater::new(self, &self.client); + + if let Some(ref primary_key) = self.primary_key { + index_update.with_primary_key(primary_key); + } + + index_update.execute().await + } + + /// Delete the index. + /// + /// # Example + /// + /// ``` + /// # use meilisearch_sdk::{client::*, indexes::*}; + /// # + /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700"); + /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey"); + /// # + /// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async { + /// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap(); + /// # let index = client.create_index("delete", None).await.unwrap().wait_for_completion(&client, None, None).await.unwrap().try_make_index(&client).unwrap(); + /// + /// // get the index named "movies" and delete it + /// let index = client.index("delete"); + /// let task = index.delete().await.unwrap(); + /// + /// client.wait_for_task(task, None, None).await.unwrap(); + /// # }); + /// ``` + pub async fn delete(self) -> Result { + self.client + .http_client + .request::<(), (), TaskInfo>( + &format!("{}/indexes/{}", self.client.host, self.uid), + Method::Delete { query: () }, + 202, + ) + .await + } + + /// Search for documents matching a specific query in the index. + /// + /// See also [`Index::search`]. 
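`execute_query` accepts a prebuilt `SearchQuery`, which is handy when the query is assembled in several steps. A sketch with a filter added to the builder chain; the `genre` field and its filterable status are assumptions, not part of this file:

```rust
use meilisearch_sdk::{client::Client, errors::Error, search::SearchQuery};
use serde::{Deserialize, Serialize};

#[derive(Serialize, Deserialize, Debug)]
struct Movie {
    name: String,
    genre: String, // hypothetical field, assumed declared filterable beforehand
}

async fn scifi_hits(client: &Client) -> Result<usize, Error> {
    let movies = client.index("movies");
    // `genre` must be filterable (e.g. via `set_filterable_attributes(["genre"])`),
    // otherwise Meilisearch rejects the filter expression.
    let query = SearchQuery::new(&movies)
        .with_query("wormhole")
        .with_filter("genre = 'sci-fi'")
        .with_limit(5)
        .build();
    let results = movies.execute_query::<Movie>(&query).await?;
    Ok(results.hits.len())
}
```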
+ /// + /// # Example + /// + /// ``` + /// # use serde::{Serialize, Deserialize}; + /// # use meilisearch_sdk::{client::*, indexes::*, search::*}; + /// # + /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700"); + /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey"); + /// # + /// #[derive(Serialize, Deserialize, Debug)] + /// struct Movie { + /// name: String, + /// description: String, + /// } + /// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async { + /// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap(); + /// let movies = client.index("execute_query"); + /// + /// // add some documents + /// # movies.add_or_replace(&[Movie{name:String::from("Interstellar"), description:String::from("Interstellar chronicles the adventures of a group of explorers who make use of a newly discovered wormhole to surpass the limitations on human space travel and conquer the vast distances involved in an interstellar voyage.")},Movie{name:String::from("Unknown"), description:String::from("Unknown")}], Some("name")).await.unwrap().wait_for_completion(&client, None, None).await.unwrap(); + /// + /// let query = SearchQuery::new(&movies).with_query("Interstellar").with_limit(5).build(); + /// let results = movies.execute_query::(&query).await.unwrap(); + /// + /// assert!(results.hits.len() > 0); + /// # movies.delete().await.unwrap().wait_for_completion(&client, None, None).await.unwrap(); + /// # }); + /// ``` + pub async fn execute_query( + &self, + body: &SearchQuery<'_, Http>, + ) -> Result, Error> { + self.client + .http_client + .request::<(), &SearchQuery, SearchResults>( + &format!("{}/indexes/{}/search", self.client.host, self.uid), + Method::Post { body, query: () }, + 200, + ) + .await + } + + /// Search for documents matching a specific query in the index. + /// + /// See also [`Index::execute_query`]. 
+ /// + /// # Example + /// + /// ``` + /// # use serde::{Serialize, Deserialize}; + /// # use meilisearch_sdk::{client::*, indexes::*, search::*}; + /// # + /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700"); + /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey"); + /// # + /// #[derive(Serialize, Deserialize, Debug)] + /// struct Movie { + /// name: String, + /// description: String, + /// } + /// + /// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async { + /// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap(); + /// let mut movies = client.index("search"); + /// # // add some documents + /// # movies.add_or_replace(&[Movie{name:String::from("Interstellar"), description:String::from("Interstellar chronicles the adventures of a group of explorers who make use of a newly discovered wormhole to surpass the limitations on human space travel and conquer the vast distances involved in an interstellar voyage.")},Movie{name:String::from("Unknown"), description:String::from("Unknown")}], Some("name")).await.unwrap().wait_for_completion(&client, None, None).await.unwrap(); + /// + /// let results = movies.search() + /// .with_query("Interstellar") + /// .with_limit(5) + /// .execute::() + /// .await + /// .unwrap(); + /// + /// assert!(results.hits.len() > 0); + /// # movies.delete().await.unwrap().wait_for_completion(&client, None, None).await.unwrap(); + /// # }); + /// ``` + #[must_use] + pub fn search(&self) -> SearchQuery { + SearchQuery::new(self) + } + + /// Get one document using its unique id. + /// + /// Serde is needed. Add `serde = {version="1.0", features=["derive"]}` in the dependencies section of your Cargo.toml. 
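One practical detail when modelling documents: Meilisearch primary-key values may only contain alphanumeric characters, hyphens (`-`) and underscores (`_`). A small sketch of a document type built around that constraint (the `slug` field is illustrative):

```rust
use serde::{Deserialize, Serialize};

#[derive(Serialize, Deserialize, Debug, PartialEq)]
struct Movie {
    // Used as the primary key: alphanumerics, '-' and '_' only,
    // so URL-style slugs such as "interstellar-2014" work well.
    slug: String,
    name: String,
    description: String,
}
```

Added with `add_documents(&docs, Some("slug"))`, such a document is then retrievable via `get_document::<Movie>("interstellar-2014")`.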
+ /// + /// # Example + /// + /// ``` + /// # use serde::{Serialize, Deserialize}; + /// # use meilisearch_sdk::{client::*, indexes::*}; + /// # + /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700"); + /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey"); + /// # + /// #[derive(Serialize, Deserialize, Debug, PartialEq)] + /// struct Movie { + /// name: String, + /// description: String + /// } + /// + /// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async { + /// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap(); + /// let movies = client.index("get_document"); + /// # movies.add_or_replace(&[Movie{name:String::from("Interstellar"), description:String::from("Interstellar chronicles the adventures of a group of explorers who make use of a newly discovered wormhole to surpass the limitations on human space travel and conquer the vast distances involved in an interstellar voyage.")}], Some("name")).await.unwrap().wait_for_completion(&client, None, None).await.unwrap(); + /// + /// // retrieve a document (you have to put the document in the index before) + /// let interstellar = movies.get_document::("Interstellar").await.unwrap(); + /// + /// assert_eq!(interstellar, Movie { + /// name: String::from("Interstellar"), + /// description: String::from("Interstellar chronicles the adventures of a group of explorers who make use of a newly discovered wormhole to surpass the limitations on human space travel and conquer the vast distances involved in an interstellar voyage."), + /// }); + /// # movies.delete().await.unwrap().wait_for_completion(&client, None, None).await.unwrap(); + /// # }); + /// ``` + pub async fn get_document( + &self, + document_id: &str, + ) -> Result { + let url = format!( + "{}/indexes/{}/documents/{}", + self.client.host, self.uid, document_id + ); + self.client + .http_client + .request::<(), (), T>(&url, Method::Get { query: () }, 200) + .await + } + + /// Get one document with parameters. 
+ /// + /// # Example + /// + /// ``` + /// # use meilisearch_sdk::{client::*, indexes::*, documents::*}; + /// # use serde::{Deserialize, Serialize}; + /// # + /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700"); + /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey"); + /// # + /// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap(); + /// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async { + /// #[derive(Debug, Serialize, Deserialize, PartialEq)] + /// struct MyObject { + /// id: String, + /// kind: String, + /// } + /// + /// #[derive(Debug, Serialize, Deserialize, PartialEq)] + /// struct MyObjectReduced { + /// id: String, + /// } + /// # let index = client.index("document_query_execute"); + /// # index.add_or_replace(&[MyObject{id:"1".to_string(), kind:String::from("a kind")},MyObject{id:"2".to_string(), kind:String::from("some kind")}], None).await.unwrap().wait_for_completion(&client, None, None).await.unwrap(); + /// + /// let mut document_query = DocumentQuery::new(&index); + /// document_query.with_fields(["id"]); + /// + /// let document = index.get_document_with::("1", &document_query).await.unwrap(); + /// + /// assert_eq!( + /// document, + /// MyObjectReduced { id: "1".to_string() } + /// ); + /// # index.delete().await.unwrap().wait_for_completion(&client, None, None).await.unwrap(); + /// # }); + pub async fn get_document_with( + &self, + document_id: &str, + document_query: &DocumentQuery<'_, Http>, + ) -> Result { + let url = format!( + "{}/indexes/{}/documents/{}", + self.client.host, self.uid, document_id + ); + self.client + .http_client + .request::<&DocumentQuery, (), T>( + &url, + Method::Get { + query: document_query, + }, + 200, + ) + .await + } + + /// Get documents by batch. 
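`get_documents` returns a single batch, so exhaustive exports need paging. A sketch of an offset-based loop; `with_offset` is assumed to exist on `DocumentsQuery` alongside the `with_limit` used elsewhere in this file, mirroring the route's `offset` parameter:

```rust
use meilisearch_sdk::{documents::DocumentsQuery, errors::Error, indexes::Index};
use serde::{Deserialize, Serialize};

#[derive(Serialize, Deserialize, Debug)]
struct Movie {
    name: String,
    description: String,
}

const PAGE: usize = 100; // arbitrary page size

async fn export_all(index: &Index) -> Result<Vec<Movie>, Error> {
    let mut all = Vec::new();
    let mut offset = 0;
    loop {
        let mut query = DocumentsQuery::new(index);
        query.with_offset(offset); // assumed builder method (see lead-in)
        query.with_limit(PAGE);
        let page = index.get_documents_with::<Movie>(&query).await?;
        let fetched = page.results.len();
        all.extend(page.results);
        if fetched < PAGE {
            break; // short page: nothing left to fetch
        }
        offset += fetched;
    }
    Ok(all)
}
```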
+ /// + /// # Example + /// + /// ``` + /// # use serde::{Serialize, Deserialize}; + /// # use meilisearch_sdk::{client::*, indexes::*}; + /// # + /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700"); + /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey"); + /// # + /// #[derive(Serialize, Deserialize, PartialEq, Debug)] + /// struct Movie { + /// name: String, + /// description: String, + /// } + /// + /// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async { + /// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap(); + /// let movie_index = client.index("get_documents"); + /// # movie_index.add_or_replace(&[Movie{name:String::from("Interstellar"), description:String::from("Interstellar chronicles the adventures of a group of explorers who make use of a newly discovered wormhole to surpass the limitations on human space travel and conquer the vast distances involved in an interstellar voyage.")}], Some("name")).await.unwrap().wait_for_completion(&client, None, None).await.unwrap(); + /// + /// // retrieve movies (you have to put some movies in the index before) + /// let movies = movie_index.get_documents::().await.unwrap(); + /// + /// assert!(movies.results.len() > 0); + /// # movie_index.delete().await.unwrap().wait_for_completion(&client, None, None).await.unwrap(); + /// # }); + /// ``` + pub async fn get_documents( + &self, + ) -> Result, Error> { + let url = format!("{}/indexes/{}/documents", self.client.host, self.uid); + self.client + .http_client + .request::<(), (), DocumentsResults>(&url, Method::Get { query: () }, 200) + .await + } + + /// Get documents by batch with parameters. + /// + /// # Example + /// + /// ``` + /// # use serde::{Serialize, Deserialize}; + /// # use meilisearch_sdk::{client::*, indexes::*, documents::*}; + /// # + /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700"); + /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey"); + /// # + /// #[derive(Serialize, Deserialize, PartialEq, Debug)] + /// struct Movie { + /// name: String, + /// description: String, + /// } + /// + /// #[derive(Deserialize, Debug, PartialEq)] + /// struct ReturnedMovie { + /// name: String, + /// } + /// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async { + /// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap(); + /// + /// let movie_index = client.index("get_documents_with"); + /// # movie_index.add_or_replace(&[Movie{name:String::from("Interstellar"), description:String::from("Interstellar chronicles the adventures of a group of explorers who make use of a newly discovered wormhole to surpass the limitations on human space travel and conquer the vast distances involved in an interstellar voyage.")}], Some("name")).await.unwrap().wait_for_completion(&client, None, None).await.unwrap(); + /// + /// let mut query = DocumentsQuery::new(&movie_index); + /// query.with_limit(1); + /// query.with_fields(["name"]); + /// // retrieve movies (you have to put some movies in the index before) + /// let movies = movie_index.get_documents_with::(&query).await.unwrap(); + /// + /// assert_eq!(movies.results.len(), 1); + /// # movie_index.delete().await.unwrap().wait_for_completion(&client, None, None).await.unwrap(); + /// # }); + /// ``` + pub async fn get_documents_with( + &self, + documents_query: 
&DocumentsQuery<'_, Http>, + ) -> Result, Error> { + if documents_query.filter.is_some() { + let url = format!("{}/indexes/{}/documents/fetch", self.client.host, self.uid); + return self + .client + .http_client + .request::<(), &DocumentsQuery, DocumentsResults>( + &url, + Method::Post { + body: documents_query, + query: (), + }, + 200, + ) + .await + .map_err(|err| match err { + Error::MeilisearchCommunication(error) => { + Error::MeilisearchCommunication(MeilisearchCommunicationError { + status_code: error.status_code, + url: error.url, + message: Some(format!("{}.", MEILISEARCH_VERSION_HINT)), + }) + } + Error::Meilisearch(error) => Error::Meilisearch(MeilisearchError { + error_code: error.error_code, + error_link: error.error_link, + error_type: error.error_type, + error_message: format!( + "{}\n{}.", + error.error_message, MEILISEARCH_VERSION_HINT + ), + }), + _ => err, + }); + } + + let url = format!("{}/indexes/{}/documents", self.client.host, self.uid); + self.client + .http_client + .request::<&DocumentsQuery, (), DocumentsResults>( + &url, + Method::Get { + query: documents_query, + }, + 200, + ) + .await + } + + /// Add a list of documents or replace them if they already exist. + /// + /// If you send an already existing document (same id) the **whole existing document** will be overwritten by the new document. + /// Fields previously in the document not present in the new document are removed. + /// + /// For a partial update of the document see [`Index::add_or_update`]. + /// + /// You can use the alias [`Index::add_documents`] if you prefer. + /// + /// # Example + /// + /// ``` + /// # use serde::{Serialize, Deserialize}; + /// # use meilisearch_sdk::{client::*, indexes::*}; + /// # use std::thread::sleep; + /// # use std::time::Duration; + /// # + /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700"); + /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey"); + /// # + /// #[derive(Serialize, Deserialize, Debug)] + /// struct Movie { + /// name: String, + /// description: String, + /// } + /// + /// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async { + /// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap(); + /// let movie_index = client.index("add_or_replace"); + /// + /// let task = movie_index.add_or_replace(&[ + /// Movie{ + /// name: String::from("Interstellar"), + /// description: String::from("Interstellar chronicles the adventures of a group of explorers who make use of a newly discovered wormhole to surpass the limitations on human space travel and conquer the vast distances involved in an interstellar voyage.") + /// }, + /// Movie{ + /// // note that the id field can only take alphanumerics characters (and '-' and '/') + /// name: String::from("MrsDoubtfire"), + /// description: String::from("Loving but irresponsible dad Daniel Hillard, estranged from his exasperated spouse, is crushed by a court order allowing only weekly visits with his kids. When Daniel learns his ex needs a housekeeper, he gets the job -- disguised as an English nanny. 
Soon he becomes not only his children's best pal but the kind of parent he should have been from the start.") + /// }, + /// Movie{ + /// name: String::from("Apollo13"), + /// description: String::from("The true story of technical troubles that scuttle the Apollo 13 lunar mission in 1971, risking the lives of astronaut Jim Lovell and his crew, with the failed journey turning into a thrilling saga of heroism. Drifting more than 200,000 miles from Earth, the astronauts work furiously with the ground crew to avert tragedy.") + /// }, + /// ], Some("name")).await.unwrap(); + /// // Meilisearch may take some time to execute the request so we are going to wait till it's completed + /// client.wait_for_task(task, None, None).await.unwrap(); + /// + /// let movies = movie_index.get_documents::().await.unwrap(); + /// assert!(movies.results.len() >= 3); + /// # movie_index.delete().await.unwrap().wait_for_completion(&client, None, None).await.unwrap(); + /// # }); + /// ``` + pub async fn add_or_replace( + &self, + documents: &[T], + primary_key: Option<&str>, + ) -> Result { + let url = if let Some(primary_key) = primary_key { + format!( + "{}/indexes/{}/documents?primaryKey={}", + self.client.host, self.uid, primary_key + ) + } else { + format!("{}/indexes/{}/documents", self.client.host, self.uid) + }; + self.client + .http_client + .request::<(), &[T], TaskInfo>( + &url, + Method::Post { + query: (), + body: documents, + }, + 202, + ) + .await + } + + /// Add a raw and unchecked payload to meilisearch. + /// + /// This can be useful if your application is only forwarding data from other sources. + /// + /// If you send an already existing document (same id) the **whole existing document** will be overwritten by the new document. + /// Fields previously in the document not present in the new document are removed. + /// + /// For a partial update of the document see [`Index::add_or_update_unchecked_payload`]. 
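Since `add_or_replace`/`add_documents` sends one payload per call, very large collections are usually pushed in chunks, waiting on each returned task. A sketch, not part of the vendored file; the chunk size and the "id" primary key are arbitrary choices:

```rust
use meilisearch_sdk::{client::Client, errors::Error, indexes::Index};
use serde::Serialize;

async fn add_in_batches<T: Serialize + Send + Sync>(
    index: &Index,
    client: &Client,
    docs: &[T],
) -> Result<(), Error> {
    for chunk in docs.chunks(10_000) {
        // Each chunk yields its own TaskInfo; waiting keeps memory and task
        // queue pressure bounded. Inspecting the returned Task for failure is elided.
        index
            .add_documents(chunk, Some("id"))
            .await?
            .wait_for_completion(client, None, None)
            .await?;
    }
    Ok(())
}
```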
+ /// + /// # Example + /// + /// ``` + /// # use serde::{Serialize, Deserialize}; + /// # use meilisearch_sdk::{client::*, indexes::*}; + /// # use std::thread::sleep; + /// # use std::time::Duration; + /// # + /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700"); + /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey"); + /// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async { + /// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap(); + /// let movie_index = client.index("add_or_replace_unchecked_payload"); + /// + /// let task = movie_index.add_or_replace_unchecked_payload( + /// r#"{ "id": 1, "body": "doggo" } + /// { "id": 2, "body": "catto" }"#.as_bytes(), + /// "application/x-ndjson", + /// Some("id"), + /// ).await.unwrap(); + /// // Meilisearch may take some time to execute the request so we are going to wait till it's completed + /// client.wait_for_task(task, None, None).await.unwrap(); + /// + /// let movies = movie_index.get_documents::().await.unwrap(); + /// assert_eq!(movies.results.len(), 2); + /// # movie_index.delete().await.unwrap().wait_for_completion(&client, None, None).await.unwrap(); + /// # }); + /// ``` + pub async fn add_or_replace_unchecked_payload< + T: futures_io::AsyncRead + Send + Sync + 'static, + >( + &self, + payload: T, + content_type: &str, + primary_key: Option<&str>, + ) -> Result { + let url = if let Some(primary_key) = primary_key { + format!( + "{}/indexes/{}/documents?primaryKey={}", + self.client.host, self.uid, primary_key + ) + } else { + format!("{}/indexes/{}/documents", self.client.host, self.uid) + }; + self.client + .http_client + .stream_request::<(), T, TaskInfo>( + &url, + Method::Post { + query: (), + body: payload, + }, + content_type, + 202, + ) + .await + } + + /// Alias for [`Index::add_or_replace`]. + pub async fn add_documents( + &self, + documents: &[T], + primary_key: Option<&str>, + ) -> Result { + self.add_or_replace(documents, primary_key).await + } + + /// Add a raw ndjson payload and update them if they already. + /// + /// It configures the correct content type for ndjson data. + /// + /// If you send an already existing document (same id) the old document will be only partially updated according to the fields of the new document. + /// Thus, any fields not present in the new document are kept and remained unchanged. + /// + /// To completely overwrite a document, check out the [`Index::add_documents_ndjson`] documents method. 
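The ndjson helpers above take any `futures_io::AsyncRead`, so an existing file can be forwarded without deserializing it first. A sketch using `futures::io::Cursor`, which implements `AsyncRead` over in-memory bytes; the file path is hypothetical:

```rust
use futures::io::Cursor; // implements futures_io::AsyncRead over in-memory bytes
use meilisearch_sdk::{errors::Error, indexes::Index, task_info::TaskInfo};

async fn import_ndjson(index: &Index) -> Result<TaskInfo, Error> {
    // Read the whole file up front and hand it over verbatim;
    // Meilisearch parses the ndjson server-side.
    let bytes = std::fs::read("movies.ndjson").map_err(|e| Error::Other(Box::new(e)))?;
    index
        .update_documents_ndjson(Cursor::new(bytes), Some("id"))
        .await
}
```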
+ /// + /// # Example + /// + /// ``` + /// # use serde::{Serialize, Deserialize}; + /// # use meilisearch_sdk::{client::*, indexes::*}; + /// # use std::thread::sleep; + /// # use std::time::Duration; + /// # + /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700"); + /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey"); + /// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async { + /// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap(); + /// let movie_index = client.index("update_documents_ndjson"); + /// + /// let task = movie_index.update_documents_ndjson( + /// r#"{ "id": 1, "body": "doggo" } + /// { "id": 2, "body": "catto" }"#.as_bytes(), + /// Some("id"), + /// ).await.unwrap(); + /// // Meilisearch may take some time to execute the request so we are going to wait till it's completed + /// client.wait_for_task(task, None, None).await.unwrap(); + /// + /// let movies = movie_index.get_documents::().await.unwrap(); + /// assert_eq!(movies.results.len(), 2); + /// # movie_index.delete().await.unwrap().wait_for_completion(&client, None, None).await.unwrap(); + /// # }); + /// ``` + #[cfg(not(target_arch = "wasm32"))] + pub async fn update_documents_ndjson( + &self, + payload: T, + primary_key: Option<&str>, + ) -> Result { + self.add_or_update_unchecked_payload(payload, "application/x-ndjson", primary_key) + .await + } + + /// Add a raw ndjson payload to meilisearch. + /// + /// It configures the correct content type for ndjson data. + /// + /// If you send an already existing document (same id) the **whole existing document** will be overwritten by the new document. + /// Fields previously in the document not present in the new document are removed. + /// + /// For a partial update of the document see [`Index::update_documents_ndjson`]. + /// + /// # Example + /// + /// ``` + /// # use serde::{Serialize, Deserialize}; + /// # use meilisearch_sdk::{client::*, indexes::*}; + /// # use std::thread::sleep; + /// # use std::time::Duration; + /// # + /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700"); + /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey"); + /// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async { + /// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap(); + /// let movie_index = client.index("add_documents_ndjson"); + /// + /// let task = movie_index.add_documents_ndjson( + /// r#"{ "id": 1, "body": "doggo" } + /// { "id": 2, "body": "catto" }"#.as_bytes(), + /// Some("id"), + /// ).await.unwrap(); + /// // Meilisearch may take some time to execute the request so we are going to wait till it's completed + /// client.wait_for_task(task, None, None).await.unwrap(); + /// + /// let movies = movie_index.get_documents::().await.unwrap(); + /// assert_eq!(movies.results.len(), 2); + /// # movie_index.delete().await.unwrap().wait_for_completion(&client, None, None).await.unwrap(); + /// # }); + /// ``` + #[cfg(not(target_arch = "wasm32"))] + pub async fn add_documents_ndjson( + &self, + payload: T, + primary_key: Option<&str>, + ) -> Result { + self.add_or_replace_unchecked_payload(payload, "application/x-ndjson", primary_key) + .await + } + + /// Add a raw csv payload and update them if they already. + /// + /// It configures the correct content type for csv data. 
+ /// + /// If you send an already existing document (same id) the old document will be only partially updated according to the fields of the new document. + /// Thus, any fields not present in the new document are kept and remained unchanged. + /// + /// To completely overwrite a document, check out the [`Index::add_documents_csv`] documents method. + /// + /// # Example + /// + /// ``` + /// # use serde::{Serialize, Deserialize}; + /// # use meilisearch_sdk::{client::*, indexes::*}; + /// # use std::thread::sleep; + /// # use std::time::Duration; + /// # + /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700"); + /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey"); + /// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async { + /// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap(); + /// let movie_index = client.index("update_documents_csv"); + /// + /// let task = movie_index.update_documents_csv( + /// "id,body\n1,\"doggo\"\n2,\"catto\"".as_bytes(), + /// Some("id"), + /// ).await.unwrap(); + /// // Meilisearch may take some time to execute the request so we are going to wait till it's completed + /// client.wait_for_task(task, None, None).await.unwrap(); + /// + /// let movies = movie_index.get_documents::().await.unwrap(); + /// assert_eq!(movies.results.len(), 2); + /// # movie_index.delete().await.unwrap().wait_for_completion(&client, None, None).await.unwrap(); + /// # }); + /// ``` + #[cfg(not(target_arch = "wasm32"))] + pub async fn update_documents_csv( + &self, + payload: T, + primary_key: Option<&str>, + ) -> Result { + self.add_or_update_unchecked_payload(payload, "text/csv", primary_key) + .await + } + + /// Add a raw csv payload to meilisearch. + /// + /// It configures the correct content type for csv data. + /// + /// If you send an already existing document (same id) the **whole existing document** will be overwritten by the new document. + /// Fields previously in the document not present in the new document are removed. + /// + /// For a partial update of the document see [`Index::update_documents_csv`]. 
+ /// + /// # Example + /// + /// ``` + /// # use serde::{Serialize, Deserialize}; + /// # use meilisearch_sdk::{client::*, indexes::*}; + /// # use std::thread::sleep; + /// # use std::time::Duration; + /// # + /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700"); + /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey"); + /// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async { + /// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap(); + /// let movie_index = client.index("add_documents_csv"); + /// + /// let task = movie_index.add_documents_csv( + /// "id,body\n1,\"doggo\"\n2,\"catto\"".as_bytes(), + /// Some("id"), + /// ).await.unwrap(); + /// // Meilisearch may take some time to execute the request so we are going to wait till it's completed + /// client.wait_for_task(task, None, None).await.unwrap(); + /// + /// let movies = movie_index.get_documents::().await.unwrap(); + /// assert_eq!(movies.results.len(), 2); + /// # movie_index.delete().await.unwrap().wait_for_completion(&client, None, None).await.unwrap(); + /// # }); + /// ``` + #[cfg(not(target_arch = "wasm32"))] + pub async fn add_documents_csv( + &self, + payload: T, + primary_key: Option<&str>, + ) -> Result { + self.add_or_replace_unchecked_payload(payload, "text/csv", primary_key) + .await + } + + /// Add a list of documents and update them if they already. + /// + /// If you send an already existing document (same id) the old document will be only partially updated according to the fields of the new document. + /// Thus, any fields not present in the new document are kept and remained unchanged. + /// + /// To completely overwrite a document, check out the [`Index::add_or_replace`] documents method. + /// + /// # Example + /// + /// ``` + /// # use serde::{Serialize, Deserialize}; + /// # use meilisearch_sdk::client::*; + /// # use std::thread::sleep; + /// # use std::time::Duration; + /// # + /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700"); + /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey"); + /// # + /// #[derive(Serialize, Deserialize, Debug)] + /// struct Movie { + /// name: String, + /// description: String, + /// } + /// + /// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async { + /// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap(); + /// let movie_index = client.index("add_or_update"); + /// + /// let task = movie_index.add_or_update(&[ + /// Movie { + /// name: String::from("Interstellar"), + /// description: String::from("Interstellar chronicles the adventures of a group of explorers who make use of a newly discovered wormhole to surpass the limitations on human space travel and conquer the vast distances involved in an interstellar voyage.") + /// }, + /// Movie { + /// // note that the id field can only take alphanumerics characters (and '-' and '/') + /// name: String::from("MrsDoubtfire"), + /// description: String::from("Loving but irresponsible dad Daniel Hillard, estranged from his exasperated spouse, is crushed by a court order allowing only weekly visits with his kids. When Daniel learns his ex needs a housekeeper, he gets the job -- disguised as an English nanny. 
Soon he becomes not only his children's best pal but the kind of parent he should have been from the start.") + /// }, + /// Movie { + /// name: String::from("Apollo13"), + /// description: String::from("The true story of technical troubles that scuttle the Apollo 13 lunar mission in 1971, risking the lives of astronaut Jim Lovell and his crew, with the failed journey turning into a thrilling saga of heroism. Drifting more than 200,000 miles from Earth, the astronauts work furiously with the ground crew to avert tragedy.") + /// }, + /// ], Some("name")).await.unwrap(); + /// + /// // Meilisearch may take some time to execute the request so we are going to wait till it's completed + /// client.wait_for_task(task, None, None).await.unwrap(); + /// + /// let movies = movie_index.get_documents::().await.unwrap(); + /// assert!(movies.results.len() >= 3); + /// # movie_index.delete().await.unwrap().wait_for_completion(&client, None, None).await.unwrap(); + /// # }); + /// ``` + pub async fn add_or_update( + &self, + documents: &[T], + primary_key: Option<&str>, + ) -> Result { + let url = if let Some(primary_key) = primary_key { + format!( + "{}/indexes/{}/documents?primaryKey={}", + self.client.host, self.uid, primary_key + ) + } else { + format!("{}/indexes/{}/documents", self.client.host, self.uid) + }; + self.client + .http_client + .request::<(), &[T], TaskInfo>( + &url, + Method::Put { + query: (), + body: documents, + }, + 202, + ) + .await + } + + /// Add a raw and unchecked payload to meilisearch. + /// + /// This can be useful if your application is only forwarding data from other sources. + /// + /// If you send an already existing document (same id) the old document will be only partially updated according to the fields of the new document. + /// Thus, any fields not present in the new document are kept and remained unchanged. + /// + /// To completely overwrite a document, check out the [`Index::add_or_replace_unchecked_payload`] documents method. 
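The partial-update semantics of `add_or_update` pair nicely with serde's `skip_serializing_if`: fields that are not serialized are simply left untouched on the stored document. A sketch with hypothetical field names:

```rust
use meilisearch_sdk::{client::Client, errors::Error, indexes::Index};
use serde::Serialize;

#[derive(Serialize)]
struct MoviePatch {
    name: String, // primary key, always sent
    // `None` fields are omitted from the payload entirely,
    // so the stored values for them remain unchanged.
    #[serde(skip_serializing_if = "Option::is_none")]
    description: Option<String>,
}

async fn update_description(index: &Index, client: &Client) -> Result<(), Error> {
    let patch = MoviePatch {
        name: "Interstellar".into(),
        description: Some("A space exploration epic.".into()),
    };
    index
        .add_or_update(&[patch], Some("name"))
        .await?
        .wait_for_completion(client, None, None)
        .await?;
    Ok(())
}
```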
+ /// + /// # Example + /// + /// ``` + /// # use serde::{Serialize, Deserialize}; + /// # use meilisearch_sdk::{client::*, indexes::*}; + /// # use std::thread::sleep; + /// # use std::time::Duration; + /// # + /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700"); + /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey"); + /// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async { + /// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap(); + /// let movie_index = client.index("add_or_replace_unchecked_payload"); + /// + /// let task = movie_index.add_or_update_unchecked_payload( + /// r#"{ "id": 1, "body": "doggo" } + /// { "id": 2, "body": "catto" }"#.as_bytes(), + /// "application/x-ndjson", + /// Some("id"), + /// ).await.unwrap(); + /// // Meilisearch may take some time to execute the request so we are going to wait till it's completed + /// client.wait_for_task(task, None, None).await.unwrap(); + /// + /// let movies = movie_index.get_documents::().await.unwrap(); + /// + /// assert_eq!(movies.results.len(), 2); + /// # movie_index.delete().await.unwrap().wait_for_completion(&client, None, None).await.unwrap(); + /// # }); + /// ``` + #[cfg(not(target_arch = "wasm32"))] + pub async fn add_or_update_unchecked_payload< + T: futures_io::AsyncRead + Send + Sync + 'static, + >( + &self, + payload: T, + content_type: &str, + primary_key: Option<&str>, + ) -> Result { + let url = if let Some(primary_key) = primary_key { + format!( + "{}/indexes/{}/documents?primaryKey={}", + self.client.host, self.uid, primary_key + ) + } else { + format!("{}/indexes/{}/documents", self.client.host, self.uid) + }; + self.client + .http_client + .stream_request::<(), T, TaskInfo>( + &url, + Method::Put { + query: (), + body: payload, + }, + content_type, + 202, + ) + .await + } + + /// Delete all documents in the [Index]. 
+ /// + /// # Example + /// + /// ``` + /// # use serde::{Serialize, Deserialize}; + /// # use meilisearch_sdk::{client::*, indexes::*}; + /// # + /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700"); + /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey"); + /// # + /// # #[derive(Serialize, Deserialize, Debug)] + /// # struct Movie { + /// # name: String, + /// # description: String, + /// # } + /// # + /// # + /// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async { + /// # + /// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap(); + /// let movie_index = client.index("delete_all_documents"); + /// # + /// # movie_index.add_or_replace(&[Movie{name:String::from("Interstellar"), description:String::from("Interstellar chronicles the adventures of a group of explorers who make use of a newly discovered wormhole to surpass the limitations on human space travel and conquer the vast distances involved in an interstellar voyage.")}], Some("name")).await.unwrap().wait_for_completion(&client, None, None).await.unwrap(); + /// # + /// movie_index.delete_all_documents() + /// .await + /// .unwrap() + /// .wait_for_completion(&client, None, None) + /// .await + /// .unwrap(); + /// let movies = movie_index.get_documents::().await.unwrap(); + /// assert_eq!(movies.results.len(), 0); + /// # movie_index.delete().await.unwrap().wait_for_completion(&client, None, None).await.unwrap(); + /// # }); + /// ``` + pub async fn delete_all_documents(&self) -> Result { + self.client + .http_client + .request::<(), (), TaskInfo>( + &format!("{}/indexes/{}/documents", self.client.host, self.uid), + Method::Delete { query: () }, + 202, + ) + .await + } + + /// Delete one document based on its unique id. 
+ /// + /// # Example + /// + /// ``` + /// # use serde::{Serialize, Deserialize}; + /// # use meilisearch_sdk::client::*; + /// # + /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700"); + /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey"); + /// # + /// # #[derive(Serialize, Deserialize, Debug)] + /// # struct Movie { + /// # name: String, + /// # description: String, + /// # } + /// # + /// # + /// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async { + /// # + /// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap(); + /// let mut movies = client.index("delete_document"); + /// # movies.add_or_replace(&[Movie{name:String::from("Interstellar"), description:String::from("Interstellar chronicles the adventures of a group of explorers who make use of a newly discovered wormhole to surpass the limitations on human space travel and conquer the vast distances involved in an interstellar voyage.")}], Some("name")).await.unwrap().wait_for_completion(&client, None, None).await.unwrap(); + /// // add a document with id = Interstellar + /// movies.delete_document("Interstellar") + /// .await + /// .unwrap() + /// .wait_for_completion(&client, None, None) + /// .await + /// .unwrap(); + /// # movies.delete().await.unwrap().wait_for_completion(&client, None, None).await.unwrap(); + /// # }); + /// ``` + pub async fn delete_document(&self, uid: T) -> Result { + self.client + .http_client + .request::<(), (), TaskInfo>( + &format!( + "{}/indexes/{}/documents/{}", + self.client.host, self.uid, uid + ), + Method::Delete { query: () }, + 202, + ) + .await + } + + /// Delete a selection of documents based on array of document id's. 
+ /// + /// # Example + /// + /// ``` + /// # use serde::{Serialize, Deserialize}; + /// # use meilisearch_sdk::client::*; + /// # + /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700"); + /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey"); + /// # + /// # #[derive(Serialize, Deserialize, Debug)] + /// # struct Movie { + /// # name: String, + /// # description: String, + /// # } + /// # + /// # + /// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async { + /// # + /// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap(); + /// let movies = client.index("delete_documents"); + /// # + /// # // add some documents + /// # movies.add_or_replace(&[Movie{name:String::from("Interstellar"), description:String::from("Interstellar chronicles the adventures of a group of explorers who make use of a newly discovered wormhole to surpass the limitations on human space travel and conquer the vast distances involved in an interstellar voyage.")},Movie{name:String::from("Unknown"), description:String::from("Unknown")}], Some("name")).await.unwrap().wait_for_completion(&client, None, None).await.unwrap(); + /// # + /// // delete some documents + /// movies.delete_documents(&["Interstellar", "Unknown"]) + /// .await + /// .unwrap() + /// .wait_for_completion(&client, None, None) + /// .await + /// .unwrap(); + /// # movies.delete().await.unwrap().wait_for_completion(&client, None, None).await.unwrap(); + /// # }); + /// ``` + pub async fn delete_documents( + &self, + uids: &[T], + ) -> Result { + self.client + .http_client + .request::<(), &[T], TaskInfo>( + &format!( + "{}/indexes/{}/documents/delete-batch", + self.client.host, self.uid + ), + Method::Post { + query: (), + body: uids, + }, + 202, + ) + .await + } + + /// Delete a selection of documents with filters. 
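`delete_documents_with` only accepts filters over attributes that were previously made filterable, and the settings change is itself an asynchronous task. A sketch that sequences both steps explicitly; the `status` attribute is an assumption:

```rust
use meilisearch_sdk::{
    client::Client, documents::DocumentDeletionQuery, errors::Error, indexes::Index,
};

async fn purge_drafts(index: &Index, client: &Client) -> Result<(), Error> {
    // Make `status` filterable and wait: filtered deletion fails
    // until this settings task has actually finished.
    index
        .set_filterable_attributes(["status"])
        .await?
        .wait_for_completion(client, None, None)
        .await?;

    let mut query = DocumentDeletionQuery::new(index);
    query.with_filter("status = draft");
    index
        .delete_documents_with(&query)
        .await?
        .wait_for_completion(client, None, None)
        .await?;
    Ok(())
}
```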
+ /// + /// # Example + /// + /// ``` + /// # use serde::{Serialize, Deserialize}; + /// # use meilisearch_sdk::{client::*, documents::*}; + /// # + /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700"); + /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey"); + /// # + /// # #[derive(Serialize, Deserialize, Debug)] + /// # struct Movie { + /// # name: String, + /// # id: String, + /// # } + /// # + /// # + /// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async { + /// # + /// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap(); + /// let index = client.index("delete_documents_with"); + /// # + /// # index.set_filterable_attributes(["id"]); + /// # // add some documents + /// # index.add_or_replace(&[Movie{id:String::from("1"), name: String::from("First movie") }, Movie{id:String::from("1"), name: String::from("First movie") }], Some("id")).await.unwrap().wait_for_completion(&client, None, None).await.unwrap(); + /// + /// let mut query = DocumentDeletionQuery::new(&index); + /// query.with_filter("id = 1"); + /// // delete some documents + /// index.delete_documents_with(&query) + /// .await + /// .unwrap() + /// .wait_for_completion(&client, None, None) + /// .await + /// .unwrap(); + /// # index.delete().await.unwrap().wait_for_completion(&client, None, None).await.unwrap(); + /// # }); + /// ``` + pub async fn delete_documents_with( + &self, + query: &DocumentDeletionQuery<'_, Http>, + ) -> Result { + self.client + .http_client + .request::<(), &DocumentDeletionQuery, TaskInfo>( + &format!("{}/indexes/{}/documents/delete", self.client.host, self.uid), + Method::Post { + query: (), + body: query, + }, + 202, + ) + .await + } + + /// Alias for the [`Index::update`] method. + pub async fn set_primary_key( + &mut self, + primary_key: impl AsRef, + ) -> Result { + self.primary_key = Some(primary_key.as_ref().to_string()); + + self.update().await + } + + /// Fetch the information of the index as a raw JSON [Index], this index should already exist. + /// + /// If you use it directly from the [Client], you can use the method [`Client::get_raw_index`], which is the equivalent method from the client. + /// + /// # Example + /// + /// ``` + /// # use meilisearch_sdk::{client::*, indexes::*}; + /// # + /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700"); + /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey"); + /// # + /// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async { + /// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap(); + /// # let index = client.create_index("fetch_info", None).await.unwrap().wait_for_completion(&client, None, None).await.unwrap().try_make_index(&client).unwrap(); + /// let mut idx = client.index("fetch_info"); + /// idx.fetch_info().await.unwrap(); + /// + /// println!("{idx:?}"); + /// # index.delete().await.unwrap().wait_for_completion(&client, None, None).await.unwrap(); + /// # }); + /// ``` + pub async fn fetch_info(&mut self) -> Result<(), Error> { + let v = self.client.get_raw_index(&self.uid).await?; + *self = Index::from_value(v, self.client.clone())?; + Ok(()) + } + + /// Fetch the primary key of the index. 
+    ///
+    /// # Example
+    ///
+    /// ```
+    /// # use meilisearch_sdk::{client::*, indexes::*};
+    /// #
+    /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700");
+    /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey");
+    /// #
+    /// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async {
+    /// # // create the client
+    /// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap();
+    /// let mut index = client.create_index("get_primary_key", Some("id"))
+    ///     .await
+    ///     .unwrap()
+    ///     .wait_for_completion(&client, None, None)
+    ///     .await.unwrap()
+    ///     .try_make_index(&client)
+    ///     .unwrap();
+    ///
+    /// let primary_key = index.get_primary_key().await.unwrap();
+    ///
+    /// assert_eq!(primary_key, Some("id"));
+    /// # index.delete().await.unwrap().wait_for_completion(&client, None, None).await.unwrap();
+    /// # });
+    /// ```
+    pub async fn get_primary_key(&mut self) -> Result<Option<&str>, Error> {
+        self.fetch_info().await?;
+        Ok(self.primary_key.as_deref())
+    }
+
+    /// Get a [Task] from a specific [Index] to keep track of [asynchronous operations](https://www.meilisearch.com/docs/learn/advanced/asynchronous_operations).
+    ///
+    /// # Example
+    ///
+    /// ```
+    /// # use serde::{Serialize, Deserialize};
+    /// # use std::thread::sleep;
+    /// # use std::time::Duration;
+    /// # use meilisearch_sdk::{client::*, indexes::*, tasks::Task};
+    /// #
+    /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700");
+    /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey");
+    /// #
+    /// # #[derive(Debug, Serialize, Deserialize, PartialEq)]
+    /// # struct Document {
+    /// #     id: usize,
+    /// #     value: String,
+    /// #     kind: String,
+    /// # }
+    /// #
+    /// #
+    /// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async {
+    /// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap();
+    /// let movies = client.index("get_task");
+    ///
+    /// let task = movies.add_documents(&[
+    ///     Document { id: 0, kind: "title".into(), value: "The Social Network".to_string() }
+    /// ], None).await.unwrap();
+    /// # task.clone().wait_for_completion(&client, None, None).await.unwrap();
+    ///
+    /// // Get task status from the index, using `uid`
+    /// let status = movies.get_task(&task).await.unwrap();
+    ///
+    /// let from_index = match status {
+    ///     Task::Enqueued { content } => content.uid,
+    ///     Task::Processing { content } => content.uid,
+    ///     Task::Failed { content } => content.task.uid,
+    ///     Task::Succeeded { content } => content.uid,
+    /// };
+    ///
+    /// assert_eq!(task.get_task_uid(), from_index);
+    /// # movies.delete().await.unwrap().wait_for_completion(&client, None, None).await.unwrap();
+    /// # });
+    /// ```
+    pub async fn get_task(&self, uid: impl AsRef<u32>) -> Result<Task, Error> {
+        self.client
+            .http_client
+            .request::<(), (), Task>(
+                &format!("{}/tasks/{}", self.client.host, uid.as_ref()),
+                Method::Get { query: () },
+                200,
+            )
+            .await
+    }
+
+    /// Get the status of all tasks in a given index.
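+    ///
+    /// This is a convenience wrapper that builds a [`TasksSearchQuery`] restricted to this
+    /// index's uid; use [`Index::get_tasks_with`] when you need additional filters.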
+    ///
+    /// # Example
+    ///
+    /// ```
+    /// # use serde::{Serialize, Deserialize};
+    /// # use meilisearch_sdk::{client::*, indexes::*};
+    /// #
+    /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700");
+    /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey");
+    /// #
+    /// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async {
+    /// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap();
+    /// # let index = client.create_index("get_tasks", None).await.unwrap().wait_for_completion(&client, None, None).await.unwrap().try_make_index(&client).unwrap();
+    /// let tasks = index.get_tasks().await.unwrap();
+    ///
+    /// assert!(tasks.results.len() > 0);
+    /// # index.delete().await.unwrap().wait_for_completion(&client, None, None).await.unwrap();
+    /// # });
+    /// ```
+    pub async fn get_tasks(&self) -> Result<TasksResults, Error> {
+        let mut query = TasksSearchQuery::new(&self.client);
+        query.with_index_uids([self.uid.as_str()]);
+
+        self.client.get_tasks_with(&query).await
+    }
+
+    /// Get the status of all tasks in a given index, filtered by a custom [`TasksSearchQuery`].
+    ///
+    /// # Example
+    ///
+    /// ```
+    /// # use serde::{Serialize, Deserialize};
+    /// # use meilisearch_sdk::{client::*, indexes::*, tasks::*};
+    /// #
+    /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700");
+    /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey");
+    /// #
+    /// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async {
+    /// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap();
+    /// # let index = client.create_index("get_tasks_with", None).await.unwrap().wait_for_completion(&client, None, None).await.unwrap().try_make_index(&client).unwrap();
+    /// let mut query = TasksSearchQuery::new(&client);
+    /// query.with_index_uids(["non_existent"]);
+    ///
+    /// // The `index_uids` filter is overridden with this index's uid, so results are returned.
+    /// let tasks = index.get_tasks_with(&query).await.unwrap();
+    ///
+    /// assert!(tasks.results.len() > 0);
+    /// # index.delete().await.unwrap().wait_for_completion(&client, None, None).await.unwrap();
+    /// # });
+    /// ```
+    pub async fn get_tasks_with(
+        &self,
+        tasks_query: &TasksQuery<'_, TasksPaginationFilters, Http>,
+    ) -> Result<TasksResults, Error> {
+        let mut query = tasks_query.clone();
+        query.with_index_uids([self.uid.as_str()]);
+
+        self.client.get_tasks_with(&query).await
+    }
+
+    /// Get stats of an index.
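+    ///
+    /// The returned [`IndexStats`] reports the number of documents, whether the index is
+    /// currently indexing, and the distribution of fields across documents.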
+    ///
+    /// # Example
+    ///
+    /// ```
+    /// # use meilisearch_sdk::{client::*, indexes::*};
+    /// #
+    /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700");
+    /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey");
+    /// #
+    /// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async {
+    /// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap();
+    /// # let index = client.create_index("get_stats", None).await.unwrap().wait_for_completion(&client, None, None).await.unwrap().try_make_index(&client).unwrap();
+    /// let stats = index.get_stats().await.unwrap();
+    ///
+    /// assert_eq!(stats.is_indexing, false);
+    /// # index.delete().await.unwrap().wait_for_completion(&client, None, None).await.unwrap();
+    /// # });
+    /// ```
+    pub async fn get_stats(&self) -> Result<IndexStats, Error> {
+        self.client
+            .http_client
+            .request::<(), (), IndexStats>(
+                &format!("{}/indexes/{}/stats", self.client.host, self.uid),
+                Method::Get { query: () },
+                200,
+            )
+            .await
+    }
+
+    /// Wait until Meilisearch processes a [Task], and get its status.
+    ///
+    /// `interval` = The frequency at which the server should be polled. **Default = 50ms**
+    ///
+    /// `timeout` = The maximum time to wait for processing to complete. **Default = 5000ms**
+    ///
+    /// If the waited time exceeds `timeout`, an [`Error::Timeout`] is returned.
+    ///
+    /// See also [`Client::wait_for_task`] and [`Task::wait_for_completion`].
+    ///
+    /// # Example
+    ///
+    /// ```
+    /// # use meilisearch_sdk::{client::*, indexes::*, tasks::Task};
+    /// # use serde::{Serialize, Deserialize};
+    /// #
+    /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700");
+    /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey");
+    /// #
+    /// # #[derive(Debug, Serialize, Deserialize, PartialEq)]
+    /// # struct Document {
+    /// #     id: usize,
+    /// #     value: String,
+    /// #     kind: String,
+    /// # }
+    /// #
+    /// #
+    /// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async {
+    /// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap();
+    /// let movies = client.index("movies_index_wait_for_task");
+    ///
+    /// let task = movies.add_documents(&[
+    ///     Document { id: 0, kind: "title".into(), value: "The Social Network".to_string() },
+    ///     Document { id: 1, kind: "title".into(), value: "Harry Potter and the Sorcerer's Stone".to_string() },
+    /// ], None).await.unwrap();
+    ///
+    /// let status = movies.wait_for_task(task, None, None).await.unwrap();
+    ///
+    /// assert!(matches!(status, Task::Succeeded { .. }));
+    /// # movies.delete().await.unwrap().wait_for_completion(&client, None, None).await.unwrap();
+    /// # });
+    /// ```
+    pub async fn wait_for_task(
+        &self,
+        task_id: impl AsRef<u32>,
+        interval: Option<std::time::Duration>,
+        timeout: Option<std::time::Duration>,
+    ) -> Result<Task, Error> {
+        self.client.wait_for_task(task_id, interval, timeout).await
+    }
+
+    /// Add documents to the index in batches.
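+    ///
+    /// Internally, `documents` is split into chunks of `batch_size` and
+    /// [`Index::add_documents`] is called once per chunk, so one [`TaskInfo`] is returned
+    /// per batch.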
+    ///
+    /// `documents` = A slice of documents
+    ///
+    /// `batch_size` = Optional parameter that allows you to specify the size of the batch
+    ///
+    /// **`batch_size` is 1000 by default**
+    ///
+    /// # Example
+    ///
+    /// ```
+    /// # use serde::{Serialize, Deserialize};
+    /// # use meilisearch_sdk::client::*;
+    /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700");
+    /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey");
+    /// #
+    /// #[derive(Serialize, Deserialize, Debug)]
+    /// struct Movie {
+    ///     name: String,
+    ///     description: String,
+    /// }
+    ///
+    /// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async {
+    /// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap();
+    /// let movie_index = client.index("add_documents_in_batches");
+    ///
+    /// let tasks = movie_index.add_documents_in_batches(&[
+    ///     Movie {
+    ///         name: String::from("Interstellar"),
+    ///         description: String::from("Interstellar chronicles the adventures of a group of explorers who make use of a newly discovered wormhole to surpass the limitations on human space travel and conquer the vast distances involved in an interstellar voyage.")
+    ///     },
+    ///     Movie {
+    ///         // note that the id field can only take alphanumeric characters (and '-' and '_')
+    ///         name: String::from("MrsDoubtfire"),
+    ///         description: String::from("Loving but irresponsible dad Daniel Hillard, estranged from his exasperated spouse, is crushed by a court order allowing only weekly visits with his kids. When Daniel learns his ex needs a housekeeper, he gets the job -- disguised as an English nanny. Soon he becomes not only his children's best pal but the kind of parent he should have been from the start.")
+    ///     },
+    ///     Movie {
+    ///         name: String::from("Apollo13"),
+    ///         description: String::from("The true story of technical troubles that scuttle the Apollo 13 lunar mission in 1971, risking the lives of astronaut Jim Lovell and his crew, with the failed journey turning into a thrilling saga of heroism. Drifting more than 200,000 miles from Earth, the astronauts work furiously with the ground crew to avert tragedy.")
+    ///     }],
+    ///     Some(1),
+    ///     Some("name")
+    /// ).await.unwrap();
+    ///
+    /// client.wait_for_task(tasks.last().unwrap(), None, None).await.unwrap();
+    ///
+    /// let movies = movie_index.get_documents::<Movie>().await.unwrap();
+    ///
+    /// assert!(movies.results.len() >= 3);
+    /// # movie_index.delete().await.unwrap().wait_for_completion(&client, None,
+    /// # None).await.unwrap();
+    /// # });
+    /// ```
+    pub async fn add_documents_in_batches<T: Serialize + Send + Sync>(
+        &self,
+        documents: &[T],
+        batch_size: Option<usize>,
+        primary_key: Option<&str>,
+    ) -> Result<Vec<TaskInfo>, Error> {
+        let mut tasks = Vec::with_capacity(documents.len());
+        for document_batch in documents.chunks(batch_size.unwrap_or(1000)) {
+            tasks.push(self.add_documents(document_batch, primary_key).await?);
+        }
+        Ok(tasks)
+    }
+
+    /// Update documents in the index in batches.
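+    ///
+    /// This behaves like [`Index::add_documents_in_batches`], except that each chunk is sent
+    /// via [`Index::add_or_update`], so documents that already exist in the index keep any
+    /// fields that are not part of the update.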
+    ///
+    /// `documents` = A slice of documents
+    ///
+    /// `batch_size` = Optional parameter that allows you to specify the size of the batch
+    ///
+    /// **`batch_size` is 1000 by default**
+    ///
+    /// # Example
+    ///
+    /// ```
+    /// # use serde::{Serialize, Deserialize};
+    /// # use meilisearch_sdk::client::*;
+    /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700");
+    /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey");
+    /// #
+    /// #[derive(Serialize, Deserialize, Debug, Eq, PartialEq)]
+    /// struct Movie {
+    ///     name: String,
+    ///     description: String,
+    /// }
+    ///
+    /// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async {
+    /// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap();
+    /// let movie_index = client.index("update_documents_in_batches");
+    ///
+    /// let tasks = movie_index.add_documents_in_batches(&[
+    ///     Movie {
+    ///         name: String::from("Interstellar"),
+    ///         description: String::from("Interstellar chronicles the adventures of a group of explorers who make use of a newly discovered wormhole to surpass the limitations on human space travel and conquer the vast distances involved in an interstellar voyage.")
+    ///     },
+    ///     Movie {
+    ///         // note that the id field can only take alphanumeric characters (and '-' and '_')
+    ///         name: String::from("MrsDoubtfire"),
+    ///         description: String::from("Loving but irresponsible dad Daniel Hillard, estranged from his exasperated spouse, is crushed by a court order allowing only weekly visits with his kids. When Daniel learns his ex needs a housekeeper, he gets the job -- disguised as an English nanny. Soon he becomes not only his children's best pal but the kind of parent he should have been from the start.")
+    ///     },
+    ///     Movie {
+    ///         name: String::from("Apollo13"),
+    ///         description: String::from("The true story of technical troubles that scuttle the Apollo 13 lunar mission in 1971, risking the lives of astronaut Jim Lovell and his crew, with the failed journey turning into a thrilling saga of heroism. Drifting more than 200,000 miles from Earth, the astronauts work furiously with the ground crew to avert tragedy.")
+    ///     }],
+    ///     Some(1),
+    ///     Some("name")
+    /// ).await.unwrap();
+    ///
+    /// client.wait_for_task(tasks.last().unwrap(), None, None).await.unwrap();
+    ///
+    /// let movies = movie_index.get_documents::<Movie>().await.unwrap();
+    /// assert!(movies.results.len() >= 3);
+    ///
+    /// let updated_movies = [
+    ///     Movie {
+    ///         name: String::from("Interstellar"),
+    ///         description: String::from("Updated!")
+    ///     },
+    ///     Movie {
+    ///         // note that the id field can only take alphanumeric characters (and '-' and '_')
+    ///         name: String::from("MrsDoubtfire"),
+    ///         description: String::from("Updated!")
+    ///     },
+    ///     Movie {
+    ///         name: String::from("Apollo13"),
+    ///         description: String::from("Updated!")
+    ///     }];
+    ///
+    /// let tasks = movie_index.update_documents_in_batches(&updated_movies, Some(1), None).await.unwrap();
+    ///
+    /// client.wait_for_task(tasks.last().unwrap(), None, None).await.unwrap();
+    ///
+    /// let movies_updated = movie_index.get_documents::<Movie>().await.unwrap();
+    ///
+    /// assert!(movies_updated.results.len() >= 3);
+    /// # movie_index.delete().await.unwrap().wait_for_completion(&client, None,
+    /// # None).await.unwrap();
+    /// # });
+    /// ```
+    pub async fn update_documents_in_batches<T: Serialize + Send + Sync>(
+        &self,
+        documents: &[T],
+        batch_size: Option<usize>,
+        primary_key: Option<&str>,
+    ) -> Result<Vec<TaskInfo>, Error> {
+        let mut tasks = Vec::with_capacity(documents.len());
+        for document_batch in documents.chunks(batch_size.unwrap_or(1000)) {
+            tasks.push(self.add_or_update(document_batch, primary_key).await?);
+        }
+        Ok(tasks)
+    }
+}
+
+impl<Http: HttpClient> AsRef<str> for Index<Http> {
+    fn as_ref(&self) -> &str {
+        &self.uid
+    }
+}
+
+/// An [`IndexUpdater`] used to update the specifics of an index.
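+///
+/// Currently only the primary key of an index can be changed this way; calling
+/// [`IndexUpdater::execute`] sends the update as a `PATCH` request to the
+/// `/indexes/{uid}` route.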
+///
+/// # Example
+///
+/// ```
+/// # use meilisearch_sdk::{client::*, indexes::*, task_info::*, tasks::{Task, SucceededTask}};
+/// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700");
+/// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey");
+/// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async {
+/// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap();
+/// # let index = client
+/// #     .create_index("index_updater", None)
+/// #     .await
+/// #     .unwrap()
+/// #     .wait_for_completion(&client, None, None)
+/// #     .await
+/// #     .unwrap()
+/// #     // Once the task finished, we try to create an `Index` out of it
+/// #     .try_make_index(&client)
+/// #     .unwrap();
+/// let task = IndexUpdater::new("index_updater", &client)
+///     .with_primary_key("special_id")
+///     .execute()
+///     .await
+///     .unwrap()
+///     .wait_for_completion(&client, None, None)
+///     .await
+///     .unwrap();
+///
+/// let index = client.get_index("index_updater").await.unwrap();
+///
+/// assert_eq!(index.primary_key, Some("special_id".to_string()));
+/// # index.delete().await.unwrap().wait_for_completion(&client, None, None).await.unwrap();
+/// # });
+/// ```
+#[derive(Debug, Serialize, Clone)]
+#[serde(rename_all = "camelCase")]
+pub struct IndexUpdater<'a, Http: HttpClient> {
+    #[serde(skip)]
+    pub client: &'a Client<Http>,
+    #[serde(skip_serializing)]
+    pub uid: String,
+    pub primary_key: Option<String>,
+}
+
+impl<'a, Http: HttpClient> IndexUpdater<'a, Http> {
+    pub fn new(uid: impl AsRef<str>, client: &'a Client<Http>) -> IndexUpdater<'a, Http> {
+        IndexUpdater {
+            client,
+            primary_key: None,
+            uid: uid.as_ref().to_string(),
+        }
+    }
+
+    /// Define the new `primary_key` to set on the [Index].
+    ///
+    /// # Example
+    ///
+    /// ```
+    /// # use meilisearch_sdk::{client::*, indexes::*, task_info::*, tasks::{Task, SucceededTask}};
+    /// #
+    /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700");
+    /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey");
+    /// #
+    /// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async {
+    /// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap();
+    /// # let index = client
+    /// #     .create_index("index_updater_with_primary_key", None)
+    /// #     .await
+    /// #     .unwrap()
+    /// #     .wait_for_completion(&client, None, None)
+    /// #     .await
+    /// #     .unwrap()
+    /// #     // Once the task finished, we try to create an `Index` out of it
+    /// #     .try_make_index(&client)
+    /// #     .unwrap();
+    /// let task = IndexUpdater::new("index_updater_with_primary_key", &client)
+    ///     .with_primary_key("special_id")
+    ///     .execute()
+    ///     .await
+    ///     .unwrap()
+    ///     .wait_for_completion(&client, None, None)
+    ///     .await
+    ///     .unwrap();
+    ///
+    /// let index = client.get_index("index_updater_with_primary_key").await.unwrap();
+    ///
+    /// assert_eq!(index.primary_key, Some("special_id".to_string()));
+    /// # index.delete().await.unwrap().wait_for_completion(&client, None, None).await.unwrap();
+    /// # });
+    /// ```
+    pub fn with_primary_key(
+        &mut self,
+        primary_key: impl AsRef<str>,
+    ) -> &mut IndexUpdater<'a, Http> {
+        self.primary_key = Some(primary_key.as_ref().to_string());
+        self
+    }
+
+    /// Execute the update of an [Index] using the [`IndexUpdater`].
+    ///
+    /// # Example
+    ///
+    /// ```
+    /// # use meilisearch_sdk::{client::*, indexes::*, task_info::*, tasks::{Task, SucceededTask}};
+    /// #
+    /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700");
+    /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey");
+    /// #
+    /// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async {
+    /// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap();
+    /// # let index = client
+    /// #     .create_index("index_updater_execute", None)
+    /// #     .await
+    /// #     .unwrap()
+    /// #     .wait_for_completion(&client, None, None)
+    /// #     .await
+    /// #     .unwrap()
+    /// #     // Once the task finished, we try to create an `Index` out of it
+    /// #     .try_make_index(&client)
+    /// #     .unwrap();
+    /// let task = IndexUpdater::new("index_updater_execute", &client)
+    ///     .with_primary_key("special_id")
+    ///     .execute()
+    ///     .await
+    ///     .unwrap()
+    ///     .wait_for_completion(&client, None, None)
+    ///     .await
+    ///     .unwrap();
+    ///
+    /// let index = client.get_index("index_updater_execute").await.unwrap();
+    ///
+    /// assert_eq!(index.primary_key, Some("special_id".to_string()));
+    /// # index.delete().await.unwrap().wait_for_completion(&client, None, None).await.unwrap();
+    /// # });
+    /// ```
+    pub async fn execute(&'a self) -> Result<TaskInfo, Error> {
+        self.client
+            .http_client
+            .request::<(), &IndexUpdater<Http>, TaskInfo>(
+                &format!("{}/indexes/{}", self.client.host, self.uid),
+                Method::Patch {
+                    query: (),
+                    body: self,
+                },
+                202,
+            )
+            .await
+    }
+}
+
+impl<Http: HttpClient> AsRef<str> for IndexUpdater<'_, Http> {
+    fn as_ref(&self) -> &str {
+        &self.uid
+    }
+}
+
+impl<'a, Http: HttpClient> AsRef<IndexUpdater<'a, Http>> for IndexUpdater<'a, Http> {
+    fn as_ref(&self) -> &IndexUpdater<'a, Http> {
+        self
+    }
+}
+
+#[derive(Debug, Clone, Deserialize)]
+#[serde(rename_all = "camelCase")]
+pub struct IndexStats {
+    pub number_of_documents: usize,
+    pub is_indexing: bool,
+    pub field_distribution: HashMap<String, usize>,
+}
+
+/// An [`IndexesQuery`] containing filter and pagination parameters when searching for [Indexes](Index).
+///
+/// # Example
+///
+/// ```
+/// # use meilisearch_sdk::{client::*, indexes::*};
+/// #
+/// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700");
+/// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey");
+/// #
+/// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async {
+/// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap();
+/// # let index = client
+/// #     .create_index("index_query_builder", None)
+/// #     .await
+/// #     .unwrap()
+/// #     .wait_for_completion(&client, None, None)
+/// #     .await
+/// #     .unwrap()
+/// #     // Once the task finished, we try to create an `Index` out of it.
+/// #     .try_make_index(&client)
+/// #     .unwrap();
+/// let mut indexes = IndexesQuery::new(&client)
+///     .with_limit(1)
+///     .execute().await.unwrap();
+///
+/// assert_eq!(indexes.results.len(), 1);
+/// # index.delete().await.unwrap().wait_for_completion(&client, None, None).await.unwrap();
+/// # });
+/// ```
+#[derive(Debug, Serialize, Clone)]
+#[serde(rename_all = "camelCase")]
+pub struct IndexesQuery<'a, Http: HttpClient> {
+    #[serde(skip_serializing)]
+    pub client: &'a Client<Http>,
+    /// The number of [Indexes](Index) to skip.
+    ///
+    /// If the value of the parameter `offset` is `n`, the `n` first indexes will not be returned.
+    /// This is helpful for pagination.
+    ///
+    /// Example: If you want to skip the first index, set offset to `1`.
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub offset: Option<usize>,
+
+    /// The maximum number of [Indexes](Index) returned.
+    ///
+    /// If the value of the parameter `limit` is `n`, there will never be more than `n` indexes in the response.
+    /// This is helpful for pagination.
+    ///
+    /// Example: If you don't want to get more than two indexes, set limit to `2`.
+    ///
+    /// **Default: `20`**
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub limit: Option<usize>,
+}
+
+impl<'a, Http: HttpClient> IndexesQuery<'a, Http> {
+    #[must_use]
+    pub fn new(client: &'a Client<Http>) -> IndexesQuery<'a, Http> {
+        IndexesQuery {
+            client,
+            offset: None,
+            limit: None,
+        }
+    }
+
+    /// Specify the offset.
+    ///
+    /// # Example
+    ///
+    /// ```
+    /// # use meilisearch_sdk::{client::*, indexes::*};
+    /// #
+    /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700");
+    /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey");
+    /// #
+    /// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async {
+    /// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap();
+    /// # let index = client
+    /// #     .create_index("index_query_with_offset", None)
+    /// #     .await
+    /// #     .unwrap()
+    /// #     .wait_for_completion(&client, None, None)
+    /// #     .await
+    /// #     .unwrap()
+    /// #     // Once the task finished, we try to create an `Index` out of it
+    /// #     .try_make_index(&client)
+    /// #     .unwrap();
+    /// let mut indexes = IndexesQuery::new(&client)
+    ///     .with_offset(1)
+    ///     .execute().await.unwrap();
+    ///
+    /// assert_eq!(indexes.offset, 1);
+    /// # index.delete().await.unwrap().wait_for_completion(&client, None, None).await.unwrap();
+    /// # });
+    /// ```
+    pub fn with_offset(&mut self, offset: usize) -> &mut IndexesQuery<'a, Http> {
+        self.offset = Some(offset);
+        self
+    }
+
+    /// Specify the maximum number of [Indexes](Index) to return.
+    ///
+    /// # Example
+    ///
+    /// ```
+    /// # use meilisearch_sdk::{client::*, indexes::*};
+    /// #
+    /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700");
+    /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey");
+    /// #
+    /// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async {
+    /// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap();
+    /// # let index = client
+    /// #     .create_index("index_query_with_limit", None)
+    /// #     .await
+    /// #     .unwrap()
+    /// #     .wait_for_completion(&client, None, None)
+    /// #     .await
+    /// #     .unwrap()
+    /// #     // Once the task finished, we try to create an `Index` out of it
+    /// #     .try_make_index(&client)
+    /// #     .unwrap();
+    /// let mut indexes = IndexesQuery::new(&client)
+    ///     .with_limit(1)
+    ///     .execute().await.unwrap();
+    ///
+    /// assert_eq!(indexes.results.len(), 1);
+    /// # index.delete().await.unwrap().wait_for_completion(&client, None, None).await.unwrap();
+    /// # });
+    /// ```
+    pub fn with_limit(&mut self, limit: usize) -> &mut IndexesQuery<'a, Http> {
+        self.limit = Some(limit);
+        self
+    }
+
+    /// Get [Indexes](Index).
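+    ///
+    /// This delegates to [`Client::list_all_indexes_with`], passing this query along.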
+    ///
+    /// # Example
+    ///
+    /// ```
+    /// # use meilisearch_sdk::{indexes::IndexesQuery, client::Client};
+    /// #
+    /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700");
+    /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey");
+    /// #
+    /// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async {
+    /// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap();
+    /// # let index = client
+    /// #     .create_index("index_query_with_execute", None)
+    /// #     .await
+    /// #     .unwrap()
+    /// #     .wait_for_completion(&client, None, None)
+    /// #     .await
+    /// #     .unwrap()
+    /// #     // Once the task finished, we try to create an `Index` out of it
+    /// #     .try_make_index(&client)
+    /// #     .unwrap();
+    /// let mut indexes = IndexesQuery::new(&client)
+    ///     .with_limit(1)
+    ///     .execute().await.unwrap();
+    ///
+    /// assert_eq!(indexes.results.len(), 1);
+    /// # index.delete().await.unwrap().wait_for_completion(&client, None, None).await.unwrap();
+    /// # });
+    /// ```
+    pub async fn execute(&self) -> Result<IndexesResults<Http>, Error> {
+        self.client.list_all_indexes_with(self).await
+    }
+}
+
+#[derive(Debug, Clone)]
+pub struct IndexesResults<Http: HttpClient> {
+    pub results: Vec<Index<Http>>,
+    pub limit: u32,
+    pub offset: u32,
+    pub total: u32,
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    use big_s::S;
+    use meilisearch_test_macro::meilisearch_test;
+    use serde_json::json;
+
+    #[meilisearch_test]
+    async fn test_from_value(client: Client) {
+        let t = OffsetDateTime::now_utc();
+        let trfc3339 = t
+            .format(&time::format_description::well_known::Rfc3339)
+            .unwrap();
+
+        let value = json!({
+            "createdAt": &trfc3339,
+            "primaryKey": null,
+            "uid": "test_from_value",
+            "updatedAt": &trfc3339,
+        });
+
+        let idx = Index {
+            uid: S("test_from_value"),
+            primary_key: None,
+            created_at: Some(t),
+            updated_at: Some(t),
+            client: client.clone(),
+        };
+
+        let res = Index::from_value(value, client).unwrap();
+
+        assert_eq!(res.updated_at, idx.updated_at);
+        assert_eq!(res.created_at, idx.created_at);
+        assert_eq!(res.uid, idx.uid);
+        assert_eq!(res.primary_key, idx.primary_key);
+        assert_eq!(res.client.host, idx.client.host);
+        assert_eq!(res.client.api_key, idx.client.api_key);
+    }
+
+    #[meilisearch_test]
+    async fn test_fetch_info(mut index: Index) {
+        let res = index.fetch_info().await;
+        assert!(res.is_ok());
+        assert!(index.updated_at.is_some());
+        assert!(index.created_at.is_some());
+        assert!(index.primary_key.is_none());
+    }
+
+    #[meilisearch_test]
+    async fn test_get_documents(index: Index) {
+        #[derive(Debug, Serialize, Deserialize, PartialEq)]
+        struct Object {
+            id: usize,
+            value: String,
+            kind: String,
+        }
+        let res = index.get_documents::<Object>().await.unwrap();
+
+        assert_eq!(res.limit, 20);
+    }
+
+    #[meilisearch_test]
+    async fn test_get_documents_with(index: Index) {
+        #[derive(Debug, Serialize, Deserialize, PartialEq)]
+        struct Object {
+            id: usize,
+            value: String,
+            kind: String,
+        }
+
+        let mut documents_query = DocumentsQuery::new(&index);
+        documents_query.with_limit(1).with_offset(2);
+
+        let res = index
+            .get_documents_with::<Object>(&documents_query)
+            .await
+            .unwrap();
+
+        assert_eq!(res.limit, 1);
+        assert_eq!(res.offset, 2);
+    }
+
+    #[meilisearch_test]
+    async fn test_update_document_json(client: Client, index: Index) -> Result<(), Error> {
+        let old_json = [
+            json!({ "id": 1, "body": "doggo" }),
+            json!({ "id": 2, "body": "catto" }),
+        ];
+        let updated_json = [
+            json!({ "id": 1, "second_body": "second_doggo" }),
+            json!({ "id": 2, "second_body": "second_catto" }),
+        ];
+
+        let task = index
+            .add_documents(&old_json, Some("id"))
+            .await
+            .unwrap()
+            .wait_for_completion(&client, None, None)
+            .await
+            .unwrap();
+        let _ = index.get_task(task).await?;
+
+        let task = index
+            .add_or_update(&updated_json, None)
+            .await
+            .unwrap()
+            .wait_for_completion(&client, None, None)
+            .await
+            .unwrap();
+
+        let status = index.get_task(task).await?;
+        let elements = index.get_documents::<serde_json::Value>().await.unwrap();
+
+        assert!(matches!(status, Task::Succeeded { .. }));
+        assert_eq!(elements.results.len(), 2);
+
+        let expected_result = vec![
+            json!({"body": "doggo", "id": 1, "second_body": "second_doggo"}),
+            json!({"body": "catto", "id": 2, "second_body": "second_catto"}),
+        ];
+
+        assert_eq!(elements.results, expected_result);
+
+        Ok(())
+    }
+
+    #[meilisearch_test]
+    async fn test_add_documents_ndjson(client: Client, index: Index) -> Result<(), Error> {
+        let ndjson = r#"{ "id": 1, "body": "doggo" }{ "id": 2, "body": "catto" }"#.as_bytes();
+
+        let task = index
+            .add_documents_ndjson(ndjson, Some("id"))
+            .await?
+            .wait_for_completion(&client, None, None)
+            .await?;
+
+        let status = index.get_task(task).await?;
+        let elements = index.get_documents::<serde_json::Value>().await.unwrap();
+        assert!(matches!(status, Task::Succeeded { .. }));
+        assert_eq!(elements.results.len(), 2);
+
+        Ok(())
+    }
+
+    #[meilisearch_test]
+    async fn test_update_documents_ndjson(client: Client, index: Index) -> Result<(), Error> {
+        let old_ndjson = r#"{ "id": 1, "body": "doggo" }{ "id": 2, "body": "catto" }"#.as_bytes();
+        let updated_ndjson =
+            r#"{ "id": 1, "second_body": "second_doggo" }{ "id": 2, "second_body": "second_catto" }"#.as_bytes();
+        // Add the first ndjson documents
+        let task = index
+            .add_documents_ndjson(old_ndjson, Some("id"))
+            .await?
+            .wait_for_completion(&client, None, None)
+            .await?;
+        let _ = index.get_task(task).await?;
+
+        // Update via the ndjson documents
+        let task = index
+            .update_documents_ndjson(updated_ndjson, Some("id"))
+            .await?
+            .wait_for_completion(&client, None, None)
+            .await?;
+
+        let status = index.get_task(task).await?;
+        let elements = index.get_documents::<serde_json::Value>().await.unwrap();
+
+        assert!(matches!(status, Task::Succeeded { .. }));
+        assert_eq!(elements.results.len(), 2);
+
+        let expected_result = vec![
+            json!({"body": "doggo", "id": 1, "second_body": "second_doggo"}),
+            json!({"body": "catto", "id": 2, "second_body": "second_catto"}),
+        ];
+
+        assert_eq!(elements.results, expected_result);
+
+        Ok(())
+    }
+
+    #[meilisearch_test]
+    async fn test_add_documents_csv(client: Client, index: Index) -> Result<(), Error> {
+        let csv_input = "id,body\n1,\"doggo\"\n2,\"catto\"".as_bytes();
+
+        let task = index
+            .add_documents_csv(csv_input, Some("id"))
+            .await?
+            .wait_for_completion(&client, None, None)
+            .await?;
+
+        let status = index.get_task(task).await?;
+        let elements = index.get_documents::<serde_json::Value>().await.unwrap();
+        assert!(matches!(status, Task::Succeeded { .. }));
+        assert_eq!(elements.results.len(), 2);
+
+        Ok(())
+    }
+
+    #[meilisearch_test]
+    async fn test_update_documents_csv(client: Client, index: Index) -> Result<(), Error> {
+        let old_csv = "id,body\n1,\"doggo\"\n2,\"catto\"".as_bytes();
+        let updated_csv = "id,body\n1,\"new_doggo\"\n2,\"new_catto\"".as_bytes();
+        // Add the first csv documents
+        let task = index
+            .add_documents_csv(old_csv, Some("id"))
+            .await?
+            .wait_for_completion(&client, None, None)
+            .await?;
+        let _ = index.get_task(task).await?;
+
+        // Update via the csv documents
+        let task = index
+            .update_documents_csv(updated_csv, Some("id"))
+            .await?
+            .wait_for_completion(&client, None, None)
+            .await?;
+
+        let status = index.get_task(task).await?;
+        let elements = index.get_documents::<serde_json::Value>().await.unwrap();
+
+        assert!(matches!(status, Task::Succeeded { .. }));
+        assert_eq!(elements.results.len(), 2);
+
+        let expected_result = vec![
+            json!({"body": "new_doggo", "id": "1"}),
+            json!({"body": "new_catto", "id": "2"}),
+        ];
+
+        assert_eq!(elements.results, expected_result);
+
+        Ok(())
+    }
+
+    #[meilisearch_test]
+    async fn test_get_one_task(client: Client, index: Index) -> Result<(), Error> {
+        let task = index
+            .delete_all_documents()
+            .await?
+            .wait_for_completion(&client, None, None)
+            .await?;
+
+        let status = index.get_task(task).await?;
+
+        match status {
+            Task::Enqueued {
+                content:
+                    EnqueuedTask {
+                        index_uid: Some(index_uid),
+                        ..
+                    },
+            }
+            | Task::Processing {
+                content:
+                    ProcessingTask {
+                        index_uid: Some(index_uid),
+                        ..
+                    },
+            }
+            | Task::Failed {
+                content:
+                    FailedTask {
+                        task:
+                            SucceededTask {
+                                index_uid: Some(index_uid),
+                                ..
+                            },
+                        ..
+                    },
+            }
+            | Task::Succeeded {
+                content:
+                    SucceededTask {
+                        index_uid: Some(index_uid),
+                        ..
+                    },
+            } => assert_eq!(index_uid, *index.uid),
+            task => panic!(
+                "The task should have an index_uid that is not null: {:?}",
+                task
+            ),
+        }
+        Ok(())
+    }
+}
diff --git a/backend/vendor/meilisearch-sdk/src/key.rs b/backend/vendor/meilisearch-sdk/src/key.rs
new file mode 100644
index 000000000..bda7eb267
--- /dev/null
+++ b/backend/vendor/meilisearch-sdk/src/key.rs
@@ -0,0 +1,723 @@
+use serde::{Deserialize, Serialize};
+use time::OffsetDateTime;
+
+use crate::{client::Client, errors::Error, request::HttpClient};
+
+/// Represents a [meilisearch key](https://www.meilisearch.com/docs/reference/api/keys#returned-fields).
+///
+/// You can get a [Key] from the [`Client::get_key`] method, or you can create a [Key] with the [`KeyBuilder::new`] or [`Client::create_key`] methods.
+#[derive(Debug, Serialize, Deserialize, Clone)]
+#[serde(rename_all = "camelCase")]
+pub struct Key {
+    #[serde(skip_serializing_if = "Vec::is_empty")]
+    pub actions: Vec<Action>,
+    #[serde(skip_serializing, with = "time::serde::rfc3339")]
+    pub created_at: OffsetDateTime,
+    pub description: Option<String>,
+    pub name: Option<String>,
+    #[serde(with = "time::serde::rfc3339::option")]
+    pub expires_at: Option<OffsetDateTime>,
+    #[serde(skip_serializing_if = "Vec::is_empty")]
+    pub indexes: Vec<String>,
+    #[serde(skip_serializing)]
+    pub key: String,
+    #[serde(skip_serializing)]
+    pub uid: String,
+    #[serde(skip_serializing, with = "time::serde::rfc3339")]
+    pub updated_at: OffsetDateTime,
+}
+
+impl Key {
+    /// Update the description of the [Key].
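+    ///
+    /// Note that this only mutates the [Key] locally; call [`Key::update`] to persist the
+    /// change on the Meilisearch instance.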
+    ///
+    /// # Example
+    ///
+    /// ```
+    /// # use meilisearch_sdk::{key::*, client::Client};
+    /// #
+    /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700");
+    /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey");
+    /// #
+    /// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async {
+    /// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap();
+    /// let description = "My not so little lovely test key".to_string();
+    /// let mut key = KeyBuilder::new()
+    ///     .with_action(Action::DocumentsAdd)
+    ///     .with_index("*")
+    ///     .with_description(&description)
+    ///     .execute(&client).await.unwrap();
+    ///
+    /// assert_eq!(key.description, Some(description));
+    /// # client.delete_key(key).await.unwrap();
+    /// # });
+    /// ```
+    pub fn with_description(&mut self, desc: impl AsRef<str>) -> &mut Key {
+        self.description = Some(desc.as_ref().to_string());
+        self
+    }
+
+    /// Update the name of the [Key].
+    ///
+    /// # Example
+    ///
+    /// ```
+    /// # use meilisearch_sdk::{key::*, client::Client};
+    /// #
+    /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700");
+    /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey");
+    /// #
+    /// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async {
+    /// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap();
+    /// let name = "lovely key".to_string();
+    /// let mut key = KeyBuilder::new()
+    ///     .with_action(Action::DocumentsAdd)
+    ///     .with_index("*")
+    ///     .execute(&client)
+    ///     .await
+    ///     .unwrap();
+    ///
+    /// key.with_name(&name);
+    ///
+    /// assert_eq!(key.name, Some(name));
+    /// # client.delete_key(key).await.unwrap();
+    /// # });
+    /// ```
+    pub fn with_name(&mut self, name: impl AsRef<str>) -> &mut Key {
+        self.name = Some(name.as_ref().to_string());
+        self
+    }
+
+    /// Update the [Key].
+    ///
+    /// # Example
+    ///
+    /// ```
+    /// # use meilisearch_sdk::{key::KeyBuilder, client::Client};
+    /// #
+    /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700");
+    /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey");
+    /// #
+    /// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async {
+    /// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap();
+    /// let mut key = KeyBuilder::new()
+    ///     .execute(&client)
+    ///     .await
+    ///     .unwrap();
+    ///
+    /// let description = "My not so little lovely test key".to_string();
+    /// key.with_description(&description);
+    ///
+    /// let key = key.update(&client).await.unwrap();
+    ///
+    /// assert_eq!(key.description, Some(description));
+    /// # client.delete_key(key).await.unwrap();
+    /// # });
+    /// ```
+    pub async fn update<Http: HttpClient>(&self, client: &Client<Http>) -> Result<Key, Error> {
+        // only send the description and the name
+        let mut key_update = KeyUpdater::new(self);
+
+        if let Some(ref description) = self.description {
+            key_update.with_description(description);
+        }
+        if let Some(ref name) = self.name {
+            key_update.with_name(name);
+        }
+
+        key_update.execute(client).await
+    }
+
+    /// Delete the [Key].
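+    ///
+    /// This is a convenience method that delegates to [`Client::delete_key`].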
+    ///
+    /// # Example
+    ///
+    /// ```
+    /// # use meilisearch_sdk::{key::KeyBuilder, client::Client};
+    /// #
+    /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700");
+    /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey");
+    /// #
+    /// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async {
+    /// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap();
+    /// let key = KeyBuilder::new()
+    ///     .execute(&client).await.unwrap();
+    ///
+    /// client.delete_key(key).await.unwrap();
+    /// # });
+    /// ```
+    pub async fn delete<Http: HttpClient>(&self, client: &Client<Http>) -> Result<(), Error> {
+        client.delete_key(self).await
+    }
+}
+
+impl AsRef<str> for Key {
+    fn as_ref(&self) -> &str {
+        &self.key
+    }
+}
+
+impl AsRef<Key> for Key {
+    fn as_ref(&self) -> &Key {
+        self
+    }
+}
+
+#[derive(Debug, Serialize, Deserialize, Clone)]
+#[serde(rename_all = "camelCase")]
+pub struct KeyUpdater {
+    pub description: Option<String>,
+    pub name: Option<String>,
+    #[serde(skip_serializing)]
+    pub key: String,
+}
+
+impl KeyUpdater {
+    pub fn new(key_or_uid: impl AsRef<str>) -> KeyUpdater {
+        KeyUpdater {
+            description: None,
+            name: None,
+            key: key_or_uid.as_ref().to_string(),
+        }
+    }
+
+    /// Update the description of the [Key].
+    ///
+    /// # Example
+    ///
+    /// ```
+    /// # use meilisearch_sdk::{key::*, client::Client};
+    /// #
+    /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700");
+    /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey");
+    /// #
+    /// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async {
+    /// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap();
+    /// let mut new_key = KeyBuilder::new()
+    ///     .execute(&client)
+    ///     .await
+    ///     .unwrap();
+    ///
+    /// let description = "My not so little lovely test key".to_string();
+    /// let mut key_update = KeyUpdater::new(new_key)
+    ///     .with_description(&description)
+    ///     .execute(&client)
+    ///     .await
+    ///     .unwrap();
+    ///
+    /// assert_eq!(key_update.description, Some(description));
+    /// # client.delete_key(key_update).await.unwrap();
+    /// # });
+    /// ```
+    pub fn with_description(&mut self, desc: impl AsRef<str>) -> &mut KeyUpdater {
+        self.description = Some(desc.as_ref().to_string());
+        self
+    }
+
+    /// Update the name of the [Key].
+    ///
+    /// # Example
+    ///
+    /// ```
+    /// # use meilisearch_sdk::{key::*, client::Client};
+    /// #
+    /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700");
+    /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey");
+    /// #
+    /// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async {
+    /// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap();
+    /// let mut new_key = KeyBuilder::new()
+    ///     .execute(&client)
+    ///     .await
+    ///     .unwrap();
+    ///
+    /// let name = "lovely key".to_string();
+    /// let mut key_update = KeyUpdater::new(new_key)
+    ///     .with_name(&name)
+    ///     .execute(&client)
+    ///     .await
+    ///     .unwrap();
+    ///
+    /// assert_eq!(key_update.name, Some(name));
+    /// # client.delete_key(key_update).await.unwrap();
+    /// # });
+    /// ```
+    pub fn with_name(&mut self, name: impl AsRef<str>) -> &mut KeyUpdater {
+        self.name = Some(name.as_ref().to_string());
+        self
+    }
+
+    /// Update a [Key] using the [`KeyUpdater`].
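+    ///
+    /// Only the `description` and `name` of a key can be changed after it has been created;
+    /// all other fields are immutable.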
+    ///
+    /// # Example
+    ///
+    /// ```
+    /// # use meilisearch_sdk::{key::*, client::Client};
+    /// #
+    /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700");
+    /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey");
+    /// #
+    /// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async {
+    /// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap();
+    /// let description = "My little lovely test key".to_string();
+    /// let key = KeyBuilder::new()
+    ///     .execute(&client).await.unwrap();
+    ///
+    /// let mut key_update = KeyUpdater::new(&key.key);
+    /// key_update.with_description(&description).execute(&client).await.unwrap();
+    ///
+    /// assert_eq!(key_update.description, Some(description));
+    /// # client.delete_key(key).await.unwrap();
+    /// # });
+    /// ```
+    pub async fn execute<Http: HttpClient>(&self, client: &Client<Http>) -> Result<Key, Error> {
+        client.update_key(self).await
+    }
+}
+
+impl AsRef<str> for KeyUpdater {
+    fn as_ref(&self) -> &str {
+        &self.key
+    }
+}
+
+impl AsRef<KeyUpdater> for KeyUpdater {
+    fn as_ref(&self) -> &KeyUpdater {
+        self
+    }
+}
+
+#[derive(Debug, Serialize, Clone, Default)]
+#[serde(rename_all = "camelCase")]
+pub struct KeysQuery {
+    /// The number of keys to skip.
+    ///
+    /// If the value of the parameter `offset` is `n`, the `n` first keys will not be returned.
+    /// This is helpful for pagination.
+    ///
+    /// Example: If you want to skip the first key, set offset to `1`.
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub offset: Option<usize>,
+    /// The maximum number of keys returned.
+    ///
+    /// If the value of the parameter `limit` is `n`, there will never be more than `n` keys in the response.
+    /// This is helpful for pagination.
+    ///
+    /// Example: If you don't want to get more than two keys, set limit to `2`.
+    ///
+    /// **Default: `20`**
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub limit: Option<usize>,
+}
+
+impl KeysQuery {
+    /// Create an empty [`KeysQuery`].
+    ///
+    /// # Example
+    ///
+    /// ```
+    /// # use meilisearch_sdk::{key::KeysQuery};
+    /// let builder = KeysQuery::new();
+    /// ```
+    #[must_use]
+    pub fn new() -> KeysQuery {
+        Self::default()
+    }
+
+    /// Specify the offset.
+    ///
+    /// # Example
+    ///
+    /// ```
+    /// # use meilisearch_sdk::{key::*, client::Client};
+    /// #
+    /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700");
+    /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey");
+    /// #
+    /// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async {
+    /// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap();
+    /// let mut keys = KeysQuery::new()
+    ///     .with_offset(1)
+    ///     .execute(&client).await.unwrap();
+    ///
+    /// assert_eq!(keys.offset, 1);
+    /// # });
+    /// ```
+    pub fn with_offset(&mut self, offset: usize) -> &mut KeysQuery {
+        self.offset = Some(offset);
+        self
+    }
+
+    /// Specify the maximum number of keys to return.
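+    ///
+    /// If not specified, Meilisearch falls back to its default `limit` of `20` keys.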
+    ///
+    /// # Example
+    ///
+    /// ```
+    /// # use meilisearch_sdk::{key::*, client::Client};
+    /// #
+    /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700");
+    /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey");
+    /// #
+    /// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async {
+    /// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap();
+    /// let mut keys = KeysQuery::new()
+    ///     .with_limit(1)
+    ///     .execute(&client).await.unwrap();
+    ///
+    /// assert_eq!(keys.results.len(), 1);
+    /// # });
+    /// ```
+    pub fn with_limit(&mut self, limit: usize) -> &mut KeysQuery {
+        self.limit = Some(limit);
+        self
+    }
+
+    /// Get [Key]s.
+    ///
+    /// # Example
+    ///
+    /// ```
+    /// # use meilisearch_sdk::{key::*, client::Client};
+    /// #
+    /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700");
+    /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey");
+    /// #
+    /// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async {
+    /// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap();
+    /// let mut keys = KeysQuery::new()
+    ///     .with_limit(1)
+    ///     .execute(&client).await.unwrap();
+    ///
+    /// assert_eq!(keys.results.len(), 1);
+    /// # });
+    /// ```
+    pub async fn execute<Http: HttpClient>(
+        &self,
+        client: &Client<Http>,
+    ) -> Result<KeysResults, Error> {
+        client.get_keys_with(self).await
+    }
+}
+
+/// The [`KeyBuilder`] is an analog to the [Key] type but without all the fields managed by Meilisearch.
+///
+/// It's used to create a [Key].
+///
+/// # Example
+///
+/// ```
+/// # use meilisearch_sdk::{key::*, client::Client};
+/// #
+/// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700");
+/// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey");
+/// #
+/// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async {
+/// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap();
+/// let description = "My little lovely test key".to_string();
+/// let key = KeyBuilder::new()
+///     .with_description(&description)
+///     .execute(&client).await.unwrap();
+///
+/// assert_eq!(key.description, Some(description));
+/// # client.delete_key(key).await.unwrap();
+/// # });
+/// ```
+#[derive(Debug, Clone, Serialize, Default)]
+#[serde(rename_all = "camelCase")]
+pub struct KeyBuilder {
+    pub actions: Vec<Action>,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub description: Option<String>,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub name: Option<String>,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub uid: Option<String>,
+    #[serde(with = "time::serde::rfc3339::option")]
+    pub expires_at: Option<OffsetDateTime>,
+    pub indexes: Vec<String>,
+}
+
+impl KeyBuilder {
+    /// Create a [`KeyBuilder`].
+    ///
+    /// # Example
+    ///
+    /// ```
+    /// # use meilisearch_sdk::key::KeyBuilder;
+    /// let builder = KeyBuilder::new();
+    /// ```
+    #[must_use]
+    pub fn new() -> KeyBuilder {
+        Self::default()
+    }
+
+    /// Declare a set of actions the [Key] will be able to execute.
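+    ///
+    /// Each [Action] corresponds to one Meilisearch API permission and is serialized as the
+    /// matching string, e.g. `Action::DocumentsAdd` becomes `"documents.add"`.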
+    ///
+    /// # Example
+    ///
+    /// ```
+    /// # use meilisearch_sdk::key::*;
+    /// let mut builder = KeyBuilder::new();
+    /// builder.with_actions(vec![Action::Search, Action::DocumentsAdd]);
+    /// ```
+    pub fn with_actions(&mut self, actions: impl IntoIterator<Item = Action>) -> &mut KeyBuilder {
+        self.actions.extend(actions);
+        self
+    }
+
+    /// Add one action the [Key] will be able to execute.
+    ///
+    /// # Example
+    ///
+    /// ```
+    /// # use meilisearch_sdk::key::*;
+    /// let mut builder = KeyBuilder::new();
+    /// builder.with_action(Action::DocumentsAdd);
+    /// ```
+    pub fn with_action(&mut self, action: Action) -> &mut KeyBuilder {
+        self.actions.push(action);
+        self
+    }
+
+    /// Set the expiration date of the [Key].
+    ///
+    /// # Example
+    ///
+    /// ```
+    /// # use meilisearch_sdk::key::KeyBuilder;
+    /// # use time::{OffsetDateTime, Duration};
+    /// let mut builder = KeyBuilder::new();
+    /// // create a key that expires two weeks from now
+    /// builder.with_expires_at(OffsetDateTime::now_utc() + Duration::WEEK * 2);
+    /// ```
+    pub fn with_expires_at(&mut self, expires_at: OffsetDateTime) -> &mut KeyBuilder {
+        self.expires_at = Some(expires_at);
+        self
+    }
+
+    /// Set the indexes the [Key] can manage.
+    ///
+    /// # Example
+    ///
+    /// ```
+    /// # use meilisearch_sdk::{key::KeyBuilder, client::Client};
+    /// #
+    /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700");
+    /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey");
+    /// #
+    /// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async {
+    /// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap();
+    /// let mut key = KeyBuilder::new()
+    ///     .with_indexes(vec!["test", "movies"])
+    ///     .execute(&client)
+    ///     .await
+    ///     .unwrap();
+    ///
+    /// assert_eq!(vec!["test", "movies"], key.indexes);
+    /// # client.delete_key(key).await.unwrap();
+    /// # });
+    /// ```
+    pub fn with_indexes(
+        &mut self,
+        indexes: impl IntoIterator<Item = impl AsRef<str>>,
+    ) -> &mut KeyBuilder {
+        self.indexes = indexes
+            .into_iter()
+            .map(|index| index.as_ref().to_string())
+            .collect();
+        self
+    }
+
+    /// Add one index the [Key] can manage.
+    ///
+    /// # Example
+    ///
+    /// ```
+    /// # use meilisearch_sdk::key::KeyBuilder;
+    /// let mut builder = KeyBuilder::new();
+    /// builder.with_index("test");
+    /// ```
+    pub fn with_index(&mut self, index: impl AsRef<str>) -> &mut KeyBuilder {
+        self.indexes.push(index.as_ref().to_string());
+        self
+    }
+
+    /// Add a description to the [Key].
+    ///
+    /// # Example
+    ///
+    /// ```
+    /// # use meilisearch_sdk::{key::*, client::Client};
+    /// #
+    /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700");
+    /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey");
+    /// #
+    /// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async {
+    /// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap();
+    /// let description = "My not so little lovely test key".to_string();
+    /// let mut key = KeyBuilder::new()
+    ///     .with_description(&description)
+    ///     .execute(&client).await.unwrap();
+    ///
+    /// assert_eq!(key.description, Some(description));
+    /// # client.delete_key(key).await.unwrap();
+    /// # });
+    /// ```
+    pub fn with_description(&mut self, desc: impl AsRef<str>) -> &mut KeyBuilder {
+        self.description = Some(desc.as_ref().to_string());
+        self
+    }
+
+    /// Add a name to the [Key].
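+    ///
+    /// The name is purely informative and has no effect on the key's permissions.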
+    ///
+    /// # Example
+    ///
+    /// ```
+    /// # use meilisearch_sdk::{key::*, client::Client};
+    /// #
+    /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700");
+    /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey");
+    /// #
+    /// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async {
+    /// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap();
+    /// let name = "lovely key".to_string();
+    /// let mut key = KeyBuilder::new()
+    ///     .with_name(&name)
+    ///     .execute(&client).await.unwrap();
+    ///
+    /// assert_eq!(key.name, Some(name));
+    /// # client.delete_key(key).await.unwrap();
+    /// # });
+    /// ```
+    pub fn with_name(&mut self, name: impl AsRef<str>) -> &mut KeyBuilder {
+        self.name = Some(name.as_ref().to_string());
+        self
+    }
+
+    /// Add a uid to the [Key].
+    ///
+    /// # Example
+    ///
+    /// ```
+    /// # use meilisearch_sdk::{key::*, client::Client};
+    /// #
+    /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700");
+    /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey");
+    /// #
+    /// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async {
+    /// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap();
+    /// let uid = "93bcd7fb-2196-4fd9-acb7-3fca8a96e78f".to_string();
+    /// let mut key = KeyBuilder::new()
+    ///     .with_uid(&uid)
+    ///     .execute(&client).await.unwrap();
+    ///
+    /// assert_eq!(key.uid, uid);
+    /// # client.delete_key(key).await.unwrap();
+    /// # });
+    /// ```
+    pub fn with_uid(&mut self, uid: impl AsRef<str>) -> &mut KeyBuilder {
+        self.uid = Some(uid.as_ref().to_string());
+        self
+    }
+
+    /// Create a [Key] from the builder.
+    ///
+    /// # Example
+    ///
+    /// ```
+    /// # use meilisearch_sdk::{key::KeyBuilder, client::Client};
+    /// #
+    /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700");
+    /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey");
+    /// #
+    /// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async {
+    /// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap();
+    /// let description = "My little lovely test key".to_string();
+    /// let key = KeyBuilder::new()
+    ///     .with_description(&description)
+    ///     .execute(&client).await.unwrap();
+    ///
+    /// assert_eq!(key.description, Some(description));
+    /// # client.delete_key(key).await.unwrap();
+    /// # });
+    /// ```
+    pub async fn execute<Http: HttpClient>(&self, client: &Client<Http>) -> Result<Key, Error> {
+        client.create_key(self).await
+    }
+}
+
+impl AsRef<KeyBuilder> for KeyBuilder {
+    fn as_ref(&self) -> &KeyBuilder {
+        self
+    }
+}
+
+#[derive(Debug, Copy, Clone, Eq, PartialEq, Serialize, Deserialize)]
+pub enum Action {
+    /// Provides access to everything.
+    #[serde(rename = "*")]
+    All,
+    /// Provides access to both [`POST`](https://www.meilisearch.com/docs/reference/api/search.md#search-in-an-index-with-post-route) and [`GET`](https://www.meilisearch.com/docs/reference/api/search.md#search-in-an-index-with-get-route) search endpoints on authorized indexes.
+ #[serde(rename = "search")] + Search, + /// Provides access to the [add documents](https://www.meilisearch.com/docs/reference/api/documents.md#add-or-replace-documents) and [update documents](https://www.meilisearch.com/docs/reference/api/documents.md#add-or-update-documents) endpoints on authorized indexes. + #[serde(rename = "documents.add")] + DocumentsAdd, + /// Provides access to the [get one document](https://www.meilisearch.com/docs/reference/api/documents.md#get-one-document) and [get documents](https://www.meilisearch.com/docs/reference/api/documents.md#get-documents) endpoints on authorized indexes. + #[serde(rename = "documents.get")] + DocumentsGet, + /// Provides access to the [delete one document](https://www.meilisearch.com/docs/reference/api/documents.md#delete-one-document), [delete all documents](https://www.meilisearch.com/docs/reference/api/documents.md#delete-all-documents), and [batch delete](https://www.meilisearch.com/docs/reference/api/documents.md#delete-documents-by-batch) endpoints on authorized indexes. + #[serde(rename = "documents.delete")] + DocumentsDelete, + /// Provides access to the [create index](https://www.meilisearch.com/docs/reference/api/indexes.md#create-an-index) endpoint. + #[serde(rename = "indexes.create")] + IndexesCreate, + /// Provides access to the [get one index](https://www.meilisearch.com/docs/reference/api/indexes.md#get-one-index) and [list all indexes](https://www.meilisearch.com/docs/reference/api/indexes.md#list-all-indexes) endpoints. **Non-authorized `indexes` will be omitted from the response**. + #[serde(rename = "indexes.get")] + IndexesGet, + /// Provides access to the [update index](https://www.meilisearch.com/docs/reference/api/indexes.md#update-an-index) endpoint. + #[serde(rename = "indexes.update")] + IndexesUpdate, + /// Provides access to the [delete index](https://www.meilisearch.com/docs/reference/api/indexes.md#delete-an-index) endpoint. + #[serde(rename = "indexes.delete")] + IndexesDelete, + /// Provides access to the [get one task](https://www.meilisearch.com/docs/reference/api/tasks.md#get-task) and [get all tasks](https://www.meilisearch.com/docs/reference/api/tasks.md#get-all-tasks) endpoints. **Tasks from non-authorized `indexes` will be omitted from the response**. Also provides access to the [get one task by index](https://www.meilisearch.com/docs/reference/api/tasks.md#get-task-by-index) and [get all tasks by index](https://www.meilisearch.com/docs/reference/api/tasks.md#get-all-tasks-by-index) endpoints on authorized indexes. + #[serde(rename = "tasks.get")] + TasksGet, + /// Provides access to the [get settings](https://www.meilisearch.com/docs/reference/api/settings.md#get-settings) endpoint and equivalents for all subroutes on authorized indexes. + #[serde(rename = "settings.get")] + SettingsGet, + /// Provides access to the [update settings](https://www.meilisearch.com/docs/reference/api/settings.md#update-settings) and [reset settings](https://www.meilisearch.com/docs/reference/api/settings.md#reset-settings) endpoints and equivalents for all subroutes on authorized indexes. + #[serde(rename = "settings.update")] + SettingsUpdate, + /// Provides access to the [get stats of an index](https://www.meilisearch.com/docs/reference/api/stats.md#get-stats-of-an-index) endpoint and the [get stats of all indexes](https://www.meilisearch.com/docs/reference/api/stats.md#get-stats-of-all-indexes) endpoint. For the latter, **non-authorized `indexes` are omitted from the response**. 
+ #[serde(rename = "stats.get")] + StatsGet, + /// Provides access to the [create dump](https://www.meilisearch.com/docs/reference/api/dump.md#create-a-dump) endpoint. **Not restricted by `indexes`.** + #[serde(rename = "dumps.create")] + DumpsCreate, + /// Provides access to the [get dump status](https://www.meilisearch.com/docs/reference/api/dump.md#get-dump-status) endpoint. **Not restricted by `indexes`.** + #[serde(rename = "dumps.get")] + DumpsGet, + /// Provides access to the [get Meilisearch version](https://www.meilisearch.com/docs/reference/api/version.md#get-version-of-meilisearch) endpoint. + #[serde(rename = "version")] + Version, + /// Provides access to the [get Key](https://www.meilisearch.com/docs/reference/api/keys#get-one-key) and [get Keys](https://www.meilisearch.com/docs/reference/api/keys#get-all-keys) endpoints. + #[serde(rename = "keys.get")] + KeyGet, + /// Provides access to the [create key](https://www.meilisearch.com/docs/reference/api/keys#create-a-key) endpoint. + #[serde(rename = "keys.create")] + KeyCreate, + /// Provides access to the [update key](https://www.meilisearch.com/docs/reference/api/keys#update-a-key) endpoint. + #[serde(rename = "keys.update")] + KeyUpdate, + /// Provides access to the [delete key](https://www.meilisearch.com/docs/reference/api/keys#delete-a-key) endpoint. + #[serde(rename = "keys.delete")] + KeyDelete, +} + +#[derive(Debug, Clone, Deserialize)] +pub struct KeysResults { + pub results: Vec, + pub limit: u32, + pub offset: u32, +} diff --git a/backend/vendor/meilisearch-sdk/src/lib.rs b/backend/vendor/meilisearch-sdk/src/lib.rs new file mode 100644 index 000000000..96e7bc886 --- /dev/null +++ b/backend/vendor/meilisearch-sdk/src/lib.rs @@ -0,0 +1,280 @@ +//! # 🚀 Getting started +//! +//! ### Add Documents +//! +//! ``` +//! use meilisearch_sdk::client::*; +//! use serde::{Serialize, Deserialize}; +//! use futures::executor::block_on; +//! +//! #[derive(Serialize, Deserialize, Debug)] +//! struct Movie { +//! id: usize, +//! title: String, +//! genres: Vec, +//! } +//! +//! +//! #[tokio::main(flavor = "current_thread")] +//! async fn main() { +//! # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700"); +//! # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey"); +//! // Create a client (without sending any request so that can't fail) +//! let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap(); +//! +//! # let index = client.create_index("movies", None).await.unwrap().wait_for_completion(&client, None, None).await.unwrap().try_make_index(&client).unwrap(); +//! // An index is where the documents are stored. +//! let movies = client.index("movies"); +//! +//! // Add some movies in the index. If the index 'movies' does not exist, Meilisearch creates it when you first add the documents. +//! movies.add_documents(&[ +//! Movie { id: 1, title: String::from("Carol"), genres: vec!["Romance".to_string(), "Drama".to_string()] }, +//! Movie { id: 2, title: String::from("Wonder Woman"), genres: vec!["Action".to_string(), "Adventure".to_string()] }, +//! Movie { id: 3, title: String::from("Life of Pi"), genres: vec!["Adventure".to_string(), "Drama".to_string()] }, +//! Movie { id: 4, title: String::from("Mad Max"), genres: vec!["Adventure".to_string(), "Science Fiction".to_string()] }, +//! Movie { id: 5, title: String::from("Moana"), genres: vec!["Fantasy".to_string(), "Action".to_string()] }, +//! 
Movie { id: 6, title: String::from("Philadelphia"), genres: vec!["Drama".to_string()] }, +//! ], Some("id")).await.unwrap(); +//! # index.delete().await.unwrap().wait_for_completion(&client, None, None).await.unwrap(); +//! } +//! ``` +//! +//! With the `uid`, you can check the status (`enqueued`, `canceled`, `processing`, `succeeded` or `failed`) of your documents addition using the [task](https://www.meilisearch.com/docs/reference/api/tasks#get-task). +//! +//! ### Basic Search +//! +//! ``` +//! # use meilisearch_sdk::client::*; +//! # use serde::{Serialize, Deserialize}; +//! # #[derive(Serialize, Deserialize, Debug)] +//! # struct Movie { +//! # id: usize, +//! # title: String, +//! # genres: Vec, +//! # } +//! # fn main() { tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async { +//! # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700"); +//! # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey"); +//! # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap(); +//! # let movies = client.create_index("movies_2", None).await.unwrap().wait_for_completion(&client, None, None).await.unwrap().try_make_index(&client).unwrap(); +//! // Meilisearch is typo-tolerant: +//! println!("{:?}", client.index("movies_2").search().with_query("caorl").execute::().await.unwrap().hits); +//! # movies.delete().await.unwrap().wait_for_completion(&client, None, None).await.unwrap(); +//! # })} +//! ``` +//! +//! Output: +//! ```text +//! [Movie { id: 1, title: String::from("Carol"), genres: vec!["Romance", "Drama"] }] +//! ``` +//! +//! Json output: +//! ```json +//! { +//! "hits": [{ +//! "id": 1, +//! "title": "Carol", +//! "genres": ["Romance", "Drama"] +//! }], +//! "offset": 0, +//! "limit": 10, +//! "processingTimeMs": 1, +//! "query": "caorl" +//! } +//! ``` +//! +//! ### Custom Search +//! +//! ``` +//! # use meilisearch_sdk::{client::*, search::*}; +//! # use serde::{Serialize, Deserialize}; +//! # #[derive(Serialize, Deserialize, Debug)] +//! # struct Movie { +//! # id: usize, +//! # title: String, +//! # genres: Vec, +//! # } +//! # fn main() { tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async { +//! # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700"); +//! # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey"); +//! # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap(); +//! # let movies = client.create_index("movies_3", None).await.unwrap().wait_for_completion(&client, None, None).await.unwrap().try_make_index(&client).unwrap(); +//! let search_result = client.index("movies_3") +//! .search() +//! .with_query("phil") +//! .with_attributes_to_highlight(Selectors::Some(&["*"])) +//! .execute::() +//! .await +//! .unwrap(); +//! println!("{:?}", search_result.hits); +//! # movies.delete().await.unwrap().wait_for_completion(&client, None, None).await.unwrap(); +//! # })} +//! ``` +//! +//! Json output: +//! ```json +//! { +//! "hits": [ +//! { +//! "id": 6, +//! "title": "Philadelphia", +//! "_formatted": { +//! "id": 6, +//! "title": "Philadelphia", +//! "genre": ["Drama"] +//! } +//! } +//! ], +//! "offset": 0, +//! "limit": 20, +//! "processingTimeMs": 0, +//! "query": "phil" +//! } +//! ``` +//! +//! ### Custom Search With Filters +//! +//! If you want to enable filtering, you must add your attributes to the `filterableAttributes` +//! 
index setting. +//! +//! ``` +//! # use meilisearch_sdk::{client::*}; +//! # use serde::{Serialize, Deserialize}; +//! # fn main() { tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async { +//! # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700"); +//! # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey"); +//! # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap(); +//! # let movies = client.create_index("movies_4", None).await.unwrap().wait_for_completion(&client, None, None).await.unwrap().try_make_index(&client).unwrap(); +//! let filterable_attributes = [ +//! "id", +//! "genres", +//! ]; +//! client.index("movies_4").set_filterable_attributes(&filterable_attributes).await.unwrap(); +//! # movies.delete().await.unwrap().wait_for_completion(&client, None, None).await.unwrap(); +//! # })} +//! ``` +//! +//! You only need to perform this operation once. +//! +//! Note that Meilisearch will rebuild your index whenever you update `filterableAttributes`. Depending on the size of your dataset, this might take time. You can track the process using the [tasks](https://www.meilisearch.com/docs/reference/api/tasks#get-task). +//! +//! Then, you can perform the search: +//! +//! ``` +//! # use meilisearch_sdk::{client::*, search::*}; +//! # use serde::{Serialize, Deserialize}; +//! # #[derive(Serialize, Deserialize, Debug)] +//! # struct Movie { +//! # id: usize, +//! # title: String, +//! # genres: Vec, +//! # } +//! # fn main() { tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async { +//! # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700"); +//! # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey"); +//! # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap(); +//! # let movies = client.create_index("movies_5", None).await.unwrap().wait_for_completion(&client, None, None).await.unwrap().try_make_index(&client).unwrap(); +//! # let filterable_attributes = [ +//! # "id", +//! # "genres" +//! # ]; +//! # movies.set_filterable_attributes(&filterable_attributes).await.unwrap().wait_for_completion(&client, None, None).await.unwrap(); +//! # movies.add_documents(&[ +//! # Movie { id: 1, title: String::from("Carol"), genres: vec!["Romance".to_string(), "Drama".to_string()] }, +//! # Movie { id: 2, title: String::from("Wonder Woman"), genres: vec!["Action".to_string(), "Adventure".to_string()] }, +//! # Movie { id: 3, title: String::from("Life of Pi"), genres: vec!["Adventure".to_string(), "Drama".to_string()] }, +//! # Movie { id: 4, title: String::from("Mad Max"), genres: vec!["Adventure".to_string(), "Science Fiction".to_string()] }, +//! # Movie { id: 5, title: String::from("Moana"), genres: vec!["Fantasy".to_string(), "Action".to_string()] }, +//! # Movie { id: 6, title: String::from("Philadelphia"), genres: vec!["Drama".to_string()] }, +//! # ], Some("id")).await.unwrap().wait_for_completion(&client, None, None).await.unwrap(); +//! let search_result = client.index("movies_5") +//! .search() +//! .with_query("wonder") +//! .with_filter("id > 1 AND genres = Action") +//! .execute::() +//! .await +//! .unwrap(); +//! println!("{:?}", search_result.hits); +//! # movies.delete().await.unwrap().wait_for_completion(&client, None, None).await.unwrap(); +//! # })} +//! ``` +//! +//! Json output: +//! ```json +//! { +//! "hits": [ +//! { +//! 
"id": 2, +//! "title": "Wonder Woman", +//! "genres": ["Action", "Adventure"] +//! } +//! ], +//! "offset": 0, +//! "limit": 20, +//! "estimatedTotalHits": 1, +//! "processingTimeMs": 0, +//! "query": "wonder" +//! } +//! ``` +//! +//! ### Customize the `HttpClient` +//! +//! By default, the SDK uses [`reqwest`](https://docs.rs/reqwest/latest/reqwest/) to make http calls. +//! The SDK lets you customize the http client by implementing the `HttpClient` trait yourself and +//! initializing the `Client` with the `new_with_client` method. +//! You may be interested by the `futures-unsend` feature which lets you specify a non-Send http client. +//! +//! ### Wasm support +//! +//! The SDK supports wasm through reqwest. You'll need to enable the `futures-unsend` feature while importing it, though. +#![warn(clippy::all)] +#![allow(clippy::needless_doctest_main)] + +/// Module containing the [`Client`] struct. +pub mod client; +/// Module representing the [documents] structures. +pub mod documents; +/// Module containing the [dumps] trait. +pub mod dumps; +/// Module containing the [`errors::Error`] struct. +pub mod errors; +/// Module related to runtime and instance features. +pub mod features; +/// Module containing the Index struct. +pub mod indexes; +/// Module containing the [`Key`] struct. +pub mod key; +pub mod request; +/// Module related to search queries and results. +pub mod search; +/// Module containing [`Settings`]. +pub mod settings; +/// Module containing the [snapshots] trait. +pub mod snapshots; +/// Module representing the [`TaskInfo`]s. +pub mod task_info; +/// Module representing the [`Task`]s. +pub mod tasks; +/// Module that generates tenant tokens. +#[cfg(not(target_arch = "wasm32"))] +mod tenant_tokens; +/// Module containing utilizes functions. +mod utils; + +#[cfg(feature = "reqwest")] +pub mod reqwest; + +#[cfg(feature = "reqwest")] +pub type DefaultHttpClient = reqwest::ReqwestClient; + +#[cfg(not(feature = "reqwest"))] +pub type DefaultHttpClient = std::convert::Infallible; + +#[cfg(test)] +/// Support for the `IndexConfig` derive proc macro in the crate's tests. +extern crate self as meilisearch_sdk; +/// Can't assume that the user of proc_macro will have access to `async_trait` crate. So exporting the `async-trait` crate from `meilisearch_sdk` in a hidden module. 
+#[doc(hidden)] +pub mod macro_helper { + pub use async_trait::async_trait; +} diff --git a/backend/vendor/meilisearch-sdk/src/request.rs b/backend/vendor/meilisearch-sdk/src/request.rs new file mode 100644 index 000000000..366e51b9a --- /dev/null +++ b/backend/vendor/meilisearch-sdk/src/request.rs @@ -0,0 +1,177 @@ +use std::convert::Infallible; + +use async_trait::async_trait; +use log::{error, trace, warn}; +use serde::{de::DeserializeOwned, Serialize}; +use serde_json::{from_str, to_vec}; + +use crate::errors::{Error, MeilisearchCommunicationError, MeilisearchError}; + +#[derive(Debug)] +pub enum Method { + Get { query: Q }, + Post { query: Q, body: B }, + Patch { query: Q, body: B }, + Put { query: Q, body: B }, + Delete { query: Q }, +} + +impl Method { + pub fn map_body(self, f: impl Fn(B) -> B2) -> Method { + match self { + Method::Get { query } => Method::Get { query }, + Method::Delete { query } => Method::Delete { query }, + Method::Post { query, body } => Method::Post { + query, + body: f(body), + }, + Method::Patch { query, body } => Method::Patch { + query, + body: f(body), + }, + Method::Put { query, body } => Method::Put { + query, + body: f(body), + }, + } + } + + pub fn query(&self) -> &Q { + match self { + Method::Get { query } => query, + Method::Delete { query } => query, + Method::Post { query, .. } => query, + Method::Put { query, .. } => query, + Method::Patch { query, .. } => query, + } + } + + pub fn body(&self) -> Option<&B> { + match self { + Method::Get { query: _ } | Method::Delete { query: _ } => None, + Method::Post { body, query: _ } => Some(body), + Method::Put { body, query: _ } => Some(body), + Method::Patch { body, query: _ } => Some(body), + } + } + + pub fn into_body(self) -> Option { + match self { + Method::Get { query: _ } | Method::Delete { query: _ } => None, + Method::Post { body, query: _ } => Some(body), + Method::Put { body, query: _ } => Some(body), + Method::Patch { body, query: _ } => Some(body), + } + } +} + +#[cfg_attr(feature = "futures-unsend", async_trait(?Send))] +#[cfg_attr(not(feature = "futures-unsend"), async_trait)] +pub trait HttpClient: Clone + Send + Sync { + async fn request( + &self, + url: &str, + method: Method, + expected_status_code: u16, + ) -> Result + where + Query: Serialize + Send + Sync, + Body: Serialize + Send + Sync, + Output: DeserializeOwned + 'static + Send, + { + use futures::io::Cursor; + + self.stream_request( + url, + method.map_body(|body| Cursor::new(to_vec(&body).unwrap())), + "application/json", + expected_status_code, + ) + .await + } + + async fn stream_request< + Query: Serialize + Send + Sync, + Body: futures_io::AsyncRead + Send + Sync + 'static, + Output: DeserializeOwned + 'static, + >( + &self, + url: &str, + method: Method, + content_type: &str, + expected_status_code: u16, + ) -> Result; +} + +pub fn parse_response( + status_code: u16, + expected_status_code: u16, + body: &str, + url: String, +) -> Result { + if status_code == expected_status_code { + return match from_str::(body) { + Ok(output) => { + trace!("Request succeed"); + Ok(output) + } + Err(e) => { + error!("Request succeeded but failed to parse response"); + Err(Error::ParseError(e)) + } + }; + } + + warn!( + "Expected response code {}, got {}", + expected_status_code, status_code + ); + + match from_str::(body) { + Ok(e) => Err(Error::from(e)), + Err(e) => { + if status_code >= 400 { + return Err(Error::MeilisearchCommunication( + MeilisearchCommunicationError { + status_code, + message: None, + url, + }, + )); + } + 
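+            // The status code was unexpected but below 400, and the body was
+            // not a Meilisearch error payload either; surface the JSON parse
+            // error instead of inventing a communication error.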
Err(Error::ParseError(e)) + } + } +} + +#[cfg_attr(feature = "futures-unsend", async_trait(?Send))] +#[cfg_attr(not(feature = "futures-unsend"), async_trait)] +impl HttpClient for Infallible { + async fn request( + &self, + _url: &str, + _method: Method, + _expected_status_code: u16, + ) -> Result + where + Query: Serialize + Send + Sync, + Body: Serialize + Send + Sync, + Output: DeserializeOwned + 'static + Send, + { + unreachable!() + } + + async fn stream_request< + Query: Serialize + Send + Sync, + Body: futures_io::AsyncRead + Send + Sync + 'static, + Output: DeserializeOwned + 'static, + >( + &self, + _url: &str, + _method: Method, + _content_type: &str, + _expected_status_code: u16, + ) -> Result { + unreachable!() + } +} diff --git a/backend/vendor/meilisearch-sdk/src/reqwest.rs b/backend/vendor/meilisearch-sdk/src/reqwest.rs new file mode 100644 index 000000000..26cf892aa --- /dev/null +++ b/backend/vendor/meilisearch-sdk/src/reqwest.rs @@ -0,0 +1,173 @@ +use std::{ + pin::Pin, + task::{Context, Poll}, +}; + +use async_trait::async_trait; +use bytes::{Bytes, BytesMut}; +use futures::{AsyncRead, Stream}; +use pin_project_lite::pin_project; +use serde::{de::DeserializeOwned, Serialize}; + +use crate::{ + errors::Error, + request::{parse_response, HttpClient, Method}, +}; + +#[derive(Debug, Clone, Default)] +pub struct ReqwestClient { + client: reqwest::Client, +} + +impl ReqwestClient { + pub fn new(api_key: Option<&str>) -> Result { + use reqwest::{header, ClientBuilder}; + + let builder = ClientBuilder::new(); + let mut headers = header::HeaderMap::new(); + #[cfg(not(target_arch = "wasm32"))] + headers.insert( + header::USER_AGENT, + header::HeaderValue::from_str(&qualified_version()).unwrap(), + ); + #[cfg(target_arch = "wasm32")] + headers.insert( + header::HeaderName::from_static("x-meilisearch-client"), + header::HeaderValue::from_str(&qualified_version()).unwrap(), + ); + + if let Some(api_key) = api_key { + headers.insert( + header::AUTHORIZATION, + header::HeaderValue::from_str(&format!("Bearer {api_key}")).unwrap(), + ); + } + + let builder = builder.default_headers(headers); + let client = builder.build()?; + + Ok(ReqwestClient { client }) + } +} + +#[cfg_attr(feature = "futures-unsend", async_trait(?Send))] +#[cfg_attr(not(feature = "futures-unsend"), async_trait)] +impl HttpClient for ReqwestClient { + async fn stream_request< + Query: Serialize + Send + Sync, + Body: futures_io::AsyncRead + Send + Sync + 'static, + Output: DeserializeOwned + 'static, + >( + &self, + url: &str, + method: Method, + content_type: &str, + expected_status_code: u16, + ) -> Result { + use reqwest::header; + + let query = method.query(); + let query = yaup::to_string(query)?; + + let url = if query.is_empty() { + url.to_string() + } else { + format!("{url}{query}") + }; + + let mut request = self.client.request(verb(&method), &url); + + if let Some(body) = method.into_body() { + // TODO: Currently reqwest doesn't support streaming data in wasm so we need to collect everything in RAM + #[cfg(not(target_arch = "wasm32"))] + { + let stream = ReaderStream::new(body); + let body = reqwest::Body::wrap_stream(stream); + + request = request + .header(header::CONTENT_TYPE, content_type) + .body(body); + } + #[cfg(target_arch = "wasm32")] + { + use futures::{pin_mut, AsyncReadExt}; + + let mut buf = Vec::new(); + pin_mut!(body); + body.read_to_end(&mut buf) + .await + .map_err(|err| Error::Other(Box::new(err)))?; + request = request.header(header::CONTENT_TYPE, content_type).body(buf); + } + } + + let 
response = self.client.execute(request.build()?).await?; + let status = response.status().as_u16(); + let mut body = response.text().await?; + + if body.is_empty() { + body = "null".to_string(); + } + + parse_response(status, expected_status_code, &body, url.to_string()) + } +} + +fn verb(method: &Method) -> reqwest::Method { + match method { + Method::Get { .. } => reqwest::Method::GET, + Method::Delete { .. } => reqwest::Method::DELETE, + Method::Post { .. } => reqwest::Method::POST, + Method::Put { .. } => reqwest::Method::PUT, + Method::Patch { .. } => reqwest::Method::PATCH, + } +} + +pub fn qualified_version() -> String { + const VERSION: Option<&str> = option_env!("CARGO_PKG_VERSION"); + + format!("Meilisearch Rust (v{})", VERSION.unwrap_or("unknown")) +} + +pin_project! { + #[derive(Debug)] + pub struct ReaderStream { + #[pin] + reader: R, + buf: BytesMut, + capacity: usize, + } +} + +impl ReaderStream { + pub fn new(reader: R) -> Self { + Self { + reader, + buf: BytesMut::new(), + // 8KiB of capacity, the default capacity used by `BufReader` in the std + capacity: 8 * 1024 * 1024, + } + } +} + +impl Stream for ReaderStream { + type Item = std::io::Result; + + fn poll_next(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll> { + let this = self.as_mut().project(); + + if this.buf.capacity() == 0 { + this.buf.resize(*this.capacity, 0); + } + + match AsyncRead::poll_read(this.reader, cx, this.buf) { + Poll::Pending => Poll::Pending, + Poll::Ready(Err(err)) => Poll::Ready(Some(Err(err))), + Poll::Ready(Ok(0)) => Poll::Ready(None), + Poll::Ready(Ok(i)) => { + let chunk = this.buf.split_to(i); + Poll::Ready(Some(Ok(chunk.freeze()))) + } + } + } +} diff --git a/backend/vendor/meilisearch-sdk/src/search.rs b/backend/vendor/meilisearch-sdk/src/search.rs new file mode 100644 index 000000000..b1e548fc6 --- /dev/null +++ b/backend/vendor/meilisearch-sdk/src/search.rs @@ -0,0 +1,1406 @@ +use crate::{ + client::Client, errors::Error, indexes::Index, request::HttpClient, DefaultHttpClient, +}; +use either::Either; +use serde::{de::DeserializeOwned, Deserialize, Serialize, Serializer}; +use serde_json::{Map, Value}; +use std::collections::HashMap; + +#[derive(Deserialize, Debug, Eq, PartialEq, Clone)] +pub struct MatchRange { + pub start: usize, + pub length: usize, + + /// If the match is somewhere inside a (potentially nested) array, this + /// field is set to the index/indices of the matched element(s). + /// + /// In the simple case, if the field has the value `["foo", "bar"]`, then + /// searching for `ba` will return `indices: Some([1])`. If the value + /// contains multiple nested arrays, the first index describes the most + /// top-level array, and descending from there. For example, if the value is + /// `[{ x: "cat" }, "bear", { y: ["dog", "fox"] }]`, searching for `dog` + /// will return `indices: Some([2, 0])`. + pub indices: Option>, +} + +#[derive(Serialize, Debug, Eq, PartialEq, Clone)] +#[serde(transparent)] +pub struct Filter<'a> { + #[serde(with = "either::serde_untagged")] + inner: Either<&'a str, Vec<&'a str>>, +} + +impl<'a> Filter<'a> { + #[must_use] + pub fn new(inner: Either<&'a str, Vec<&'a str>>) -> Filter<'a> { + Filter { inner } + } +} + +#[derive(Debug, Clone, Serialize)] +pub enum MatchingStrategies { + #[serde(rename = "all")] + ALL, + #[serde(rename = "last")] + LAST, + #[serde(rename = "frequency")] + FREQUENCY, +} + +/// A single result. 
+///
+/// Contains the complete object, optionally the formatted object, and optionally an object that contains information about the matches.
+#[derive(Deserialize, Debug, Clone)]
+pub struct SearchResult<T> {
+    /// The full result.
+    #[serde(flatten)]
+    pub result: T,
+    /// The formatted result.
+    #[serde(rename = "_formatted")]
+    pub formatted_result: Option<Map<String, Value>>,
+    /// The object that contains information about the matches.
+    #[serde(rename = "_matchesPosition")]
+    pub matches_position: Option<HashMap<String, Vec<MatchRange>>>,
+    /// The relevancy score of the match.
+    #[serde(rename = "_rankingScore")]
+    pub ranking_score: Option<f64>,
+    #[serde(rename = "_rankingScoreDetails")]
+    pub ranking_score_details: Option<Map<String, Value>>,
+    /// Only returned for federated multi search.
+    #[serde(rename = "_federation")]
+    pub federation: Option<FederationHitInfo>,
+}
+
+#[derive(Deserialize, Debug, Clone)]
+#[serde(rename_all = "camelCase")]
+pub struct FacetStats {
+    pub min: f64,
+    pub max: f64,
+}
+
+/// A struct containing search results and other information about the search.
+#[derive(Deserialize, Debug, Clone)]
+#[serde(rename_all = "camelCase")]
+pub struct SearchResults<T> {
+    /// Results of the query.
+    pub hits: Vec<SearchResult<T>>,
+    /// Number of documents skipped.
+    pub offset: Option<usize>,
+    /// Number of results returned.
+    pub limit: Option<usize>,
+    /// Estimated total number of matches.
+    pub estimated_total_hits: Option<usize>,
+    /// Current page number.
+    pub page: Option<usize>,
+    /// Maximum number of hits in a page.
+    pub hits_per_page: Option<usize>,
+    /// Exhaustive number of matches.
+    pub total_hits: Option<usize>,
+    /// Exhaustive number of pages.
+    pub total_pages: Option<usize>,
+    /// Distribution of the given facets.
+    pub facet_distribution: Option<HashMap<String, HashMap<String, usize>>>,
+    /// Facet stats of the numerical facets requested in the `facet` search parameter.
+    pub facet_stats: Option<HashMap<String, FacetStats>>,
+    /// Processing time of the query.
+    pub processing_time_ms: usize,
+    /// Query originating the response.
+    pub query: String,
+    /// Index uid on which the search was made.
+    pub index_uid: Option<String>,
+}
+
+fn serialize_with_wildcard<S: Serializer, T: Serialize>(
+    data: &Option<Selectors<T>>,
+    s: S,
+) -> Result<S::Ok, S::Error> {
+    match data {
+        Some(Selectors::All) => ["*"].serialize(s),
+        Some(Selectors::Some(data)) => data.serialize(s),
+        None => s.serialize_none(),
+    }
+}
+
+fn serialize_attributes_to_crop_with_wildcard<S: Serializer>(
+    data: &Option<Selectors<&[AttributeToCrop]>>,
+    s: S,
+) -> Result<S::Ok, S::Error> {
+    match data {
+        Some(Selectors::All) => ["*"].serialize(s),
+        Some(Selectors::Some(data)) => {
+            let results = data
+                .iter()
+                .map(|(name, value)| {
+                    let mut result = (*name).to_string();
+                    if let Some(value) = value {
+                        result.push(':');
+                        result.push_str(value.to_string().as_str());
+                    }
+                    result
+                })
+                .collect::<Vec<_>>();
+            results.serialize(s)
+        }
+        None => s.serialize_none(),
+    }
+}
+
+/// Some list fields in a `SearchQuery` can be set to a wildcard value.
+///
+/// This structure allows you to choose between the wildcard value and an exhaustive list of selectors.
+#[derive(Debug, Clone)]
+pub enum Selectors<T> {
+    /// A list of selectors.
+    Some(T),
+    /// The wildcard.
+    All,
+}
+
+type AttributeToCrop<'a> = (&'a str, Option<usize>);
+
+/// A struct representing a query.
+///
+/// You can add search parameters using the builder syntax.
+///
+/// See [this page](https://www.meilisearch.com/docs/reference/api/search#query-q) for the official list and description of all parameters.
+/// +/// # Examples +/// +/// ``` +/// # use serde::{Serialize, Deserialize}; +/// # use meilisearch_sdk::{client::Client, search::*, indexes::Index}; +/// # +/// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700"); +/// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey"); +/// # +/// #[derive(Serialize, Deserialize, Debug)] +/// struct Movie { +/// name: String, +/// description: String, +/// } +/// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async { +/// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap(); +/// # let index = client +/// # .create_index("search_query_builder", None) +/// # .await +/// # .unwrap() +/// # .wait_for_completion(&client, None, None) +/// # .await.unwrap() +/// # .try_make_index(&client) +/// # .unwrap(); +/// +/// let mut res = SearchQuery::new(&index) +/// .with_query("space") +/// .with_offset(42) +/// .with_limit(21) +/// .execute::() +/// .await +/// .unwrap(); +/// +/// assert_eq!(res.limit, Some(21)); +/// # index.delete().await.unwrap().wait_for_completion(&client, None, None).await.unwrap(); +/// # }); +/// ``` +/// +/// ``` +/// # use meilisearch_sdk::{client::Client, search::*, indexes::Index}; +/// # +/// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700"); +/// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey"); +/// # +/// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap(); +/// # let index = client.index("search_query_builder_build"); +/// let query = index.search() +/// .with_query("space") +/// .with_offset(42) +/// .with_limit(21) +/// .build(); // you can also execute() instead of build() +/// ``` +#[derive(Debug, Serialize, Clone)] +#[serde(rename_all = "camelCase")] +pub struct SearchQuery<'a, Http: HttpClient> { + #[serde(skip_serializing)] + index: &'a Index, + /// The text that will be searched for among the documents. + #[serde(skip_serializing_if = "Option::is_none")] + #[serde(rename = "q")] + pub query: Option<&'a str>, + /// The number of documents to skip. + /// + /// If the value of the parameter `offset` is `n`, the `n` first documents (ordered by relevance) will not be returned. + /// This is helpful for pagination. + /// + /// Example: If you want to skip the first document, set offset to `1`. + #[serde(skip_serializing_if = "Option::is_none")] + pub offset: Option, + /// The maximum number of documents returned. + /// + /// If the value of the parameter `limit` is `n`, there will never be more than `n` documents in the response. + /// This is helpful for pagination. + /// + /// Example: If you don't want to get more than two documents, set limit to `2`. + /// + /// **Default: `20`** + #[serde(skip_serializing_if = "Option::is_none")] + pub limit: Option, + /// The page number on which you paginate. + /// + /// Pagination starts at 1. If page is 0, no results are returned. + /// + /// **Default: None unless `hits_per_page` is defined, in which case page is `1`** + #[serde(skip_serializing_if = "Option::is_none")] + pub page: Option, + /// The maximum number of results in a page. A page can contain less results than the number of hits_per_page. + /// + /// **Default: None unless `page` is defined, in which case `20`** + #[serde(skip_serializing_if = "Option::is_none")] + pub hits_per_page: Option, + /// Filter applied to documents. 
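+    ///
+    /// For example, `"id > 1 AND genres = Action"` (the filter used in the crate-level docs above).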
+ /// + /// Read the [dedicated guide](https://www.meilisearch.com/docs/learn/advanced/filtering) to learn the syntax. + #[serde(skip_serializing_if = "Option::is_none")] + pub filter: Option>, + /// Facets for which to retrieve the matching count. + /// + /// Can be set to a [wildcard value](enum.Selectors.html#variant.All) that will select all existing attributes. + /// + /// **Default: all attributes found in the documents.** + #[serde(skip_serializing_if = "Option::is_none")] + #[serde(serialize_with = "serialize_with_wildcard")] + pub facets: Option>, + /// Attributes to sort. + #[serde(skip_serializing_if = "Option::is_none")] + pub sort: Option<&'a [&'a str]>, + /// Attributes to perform the search on. + /// + /// Specify the subset of searchableAttributes for a search without modifying Meilisearch’s index settings. + /// + /// **Default: all searchable attributes found in the documents.** + #[serde(skip_serializing_if = "Option::is_none")] + pub attributes_to_search_on: Option<&'a [&'a str]>, + /// Attributes to display in the returned documents. + /// + /// Can be set to a [wildcard value](enum.Selectors.html#variant.All) that will select all existing attributes. + /// + /// **Default: all attributes found in the documents.** + #[serde(skip_serializing_if = "Option::is_none")] + #[serde(serialize_with = "serialize_with_wildcard")] + pub attributes_to_retrieve: Option>, + /// Attributes whose values have to be cropped. + /// + /// Attributes are composed by the attribute name and an optional `usize` that overwrites the `crop_length` parameter. + /// + /// Can be set to a [wildcard value](enum.Selectors.html#variant.All) that will select all existing attributes. + #[serde(skip_serializing_if = "Option::is_none")] + #[serde(serialize_with = "serialize_attributes_to_crop_with_wildcard")] + pub attributes_to_crop: Option]>>, + /// Maximum number of words including the matched query term(s) contained in the returned cropped value(s). + /// + /// See [attributes_to_crop](#structfield.attributes_to_crop). + /// + /// **Default: `10`** + #[serde(skip_serializing_if = "Option::is_none")] + pub crop_length: Option, + /// Marker at the start and the end of a cropped value. + /// + /// ex: `...middle of a crop...` + /// + /// **Default: `...`** + #[serde(skip_serializing_if = "Option::is_none")] + pub crop_marker: Option<&'a str>, + /// Attributes whose values will contain **highlighted matching terms**. + /// + /// Can be set to a [wildcard value](enum.Selectors.html#variant.All) that will select all existing attributes. + #[serde(skip_serializing_if = "Option::is_none")] + #[serde(serialize_with = "serialize_with_wildcard")] + pub attributes_to_highlight: Option>, + /// Tag in front of a highlighted term. + /// + /// ex: `hello world` + /// + /// **Default: ``** + #[serde(skip_serializing_if = "Option::is_none")] + pub highlight_pre_tag: Option<&'a str>, + /// Tag after a highlighted term. + /// + /// ex: `hello world` + /// + /// **Default: ``** + #[serde(skip_serializing_if = "Option::is_none")] + pub highlight_post_tag: Option<&'a str>, + /// Defines whether an object that contains information about the matches should be returned or not. + /// + /// **Default: `false`** + #[serde(skip_serializing_if = "Option::is_none")] + pub show_matches_position: Option, + + /// Defines whether to show the relevancy score of the match. 
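+    ///
+    /// When enabled, each hit carries the score as `_rankingScore` (exposed as `SearchResult::ranking_score`).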
+ /// + /// **Default: `false`** + #[serde(skip_serializing_if = "Option::is_none")] + pub show_ranking_score: Option, + + ///Adds a detailed global ranking score field to each document. + /// + /// **Default: `false`** + #[serde(skip_serializing_if = "Option::is_none")] + pub show_ranking_score_details: Option, + + /// Defines the strategy on how to handle queries containing multiple words. + #[serde(skip_serializing_if = "Option::is_none")] + pub matching_strategy: Option, + + ///Defines one attribute in the filterableAttributes list as a distinct attribute. + #[serde(skip_serializing_if = "Option::is_none")] + pub distinct: Option<&'a str>, + + ///Excludes results below the specified ranking score. + #[serde(skip_serializing_if = "Option::is_none")] + pub ranking_score_threshold: Option, + + /// Defines the language of the search query. + #[serde(skip_serializing_if = "Option::is_none")] + pub locales: Option<&'a [&'a str]>, + + #[serde(skip_serializing_if = "Option::is_none")] + pub(crate) index_uid: Option<&'a str>, + + #[serde(skip_serializing_if = "Option::is_none")] + pub(crate) federation_options: Option, +} + +#[derive(Debug, Serialize, Clone)] +#[serde(rename_all = "camelCase")] +pub struct QueryFederationOptions { + #[serde(skip_serializing_if = "Option::is_none")] + pub weight: Option, +} + +#[allow(missing_docs)] +impl<'a, Http: HttpClient> SearchQuery<'a, Http> { + #[must_use] + pub fn new(index: &'a Index) -> SearchQuery<'a, Http> { + SearchQuery { + index, + query: None, + offset: None, + limit: None, + page: None, + hits_per_page: None, + filter: None, + sort: None, + facets: None, + attributes_to_search_on: None, + attributes_to_retrieve: None, + attributes_to_crop: None, + crop_length: None, + crop_marker: None, + attributes_to_highlight: None, + highlight_pre_tag: None, + highlight_post_tag: None, + show_matches_position: None, + show_ranking_score: None, + show_ranking_score_details: None, + matching_strategy: None, + index_uid: None, + distinct: None, + ranking_score_threshold: None, + locales: None, + federation_options: None, + } + } + pub fn with_query<'b>(&'b mut self, query: &'a str) -> &'b mut SearchQuery<'a, Http> { + self.query = Some(query); + self + } + + pub fn with_offset<'b>(&'b mut self, offset: usize) -> &'b mut SearchQuery<'a, Http> { + self.offset = Some(offset); + self + } + pub fn with_limit<'b>(&'b mut self, limit: usize) -> &'b mut SearchQuery<'a, Http> { + self.limit = Some(limit); + self + } + /// Add the page number on which to paginate. 
+ /// + /// # Example + /// + /// ``` + /// # use serde::{Serialize, Deserialize}; + /// # use meilisearch_sdk::{client::*, indexes::*, search::*}; + /// # + /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700"); + /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey"); + /// # + /// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async { + /// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap(); + /// # #[derive(Serialize, Deserialize, Debug)] + /// # struct Movie { + /// # name: String, + /// # description: String, + /// # } + /// # client.create_index("search_with_page", None).await.unwrap().wait_for_completion(&client, None, None).await.unwrap(); + /// let mut index = client.index("search_with_page"); + /// + /// let mut query = SearchQuery::new(&index); + /// query.with_query("").with_page(2); + /// let res = query.execute::().await.unwrap(); + /// # index.delete().await.unwrap().wait_for_completion(&client, None, None).await.unwrap(); + /// # }); + /// ``` + pub fn with_page<'b>(&'b mut self, page: usize) -> &'b mut SearchQuery<'a, Http> { + self.page = Some(page); + self + } + + /// Add the maximum number of results per page. + /// + /// # Example + /// + /// ``` + /// # use serde::{Serialize, Deserialize}; + /// # use meilisearch_sdk::{client::*, indexes::*, search::*}; + /// # + /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700"); + /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey"); + /// # + /// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async { + /// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap(); + /// # #[derive(Serialize, Deserialize, Debug)] + /// # struct Movie { + /// # name: String, + /// # description: String, + /// # } + /// # client.create_index("search_with_hits_per_page", None).await.unwrap().wait_for_completion(&client, None, None).await.unwrap(); + /// let mut index = client.index("search_with_hits_per_page"); + /// + /// let mut query = SearchQuery::new(&index); + /// query.with_query("").with_hits_per_page(2); + /// let res = query.execute::().await.unwrap(); + /// # index.delete().await.unwrap().wait_for_completion(&client, None, None).await.unwrap(); + /// # }); + /// ``` + pub fn with_hits_per_page<'b>( + &'b mut self, + hits_per_page: usize, + ) -> &'b mut SearchQuery<'a, Http> { + self.hits_per_page = Some(hits_per_page); + self + } + pub fn with_filter<'b>(&'b mut self, filter: &'a str) -> &'b mut SearchQuery<'a, Http> { + self.filter = Some(Filter::new(Either::Left(filter))); + self + } + pub fn with_array_filter<'b>( + &'b mut self, + filter: Vec<&'a str>, + ) -> &'b mut SearchQuery<'a, Http> { + self.filter = Some(Filter::new(Either::Right(filter))); + self + } + pub fn with_facets<'b>( + &'b mut self, + facets: Selectors<&'a [&'a str]>, + ) -> &'b mut SearchQuery<'a, Http> { + self.facets = Some(facets); + self + } + pub fn with_sort<'b>(&'b mut self, sort: &'a [&'a str]) -> &'b mut SearchQuery<'a, Http> { + self.sort = Some(sort); + self + } + + pub fn with_attributes_to_search_on<'b>( + &'b mut self, + attributes_to_search_on: &'a [&'a str], + ) -> &'b mut SearchQuery<'a, Http> { + self.attributes_to_search_on = Some(attributes_to_search_on); + self + } + pub fn with_attributes_to_retrieve<'b>( + &'b mut self, + attributes_to_retrieve: Selectors<&'a [&'a 
str]>, + ) -> &'b mut SearchQuery<'a, Http> { + self.attributes_to_retrieve = Some(attributes_to_retrieve); + self + } + pub fn with_attributes_to_crop<'b>( + &'b mut self, + attributes_to_crop: Selectors<&'a [(&'a str, Option)]>, + ) -> &'b mut SearchQuery<'a, Http> { + self.attributes_to_crop = Some(attributes_to_crop); + self + } + pub fn with_crop_length<'b>(&'b mut self, crop_length: usize) -> &'b mut SearchQuery<'a, Http> { + self.crop_length = Some(crop_length); + self + } + pub fn with_crop_marker<'b>( + &'b mut self, + crop_marker: &'a str, + ) -> &'b mut SearchQuery<'a, Http> { + self.crop_marker = Some(crop_marker); + self + } + pub fn with_attributes_to_highlight<'b>( + &'b mut self, + attributes_to_highlight: Selectors<&'a [&'a str]>, + ) -> &'b mut SearchQuery<'a, Http> { + self.attributes_to_highlight = Some(attributes_to_highlight); + self + } + pub fn with_highlight_pre_tag<'b>( + &'b mut self, + highlight_pre_tag: &'a str, + ) -> &'b mut SearchQuery<'a, Http> { + self.highlight_pre_tag = Some(highlight_pre_tag); + self + } + pub fn with_highlight_post_tag<'b>( + &'b mut self, + highlight_post_tag: &'a str, + ) -> &'b mut SearchQuery<'a, Http> { + self.highlight_post_tag = Some(highlight_post_tag); + self + } + pub fn with_show_matches_position<'b>( + &'b mut self, + show_matches_position: bool, + ) -> &'b mut SearchQuery<'a, Http> { + self.show_matches_position = Some(show_matches_position); + self + } + + pub fn with_show_ranking_score<'b>( + &'b mut self, + show_ranking_score: bool, + ) -> &'b mut SearchQuery<'a, Http> { + self.show_ranking_score = Some(show_ranking_score); + self + } + + pub fn with_show_ranking_score_details<'b>( + &'b mut self, + show_ranking_score_details: bool, + ) -> &'b mut SearchQuery<'a, Http> { + self.show_ranking_score_details = Some(show_ranking_score_details); + self + } + + pub fn with_matching_strategy<'b>( + &'b mut self, + matching_strategy: MatchingStrategies, + ) -> &'b mut SearchQuery<'a, Http> { + self.matching_strategy = Some(matching_strategy); + self + } + pub fn with_index_uid<'b>(&'b mut self) -> &'b mut SearchQuery<'a, Http> { + self.index_uid = Some(&self.index.uid); + self + } + pub fn with_distinct<'b>(&'b mut self, distinct: &'a str) -> &'b mut SearchQuery<'a, Http> { + self.distinct = Some(distinct); + self + } + pub fn with_ranking_score_threshold<'b>( + &'b mut self, + ranking_score_threshold: f64, + ) -> &'b mut SearchQuery<'a, Http> { + self.ranking_score_threshold = Some(ranking_score_threshold); + self + } + pub fn with_locales<'b>(&'b mut self, locales: &'a [&'a str]) -> &'b mut SearchQuery<'a, Http> { + self.locales = Some(locales); + self + } + /// Only usable in federated multi search queries. + pub fn with_federation_options<'b>( + &'b mut self, + federation_options: QueryFederationOptions, + ) -> &'b mut SearchQuery<'a, Http> { + self.federation_options = Some(federation_options); + self + } + pub fn build(&mut self) -> SearchQuery<'a, Http> { + self.clone() + } + /// Execute the query and fetch the results. + pub async fn execute( + &'a self, + ) -> Result, Error> { + self.index.execute_query::(self).await + } +} + +#[derive(Debug, Serialize, Clone)] +#[serde(rename_all = "camelCase")] +pub struct MultiSearchQuery<'a, 'b, Http: HttpClient = DefaultHttpClient> { + #[serde(skip_serializing)] + client: &'a Client, + // The weird `serialize = ""` is actually useful: without it, serde adds the + // bound `Http: Serialize` to the `Serialize` impl block, but that's not + // necessary. 
`SearchQuery` always implements `Serialize` (regardless of + // type parameter), so no bound is fine. + #[serde(bound(serialize = ""))] + pub queries: Vec>, +} + +#[allow(missing_docs)] +impl<'a, 'b, Http: HttpClient> MultiSearchQuery<'a, 'b, Http> { + #[must_use] + pub fn new(client: &'a Client) -> MultiSearchQuery<'a, 'b, Http> { + MultiSearchQuery { + client, + queries: Vec::new(), + } + } + pub fn with_search_query( + &mut self, + mut search_query: SearchQuery<'b, Http>, + ) -> &mut MultiSearchQuery<'a, 'b, Http> { + search_query.with_index_uid(); + self.queries.push(search_query); + self + } + /// Adds the `federation` parameter, making the search a federated search. + pub fn with_federation( + self, + federation: FederationOptions, + ) -> FederatedMultiSearchQuery<'a, 'b, Http> { + FederatedMultiSearchQuery { + client: self.client, + queries: self.queries, + federation: Some(federation), + } + } + + /// Execute the query and fetch the results. + pub async fn execute( + &'a self, + ) -> Result, Error> { + self.client.execute_multi_search_query::(self).await + } +} +#[derive(Debug, Clone, Deserialize)] +pub struct MultiSearchResponse { + pub results: Vec>, +} + +#[derive(Debug, Serialize, Clone)] +#[serde(rename_all = "camelCase")] +pub struct FederatedMultiSearchQuery<'a, 'b, Http: HttpClient = DefaultHttpClient> { + #[serde(skip_serializing)] + client: &'a Client, + #[serde(bound(serialize = ""))] + pub queries: Vec>, + pub federation: Option, +} + +/// The `federation` field of the multi search API. +/// See [the docs](https://www.meilisearch.com/docs/reference/api/multi_search#federation). +#[derive(Debug, Serialize, Clone, Default)] +#[serde(rename_all = "camelCase")] +pub struct FederationOptions { + #[serde(skip_serializing_if = "Option::is_none")] + pub offset: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub limit: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub facets_by_index: Option>>, + #[serde(skip_serializing_if = "Option::is_none")] + pub merge_facets: Option, +} + +#[allow(missing_docs)] +impl<'a, 'b, Http: HttpClient> FederatedMultiSearchQuery<'a, 'b, Http> { + /// Execute the query and fetch the results. + pub async fn execute( + &'a self, + ) -> Result, Error> { + self.client + .execute_federated_multi_search_query::(self) + .await + } +} + +/// Returned by federated multi search. +#[derive(Debug, Deserialize, Clone)] +#[serde(rename_all = "camelCase")] +pub struct FederatedMultiSearchResponse { + /// Merged results of the query. + pub hits: Vec>, + + // TODO: are offset, limit and estimated_total_hits really non-optional? In + // my tests they are always returned, but that's not a proof. + /// Number of documents skipped. + pub offset: usize, + /// Number of results returned. + pub limit: usize, + /// Estimated total number of matches. + pub estimated_total_hits: usize, + + /// Distribution of the given facets. + pub facet_distribution: Option>>, + /// facet stats of the numerical facets requested in the `facet` search parameter. + pub facet_stats: Option>, + /// Processing time of the query. + pub processing_time_ms: usize, +} + +/// Returned for each hit in `_federation` when doing federated multi search. +#[derive(Debug, Deserialize, Clone)] +#[serde(rename_all = "camelCase")] +pub struct FederationHitInfo { + pub index_uid: String, + pub queries_position: usize, + // TOOD: not mentioned in the docs, is that optional? 
+ pub weighted_ranking_score: f32, +} + +#[cfg(test)] +mod tests { + use crate::{ + client::*, + key::{Action, KeyBuilder}, + search::*, + }; + use big_s::S; + use meilisearch_test_macro::meilisearch_test; + use serde::{Deserialize, Serialize}; + use serde_json::{json, Map, Value}; + + #[derive(Debug, Serialize, Deserialize, PartialEq)] + struct Nested { + child: String, + } + + #[derive(Debug, Serialize, Deserialize, PartialEq)] + struct Document { + id: usize, + value: String, + kind: String, + number: i32, + nested: Nested, + } + + impl PartialEq> for Document { + #[allow(clippy::cmp_owned)] + fn eq(&self, rhs: &Map) -> bool { + self.id.to_string() == rhs["id"] + && self.value == rhs["value"] + && self.kind == rhs["kind"] + } + } + + async fn setup_test_index(client: &Client, index: &Index) -> Result<(), Error> { + let t0 = index.add_documents(&[ + Document { id: 0, kind: "text".into(), number: 0, value: S("Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum."), nested: Nested { child: S("first") } }, + Document { id: 1, kind: "text".into(), number: 10, value: S("dolor sit amet, consectetur adipiscing elit"), nested: Nested { child: S("second") } }, + Document { id: 2, kind: "title".into(), number: 20, value: S("The Social Network"), nested: Nested { child: S("third") } }, + Document { id: 3, kind: "title".into(), number: 30, value: S("Harry Potter and the Sorcerer's Stone"), nested: Nested { child: S("fourth") } }, + Document { id: 4, kind: "title".into(), number: 40, value: S("Harry Potter and the Chamber of Secrets"), nested: Nested { child: S("fift") } }, + Document { id: 5, kind: "title".into(), number: 50, value: S("Harry Potter and the Prisoner of Azkaban"), nested: Nested { child: S("sixth") } }, + Document { id: 6, kind: "title".into(), number: 60, value: S("Harry Potter and the Goblet of Fire"), nested: Nested { child: S("seventh") } }, + Document { id: 7, kind: "title".into(), number: 70, value: S("Harry Potter and the Order of the Phoenix"), nested: Nested { child: S("eighth") } }, + Document { id: 8, kind: "title".into(), number: 80, value: S("Harry Potter and the Half-Blood Prince"), nested: Nested { child: S("ninth") } }, + Document { id: 9, kind: "title".into(), number: 90, value: S("Harry Potter and the Deathly Hallows"), nested: Nested { child: S("tenth") } }, + ], None).await?; + let t1 = index + .set_filterable_attributes(["kind", "value", "number"]) + .await?; + let t2 = index.set_sortable_attributes(["title"]).await?; + + t2.wait_for_completion(client, None, None).await?; + t1.wait_for_completion(client, None, None).await?; + t0.wait_for_completion(client, None, None).await?; + + Ok(()) + } + + #[meilisearch_test] + async fn test_multi_search(client: Client, index: Index) -> Result<(), Error> { + setup_test_index(&client, &index).await?; + let search_query_1 = SearchQuery::new(&index) + .with_query("Sorcerer's Stone") + .build(); + let search_query_2 = SearchQuery::new(&index) + .with_query("Chamber of Secrets") + .build(); + + let response = client + .multi_search() + .with_search_query(search_query_1) + .with_search_query(search_query_2) + .execute::() + .await + .unwrap(); + + 
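+        // One `SearchResults` per query, in the order the queries were added.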
assert_eq!(response.results.len(), 2); + Ok(()) + } + + #[meilisearch_test] + async fn test_query_builder(_client: Client, index: Index) -> Result<(), Error> { + let mut query = SearchQuery::new(&index); + query.with_query("space").with_offset(42).with_limit(21); + + let res = query.execute::().await.unwrap(); + + assert_eq!(res.query, S("space")); + assert_eq!(res.limit, Some(21)); + assert_eq!(res.offset, Some(42)); + assert_eq!(res.estimated_total_hits, Some(0)); + Ok(()) + } + + #[meilisearch_test] + async fn test_query_numbered_pagination(client: Client, index: Index) -> Result<(), Error> { + setup_test_index(&client, &index).await?; + + let mut query = SearchQuery::new(&index); + query.with_query("").with_page(2).with_hits_per_page(2); + + let res = query.execute::().await.unwrap(); + + assert_eq!(res.page, Some(2)); + assert_eq!(res.hits_per_page, Some(2)); + assert_eq!(res.total_hits, Some(10)); + assert_eq!(res.total_pages, Some(5)); + Ok(()) + } + + #[meilisearch_test] + async fn test_query_string(client: Client, index: Index) -> Result<(), Error> { + setup_test_index(&client, &index).await?; + + let results: SearchResults = index.search().with_query("dolor").execute().await?; + assert_eq!(results.hits.len(), 2); + Ok(()) + } + + #[meilisearch_test] + async fn test_query_string_on_nested_field(client: Client, index: Index) -> Result<(), Error> { + setup_test_index(&client, &index).await?; + + let results: SearchResults = + index.search().with_query("second").execute().await?; + + assert_eq!( + &Document { + id: 1, + value: S("dolor sit amet, consectetur adipiscing elit"), + kind: S("text"), + number: 10, + nested: Nested { child: S("second") } + }, + &results.hits[0].result + ); + + Ok(()) + } + + #[meilisearch_test] + async fn test_query_limit(client: Client, index: Index) -> Result<(), Error> { + setup_test_index(&client, &index).await?; + + let results: SearchResults = index.search().with_limit(5).execute().await?; + assert_eq!(results.hits.len(), 5); + Ok(()) + } + + #[meilisearch_test] + async fn test_query_page(client: Client, index: Index) -> Result<(), Error> { + setup_test_index(&client, &index).await?; + + let results: SearchResults = index.search().with_page(2).execute().await?; + assert_eq!(results.page, Some(2)); + assert_eq!(results.hits_per_page, Some(20)); + Ok(()) + } + + #[meilisearch_test] + async fn test_query_hits_per_page(client: Client, index: Index) -> Result<(), Error> { + setup_test_index(&client, &index).await?; + + let results: SearchResults = + index.search().with_hits_per_page(2).execute().await?; + assert_eq!(results.page, Some(1)); + assert_eq!(results.hits_per_page, Some(2)); + Ok(()) + } + + #[meilisearch_test] + async fn test_query_offset(client: Client, index: Index) -> Result<(), Error> { + setup_test_index(&client, &index).await?; + + let results: SearchResults = index.search().with_offset(6).execute().await?; + assert_eq!(results.hits.len(), 4); + Ok(()) + } + + #[meilisearch_test] + async fn test_query_filter(client: Client, index: Index) -> Result<(), Error> { + setup_test_index(&client, &index).await?; + + let results: SearchResults = index + .search() + .with_filter("value = \"The Social Network\"") + .execute() + .await?; + assert_eq!(results.hits.len(), 1); + + let results: SearchResults = index + .search() + .with_filter("NOT value = \"The Social Network\"") + .execute() + .await?; + assert_eq!(results.hits.len(), 9); + Ok(()) + } + + #[meilisearch_test] + async fn test_query_filter_with_array(client: Client, index: Index) -> 
Result<(), Error> { + setup_test_index(&client, &index).await?; + + let results: SearchResults = index + .search() + .with_array_filter(vec![ + "value = \"The Social Network\"", + "value = \"The Social Network\"", + ]) + .execute() + .await?; + assert_eq!(results.hits.len(), 1); + + Ok(()) + } + + #[meilisearch_test] + async fn test_query_facet_distribution(client: Client, index: Index) -> Result<(), Error> { + setup_test_index(&client, &index).await?; + + let mut query = SearchQuery::new(&index); + query.with_facets(Selectors::All); + let results: SearchResults = index.execute_query(&query).await?; + assert_eq!( + results + .facet_distribution + .unwrap() + .get("kind") + .unwrap() + .get("title") + .unwrap(), + &8 + ); + + let mut query = SearchQuery::new(&index); + query.with_facets(Selectors::Some(&["kind"])); + let results: SearchResults = index.execute_query(&query).await?; + assert_eq!( + results + .facet_distribution + .clone() + .unwrap() + .get("kind") + .unwrap() + .get("title") + .unwrap(), + &8 + ); + assert_eq!( + results + .facet_distribution + .unwrap() + .get("kind") + .unwrap() + .get("text") + .unwrap(), + &2 + ); + Ok(()) + } + + #[meilisearch_test] + async fn test_query_attributes_to_retrieve(client: Client, index: Index) -> Result<(), Error> { + setup_test_index(&client, &index).await?; + + let results: SearchResults = index + .search() + .with_attributes_to_retrieve(Selectors::All) + .execute() + .await?; + assert_eq!(results.hits.len(), 10); + + let mut query = SearchQuery::new(&index); + query.with_attributes_to_retrieve(Selectors::Some(&["kind", "id"])); // omit the "value" field + assert!(index.execute_query::(&query).await.is_err()); // error: missing "value" field + Ok(()) + } + + #[meilisearch_test] + async fn test_query_sort(client: Client, index: Index) -> Result<(), Error> { + setup_test_index(&client, &index).await?; + + let mut query = SearchQuery::new(&index); + query.with_query("harry potter"); + query.with_sort(&["title:desc"]); + let results: SearchResults = index.execute_query(&query).await?; + assert_eq!(results.hits.len(), 7); + Ok(()) + } + + #[meilisearch_test] + async fn test_query_attributes_to_crop(client: Client, index: Index) -> Result<(), Error> { + setup_test_index(&client, &index).await?; + + let mut query = SearchQuery::new(&index); + query.with_query("lorem ipsum"); + query.with_attributes_to_crop(Selectors::All); + let results: SearchResults = index.execute_query(&query).await?; + assert_eq!( + &Document { + id: 0, + value: S("Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do…"), + kind: S("text"), + number: 0, + nested: Nested { child: S("first") } + }, + results.hits[0].formatted_result.as_ref().unwrap() + ); + + let mut query = SearchQuery::new(&index); + query.with_query("lorem ipsum"); + query.with_attributes_to_crop(Selectors::Some(&[("value", Some(5)), ("kind", None)])); + let results: SearchResults = index.execute_query(&query).await?; + assert_eq!( + &Document { + id: 0, + value: S("Lorem ipsum dolor sit amet…"), + kind: S("text"), + number: 0, + nested: Nested { child: S("first") } + }, + results.hits[0].formatted_result.as_ref().unwrap() + ); + Ok(()) + } + + #[meilisearch_test] + async fn test_query_crop_length(client: Client, index: Index) -> Result<(), Error> { + setup_test_index(&client, &index).await?; + + let mut query = SearchQuery::new(&index); + query.with_query("lorem ipsum"); + query.with_attributes_to_crop(Selectors::All); + query.with_crop_length(200); + let results: SearchResults = 
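+        // crop_length(200) exceeds the length of the document text, so the
+        // value below comes back uncropped.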
index.execute_query(&query).await?; + assert_eq!(&Document { + id: 0, + value: S("Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum."), + kind: S("text"), + number: 0, + nested: Nested { child: S("first") } + }, + results.hits[0].formatted_result.as_ref().unwrap()); + + let mut query = SearchQuery::new(&index); + query.with_query("lorem ipsum"); + query.with_attributes_to_crop(Selectors::All); + query.with_crop_length(5); + let results: SearchResults = index.execute_query(&query).await?; + assert_eq!( + &Document { + id: 0, + value: S("Lorem ipsum dolor sit amet…"), + kind: S("text"), + number: 0, + nested: Nested { child: S("first") } + }, + results.hits[0].formatted_result.as_ref().unwrap() + ); + Ok(()) + } + + #[meilisearch_test] + async fn test_query_customized_crop_marker(client: Client, index: Index) -> Result<(), Error> { + setup_test_index(&client, &index).await?; + + let mut query = SearchQuery::new(&index); + query.with_query("sed do eiusmod"); + query.with_attributes_to_crop(Selectors::All); + query.with_crop_length(6); + query.with_crop_marker("(ꈍᴗꈍ)"); + + let results: SearchResults = index.execute_query(&query).await?; + + assert_eq!( + &Document { + id: 0, + value: S("(ꈍᴗꈍ)sed do eiusmod tempor incididunt ut(ꈍᴗꈍ)"), + kind: S("text"), + number: 0, + nested: Nested { child: S("first") } + }, + results.hits[0].formatted_result.as_ref().unwrap() + ); + Ok(()) + } + + #[meilisearch_test] + async fn test_query_customized_highlight_pre_tag( + client: Client, + index: Index, + ) -> Result<(), Error> { + setup_test_index(&client, &index).await?; + + let mut query = SearchQuery::new(&index); + query.with_query("Social"); + query.with_attributes_to_highlight(Selectors::All); + query.with_highlight_pre_tag("(⊃。•́‿•̀。)⊃ "); + query.with_highlight_post_tag(" ⊂(´• ω •`⊂)"); + + let results: SearchResults = index.execute_query(&query).await?; + assert_eq!( + &Document { + id: 2, + value: S("The (⊃。•́‿•̀。)⊃ Social ⊂(´• ω •`⊂) Network"), + kind: S("title"), + number: 20, + nested: Nested { child: S("third") } + }, + results.hits[0].formatted_result.as_ref().unwrap() + ); + + Ok(()) + } + + #[meilisearch_test] + async fn test_query_attributes_to_highlight(client: Client, index: Index) -> Result<(), Error> { + setup_test_index(&client, &index).await?; + + let mut query = SearchQuery::new(&index); + query.with_query("dolor text"); + query.with_attributes_to_highlight(Selectors::All); + let results: SearchResults = index.execute_query(&query).await?; + assert_eq!( + &Document { + id: 1, + value: S("dolor sit amet, consectetur adipiscing elit"), + kind: S("text"), + number: 10, + nested: Nested { child: S("first") } + }, + results.hits[0].formatted_result.as_ref().unwrap(), + ); + + let mut query = SearchQuery::new(&index); + query.with_query("dolor text"); + query.with_attributes_to_highlight(Selectors::Some(&["value"])); + let results: SearchResults = index.execute_query(&query).await?; + assert_eq!( + &Document { + id: 1, + value: S("dolor sit amet, consectetur adipiscing elit"), + kind: S("text"), + number: 10, + nested: Nested { child: S("first") } + }, + 
results.hits[0].formatted_result.as_ref().unwrap() + ); + Ok(()) + } + + #[meilisearch_test] + async fn test_query_show_matches_position(client: Client, index: Index) -> Result<(), Error> { + setup_test_index(&client, &index).await?; + + let mut query = SearchQuery::new(&index); + query.with_query("dolor text"); + query.with_show_matches_position(true); + let results: SearchResults = index.execute_query(&query).await?; + assert_eq!(results.hits[0].matches_position.as_ref().unwrap().len(), 2); + assert_eq!( + results.hits[0] + .matches_position + .as_ref() + .unwrap() + .get("value") + .unwrap(), + &vec![MatchRange { + start: 0, + length: 5, + indices: None, + }] + ); + Ok(()) + } + + #[meilisearch_test] + async fn test_query_show_ranking_score(client: Client, index: Index) -> Result<(), Error> { + setup_test_index(&client, &index).await?; + + let mut query = SearchQuery::new(&index); + query.with_query("dolor text"); + query.with_show_ranking_score(true); + let results: SearchResults = index.execute_query(&query).await?; + assert!(results.hits[0].ranking_score.is_some()); + Ok(()) + } + + #[meilisearch_test] + async fn test_query_show_ranking_score_details( + client: Client, + index: Index, + ) -> Result<(), Error> { + setup_test_index(&client, &index).await?; + + let mut query = SearchQuery::new(&index); + query.with_query("dolor text"); + query.with_show_ranking_score_details(true); + let results: SearchResults = index.execute_query(&query).await.unwrap(); + assert!(results.hits[0].ranking_score_details.is_some()); + Ok(()) + } + + #[meilisearch_test] + async fn test_query_show_ranking_score_threshold( + client: Client, + index: Index, + ) -> Result<(), Error> { + setup_test_index(&client, &index).await?; + + let mut query = SearchQuery::new(&index); + query.with_query("dolor text"); + query.with_ranking_score_threshold(1.0); + let results: SearchResults = index.execute_query(&query).await.unwrap(); + assert!(results.hits.is_empty()); + Ok(()) + } + + #[meilisearch_test] + async fn test_query_locales(client: Client, index: Index) -> Result<(), Error> { + setup_test_index(&client, &index).await?; + + let mut query = SearchQuery::new(&index); + query.with_query("Harry Styles"); + query.with_locales(&["eng"]); + let results: SearchResults = index.execute_query(&query).await.unwrap(); + assert_eq!(results.hits.len(), 7); + Ok(()) + } + + #[meilisearch_test] + async fn test_phrase_search(client: Client, index: Index) -> Result<(), Error> { + setup_test_index(&client, &index).await?; + + let mut query = SearchQuery::new(&index); + query.with_query("harry \"of Fire\""); + let results: SearchResults = index.execute_query(&query).await?; + + assert_eq!(results.hits.len(), 1); + Ok(()) + } + + #[meilisearch_test] + async fn test_matching_strategy_all(client: Client, index: Index) -> Result<(), Error> { + setup_test_index(&client, &index).await?; + + let results = SearchQuery::new(&index) + .with_query("Harry Styles") + .with_matching_strategy(MatchingStrategies::ALL) + .execute::() + .await + .unwrap(); + + assert_eq!(results.hits.len(), 0); + Ok(()) + } + + #[meilisearch_test] + async fn test_matching_strategy_last(client: Client, index: Index) -> Result<(), Error> { + setup_test_index(&client, &index).await?; + + let results = SearchQuery::new(&index) + .with_query("Harry Styles") + .with_matching_strategy(MatchingStrategies::LAST) + .execute::() + .await + .unwrap(); + + assert_eq!(results.hits.len(), 7); + Ok(()) + } + + #[meilisearch_test] + async fn test_matching_strategy_frequency(client: 
Client, index: Index) -> Result<(), Error> {
+        setup_test_index(&client, &index).await?;
+
+        let results = SearchQuery::new(&index)
+            .with_query("Harry Styles")
+            .with_matching_strategy(MatchingStrategies::FREQUENCY)
+            .execute::<Document>()
+            .await
+            .unwrap();
+
+        assert_eq!(results.hits.len(), 7);
+        Ok(())
+    }
+
+    #[meilisearch_test]
+    async fn test_distinct(client: Client, index: Index) -> Result<(), Error> {
+        setup_test_index(&client, &index).await?;
+
+        let results = SearchQuery::new(&index)
+            .with_distinct("kind")
+            .execute::<Document>()
+            .await
+            .unwrap();
+
+        assert_eq!(results.hits.len(), 2);
+        Ok(())
+    }
+
+    #[meilisearch_test]
+    async fn test_generate_tenant_token_from_client(
+        client: Client,
+        index: Index,
+    ) -> Result<(), Error> {
+        setup_test_index(&client, &index).await?;
+
+        let meilisearch_url = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700");
+        let key = KeyBuilder::new()
+            .with_action(Action::All)
+            .with_index("*")
+            .execute(&client)
+            .await
+            .unwrap();
+        let allowed_client = Client::new(meilisearch_url, Some(key.key)).unwrap();
+
+        let search_rules = vec![
+            json!({ "*": {}}),
+            json!({ "*": Value::Null }),
+            json!(["*"]),
+            json!({ "*": { "filter": "kind = text" } }),
+            json!([index.uid.to_string()]),
+        ];
+
+        for rules in search_rules {
+            let token = allowed_client
+                .generate_tenant_token(key.uid.clone(), rules, None, None)
+                .expect("Cannot generate tenant token.");
+
+            let new_client = Client::new(meilisearch_url, Some(token.clone())).unwrap();
+
+            let result: SearchResults<Document> = new_client
+                .index(index.uid.to_string())
+                .search()
+                .execute()
+                .await?;
+
+            assert!(!result.hits.is_empty());
+        }
+
+        Ok(())
+    }
+}
diff --git a/backend/vendor/meilisearch-sdk/src/settings.rs b/backend/vendor/meilisearch-sdk/src/settings.rs
new file mode 100644
index 000000000..557226280
--- /dev/null
+++ b/backend/vendor/meilisearch-sdk/src/settings.rs
@@ -0,0 +1,2697 @@
+use crate::{
+    errors::Error,
+    indexes::Index,
+    request::{HttpClient, Method},
+    task_info::TaskInfo,
+};
+use serde::{Deserialize, Serialize};
+use std::collections::HashMap;
+
+#[derive(Serialize, Deserialize, Default, Debug, Clone, PartialEq, Eq, Copy)]
+#[serde(rename_all = "camelCase")]
+pub struct PaginationSetting {
+    pub max_total_hits: usize,
+}
+
+#[derive(Serialize, Deserialize, Default, Debug, Clone, PartialEq, Eq)]
+#[serde(rename_all = "camelCase")]
+pub struct MinWordSizeForTypos {
+    pub one_typo: Option<u8>,
+    pub two_typos: Option<u8>,
+}
+
+#[derive(Serialize, Deserialize, Default, Debug, Clone, PartialEq, Eq)]
+#[serde(rename_all = "camelCase")]
+#[serde(default)]
+pub struct TypoToleranceSettings {
+    pub enabled: Option<bool>,
+    pub disable_on_attributes: Option<Vec<String>>,
+    pub disable_on_words: Option<Vec<String>>,
+    pub min_word_size_for_typos: Option<MinWordSizeForTypos>,
+}
+
+#[derive(Serialize, Deserialize, Default, Debug, Clone, Eq, PartialEq, Copy)]
+#[serde(rename_all = "camelCase")]
+pub struct FacetingSettings {
+    pub max_values_per_facet: usize,
+}
+
+#[derive(Serialize, Deserialize, Default, Debug, Clone, Eq, PartialEq)]
+#[serde(rename_all = "camelCase")]
+pub struct LocalizedAttributes {
+    pub locales: Vec<String>,
+    pub attribute_patterns: Vec<String>,
+}
+
+/// Struct representing a set of settings.
+///
+/// You can build this struct using the builder syntax.
+///
+/// # Example
+///
+/// ```
+/// # use meilisearch_sdk::settings::Settings;
+/// let settings = Settings::new()
+///     .with_stop_words(["a", "the", "of"]);
+///
+/// // OR
+///
+/// let stop_words: Vec<String> = vec!["a".to_string(), "the".to_string(), "of".to_string()];
+/// let mut settings = Settings::new();
+/// settings.stop_words = Some(stop_words);
+///
+/// // OR
+///
+/// let stop_words: Vec<String> = vec!["a".to_string(), "the".to_string(), "of".to_string()];
+/// let settings = Settings {
+///     stop_words: Some(stop_words),
+///     ..Settings::new()
+/// };
+/// ```
+#[derive(Serialize, Deserialize, Default, Debug, Clone)]
+#[serde(rename_all = "camelCase")]
+pub struct Settings {
+    /// List of associated words treated similarly.
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub synonyms: Option<HashMap<String, Vec<String>>>,
+    /// List of words ignored by Meilisearch when present in search queries.
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub stop_words: Option<Vec<String>>,
+    /// List of [ranking rules](https://www.meilisearch.com/docs/learn/core_concepts/relevancy#order-of-the-rules) sorted by order of importance.
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub ranking_rules: Option<Vec<String>>,
+    /// Attributes to use for [filtering](https://www.meilisearch.com/docs/learn/advanced/filtering).
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub filterable_attributes: Option<Vec<String>>,
+    /// Attributes to sort.
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub sortable_attributes: Option<Vec<String>>,
+    /// Search returns documents with distinct (different) values of the given field.
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub distinct_attribute: Option<Option<String>>,
+    /// Fields in which to search for matching query words sorted by order of importance.
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub searchable_attributes: Option<Vec<String>>,
+    /// Fields displayed in the returned documents.
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub displayed_attributes: Option<Vec<String>>,
+    /// Pagination settings.
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub pagination: Option<PaginationSetting>,
+    /// Faceting settings.
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub faceting: Option<FacetingSettings>,
+    /// TypoTolerance settings
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub typo_tolerance: Option<TypoToleranceSettings>,
+    /// Dictionary settings.
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub dictionary: Option<Vec<String>>,
+    /// Proximity precision settings.
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub proximity_precision: Option<String>,
+    /// SearchCutoffMs settings.
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub search_cutoff_ms: Option<u64>,
+    /// Configure strings as custom separator tokens indicating where a word ends and begins.
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub separator_tokens: Option<Vec<String>>,
+    /// Remove tokens from Meilisearch's default [list of word separators](https://www.meilisearch.com/docs/learn/engine/datatypes#string).
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub non_separator_tokens: Option<Vec<String>>,
+    /// LocalizedAttributes settings.
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub localized_attributes: Option<Vec<LocalizedAttributes>>,
+}
+
+#[allow(missing_docs)]
+impl Settings {
+    /// Create undefined settings.
+    #[must_use]
+    pub fn new() -> Settings {
+        Self::default()
+    }
+
+    #[must_use]
+    pub fn with_synonyms<S, U, V>(self, synonyms: HashMap<S, U>) -> Settings
+    where
+        S: AsRef<str>,
+        V: AsRef<str>,
+        U: IntoIterator<Item = V>,
+    {
+        Settings {
+            synonyms: Some(
+                synonyms
+                    .into_iter()
+                    .map(|(key, value)| {
+                        (
+                            key.as_ref().to_string(),
+                            value.into_iter().map(|v| v.as_ref().to_string()).collect(),
+                        )
+                    })
+                    .collect(),
+            ),
+            ..self
+        }
+    }
+
+    #[must_use]
+    pub fn with_stop_words(
+        self,
+        stop_words: impl IntoIterator<Item = impl AsRef<str>>,
+    ) -> Settings {
+        Settings {
+            stop_words: Some(
+                stop_words
+                    .into_iter()
+                    .map(|v| v.as_ref().to_string())
+                    .collect(),
+            ),
+            ..self
+        }
+    }
+
+    #[must_use]
+    pub fn with_pagination(self, pagination_settings: PaginationSetting) -> Settings {
+        Settings {
+            pagination: Some(pagination_settings),
+            ..self
+        }
+    }
+
+    #[must_use]
+    pub fn with_typo_tolerance(self, typo_tolerance_settings: TypoToleranceSettings) -> Settings {
+        Settings {
+            typo_tolerance: Some(typo_tolerance_settings),
+            ..self
+        }
+    }
+
+    #[must_use]
+    pub fn with_ranking_rules(
+        self,
+        ranking_rules: impl IntoIterator<Item = impl AsRef<str>>,
+    ) -> Settings {
+        Settings {
+            ranking_rules: Some(
+                ranking_rules
+                    .into_iter()
+                    .map(|v| v.as_ref().to_string())
+                    .collect(),
+            ),
+            ..self
+        }
+    }
+
+    #[must_use]
+    pub fn with_filterable_attributes(
+        self,
+        filterable_attributes: impl IntoIterator<Item = impl AsRef<str>>,
+    ) -> Settings {
+        Settings {
+            filterable_attributes: Some(
+                filterable_attributes
+                    .into_iter()
+                    .map(|v| v.as_ref().to_string())
+                    .collect(),
+            ),
+            ..self
+        }
+    }
+
+    #[must_use]
+    pub fn with_sortable_attributes(
+        self,
+        sortable_attributes: impl IntoIterator<Item = impl AsRef<str>>,
+    ) -> Settings {
+        Settings {
+            sortable_attributes: Some(
+                sortable_attributes
+                    .into_iter()
+                    .map(|v| v.as_ref().to_string())
+                    .collect(),
+            ),
+            ..self
+        }
+    }
+
+    #[must_use]
+    pub fn with_distinct_attribute(self, distinct_attribute: Option<impl AsRef<str>>) -> Settings {
+        Settings {
+            distinct_attribute: Some(
+                distinct_attribute.map(|distinct| distinct.as_ref().to_string()),
+            ),
+            ..self
+        }
+    }
+
+    #[must_use]
+    pub fn with_searchable_attributes(
+        self,
+        searchable_attributes: impl IntoIterator<Item = impl AsRef<str>>,
+    ) -> Settings {
+        Settings {
+            searchable_attributes: Some(
+                searchable_attributes
+                    .into_iter()
+                    .map(|v| v.as_ref().to_string())
+                    .collect(),
+            ),
+            ..self
+        }
+    }
+
+    #[must_use]
+    pub fn with_displayed_attributes(
+        self,
+        displayed_attributes: impl IntoIterator<Item = impl AsRef<str>>,
+    ) -> Settings {
+        Settings {
+            displayed_attributes: Some(
+                displayed_attributes
+                    .into_iter()
+                    .map(|v| v.as_ref().to_string())
+                    .collect(),
+            ),
+            ..self
+        }
+    }
+
+    #[must_use]
+    pub fn with_faceting(self, faceting: &FacetingSettings) -> Settings {
+        Settings {
+            faceting: Some(*faceting),
+            ..self
+        }
+    }
+
+    #[must_use]
+    pub fn with_dictionary(
+        self,
+        dictionary: impl IntoIterator<Item = impl AsRef<str>>,
+    ) -> Settings {
+        Settings {
+            dictionary: Some(
+                dictionary
+                    .into_iter()
+                    .map(|v| v.as_ref().to_string())
+                    .collect(),
+            ),
+            ..self
+        }
+    }
+
+    pub fn with_proximity_precision(self, proximity_precision: impl AsRef<str>) -> Settings {
+        Settings {
+            proximity_precision: Some(proximity_precision.as_ref().to_string()),
+            ..self
+        }
+    }
+
+    pub fn with_search_cutoff(self, search_cutoff_ms: u64) -> Settings {
+        Settings {
+            search_cutoff_ms: Some(search_cutoff_ms),
+            ..self
+        }
+    }
+
+    #[must_use]
+    pub fn with_separation_tokens(
+        self,
+        separator_tokens: impl IntoIterator<Item = impl AsRef<str>>,
+    ) -> Settings {
+        Settings {
+            separator_tokens: Some(
+                separator_tokens
+                    .into_iter()
+                    .map(|v| v.as_ref().to_string())
+                    .collect(),
+            ),
+            ..self
+        }
+    }
+
+    #[must_use]
+    pub fn with_non_separation_tokens(
+        self,
+        non_separator_tokens: impl IntoIterator<Item = impl AsRef<str>>,
+    ) -> Settings {
+        Settings {
+            non_separator_tokens: Some(
+                non_separator_tokens
+                    .into_iter()
+                    .map(|v| v.as_ref().to_string())
+                    .collect(),
+            ),
+            ..self
+        }
+    }
+
+    #[must_use]
+    pub fn with_localized_attributes(
+        self,
+        localized_attributes: impl IntoIterator<Item = LocalizedAttributes>,
+    ) -> Settings {
+        Settings {
+            localized_attributes: Some(localized_attributes.into_iter().collect()),
+            ..self
+        }
+    }
+}
+
+impl<Http: HttpClient> Index<Http> {
+    /// Get [Settings] of the [Index].
+    ///
+    /// # Example
+    ///
+    /// ```
+    /// # use meilisearch_sdk::{client::*, indexes::*};
+    /// #
+    /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700");
+    /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey");
+    /// #
+    /// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async {
+    /// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap();
+    /// # client.create_index("get_settings", None).await.unwrap().wait_for_completion(&client, None, None).await.unwrap();
+    /// let index = client.index("get_settings");
+    ///
+    /// let settings = index.get_settings().await.unwrap();
+    /// # index.delete().await.unwrap().wait_for_completion(&client, None, None).await.unwrap();
+    /// # });
+    /// ```
+    pub async fn get_settings(&self) -> Result<Settings, Error> {
+        self.client
+            .http_client
+            .request::<(), (), Settings>(
+                &format!("{}/indexes/{}/settings", self.client.host, self.uid),
+                Method::Get { query: () },
+                200,
+            )
+            .await
+    }
+
+    /// Get [synonyms](https://www.meilisearch.com/docs/reference/api/settings#get-synonyms) of the [Index].
+    ///
+    /// # Example
+    ///
+    /// ```
+    /// # use meilisearch_sdk::{client::*, indexes::*};
+    /// #
+    /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700");
+    /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey");
+    /// #
+    /// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async {
+    /// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap();
+    /// # client.create_index("get_synonyms", None).await.unwrap().wait_for_completion(&client, None, None).await.unwrap();
+    /// let index = client.index("get_synonyms");
+    ///
+    /// let synonyms = index.get_synonyms().await.unwrap();
+    /// # index.delete().await.unwrap().wait_for_completion(&client, None, None).await.unwrap();
+    /// # });
+    /// ```
+    pub async fn get_synonyms(&self) -> Result<HashMap<String, Vec<String>>, Error> {
+        self.client
+            .http_client
+            .request::<(), (), HashMap<String, Vec<String>>>(
+                &format!(
+                    "{}/indexes/{}/settings/synonyms",
+                    self.client.host, self.uid
+                ),
+                Method::Get { query: () },
+                200,
+            )
+            .await
+    }
+
+    /// Get [pagination](https://www.meilisearch.com/docs/reference/api/settings#pagination) of the [Index].
+ /// + /// # Example + /// + /// + /// ``` + /// # use meilisearch_sdk::{client::*, indexes::*}; + /// # + /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700"); + /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey"); + /// # + /// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async { + /// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap(); + /// # client.create_index("get_pagination", None).await.unwrap().wait_for_completion(&client, None, None).await.unwrap(); + /// let index = client.index("get_pagination"); + /// + /// let pagination = index.get_pagination().await.unwrap(); + /// # index.delete().await.unwrap().wait_for_completion(&client, None, None).await.unwrap(); + /// # }); + /// ``` + pub async fn get_pagination(&self) -> Result { + self.client + .http_client + .request::<(), (), PaginationSetting>( + &format!( + "{}/indexes/{}/settings/pagination", + self.client.host, self.uid + ), + Method::Get { query: () }, + 200, + ) + .await + } + + /// Get [stop-words](https://www.meilisearch.com/docs/reference/api/settings#stop-words) of the [Index]. + /// + /// # Example + /// + /// ``` + /// # use meilisearch_sdk::{client::*, indexes::*}; + /// # + /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700"); + /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey"); + /// # + /// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async { + /// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap(); + /// # client.create_index("get_stop_words", None).await.unwrap().wait_for_completion(&client, None, None).await.unwrap(); + /// let index = client.index("get_stop_words"); + /// + /// let stop_words = index.get_stop_words().await.unwrap(); + /// # index.delete().await.unwrap().wait_for_completion(&client, None, None).await.unwrap(); + /// # }); + /// ``` + pub async fn get_stop_words(&self) -> Result, Error> { + self.client + .http_client + .request::<(), (), Vec>( + &format!( + "{}/indexes/{}/settings/stop-words", + self.client.host, self.uid + ), + Method::Get { query: () }, + 200, + ) + .await + } + + /// Get [ranking rules](https://www.meilisearch.com/docs/reference/api/settings#ranking-rules) of the [Index]. 
+ /// + /// # Example + /// + /// + /// ``` + /// # use meilisearch_sdk::{client::*, indexes::*}; + /// # + /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700"); + /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey"); + /// # + /// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async { + /// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap(); + /// # client.create_index("get_ranking_rules", None).await.unwrap().wait_for_completion(&client, None, None).await.unwrap(); + /// let index = client.index("get_ranking_rules"); + /// + /// let ranking_rules = index.get_ranking_rules().await.unwrap(); + /// # index.delete().await.unwrap().wait_for_completion(&client, None, None).await.unwrap(); + /// # }); + /// ``` + pub async fn get_ranking_rules(&self) -> Result, Error> { + self.client + .http_client + .request::<(), (), Vec>( + &format!( + "{}/indexes/{}/settings/ranking-rules", + self.client.host, self.uid + ), + Method::Get { query: () }, + 200, + ) + .await + } + + /// Get [filterable attributes](https://www.meilisearch.com/docs/reference/api/settings#filterable-attributes) of the [Index]. + /// + /// # Example + /// + /// + /// ``` + /// # use meilisearch_sdk::{client::*, indexes::*}; + /// # + /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700"); + /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey"); + /// # + /// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async { + /// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap(); + /// # client.create_index("get_filterable_attributes", None).await.unwrap().wait_for_completion(&client, None, None).await.unwrap(); + /// let index = client.index("get_filterable_attributes"); + /// + /// let filterable_attributes = index.get_filterable_attributes().await.unwrap(); + /// # index.delete().await.unwrap().wait_for_completion(&client, None, None).await.unwrap(); + /// # }); + /// ``` + pub async fn get_filterable_attributes(&self) -> Result, Error> { + self.client + .http_client + .request::<(), (), Vec>( + &format!( + "{}/indexes/{}/settings/filterable-attributes", + self.client.host, self.uid + ), + Method::Get { query: () }, + 200, + ) + .await + } + + /// Get [sortable attributes](https://www.meilisearch.com/docs/reference/api/settings#sortable-attributes) of the [Index]. 
+ /// + /// # Example + /// + /// + /// ``` + /// # use meilisearch_sdk::{client::*, indexes::*}; + /// # + /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700"); + /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey"); + /// # + /// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async { + /// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap(); + /// # client.create_index("get_sortable_attributes", None).await.unwrap().wait_for_completion(&client, None, None).await.unwrap(); + /// let index = client.index("get_sortable_attributes"); + /// + /// let sortable_attributes = index.get_sortable_attributes().await.unwrap(); + /// # index.delete().await.unwrap().wait_for_completion(&client, None, None).await.unwrap(); + /// # }); + /// ``` + pub async fn get_sortable_attributes(&self) -> Result, Error> { + self.client + .http_client + .request::<(), (), Vec>( + &format!( + "{}/indexes/{}/settings/sortable-attributes", + self.client.host, self.uid + ), + Method::Get { query: () }, + 200, + ) + .await + } + + /// Get the [distinct attribute](https://www.meilisearch.com/docs/reference/api/settings#distinct-attribute) of the [Index]. + /// + /// # Example + /// + /// + /// ``` + /// # use meilisearch_sdk::{client::*, indexes::*}; + /// # + /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700"); + /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey"); + /// # + /// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async { + /// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap(); + /// # client.create_index("get_distinct_attribute", None).await.unwrap().wait_for_completion(&client, None, None).await.unwrap(); + /// let index = client.index("get_distinct_attribute"); + /// + /// let distinct_attribute = index.get_distinct_attribute().await.unwrap(); + /// # index.delete().await.unwrap().wait_for_completion(&client, None, None).await.unwrap(); + /// # }); + /// ``` + pub async fn get_distinct_attribute(&self) -> Result, Error> { + self.client + .http_client + .request::<(), (), Option>( + &format!( + "{}/indexes/{}/settings/distinct-attribute", + self.client.host, self.uid + ), + Method::Get { query: () }, + 200, + ) + .await + } + + /// Get [searchable attributes](https://www.meilisearch.com/docs/reference/api/settings#searchable-attributes) of the [Index]. 
+ /// + /// # Example + /// + /// + /// ``` + /// # use meilisearch_sdk::{client::*, indexes::*}; + /// # + /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700"); + /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey"); + /// # + /// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async { + /// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap(); + /// # client.create_index("get_searchable_attributes", None).await.unwrap().wait_for_completion(&client, None, None).await.unwrap(); + /// let index = client.index("get_searchable_attributes"); + /// + /// let searchable_attributes = index.get_searchable_attributes().await.unwrap(); + /// # index.delete().await.unwrap().wait_for_completion(&client, None, None).await.unwrap(); + /// # }); + /// ``` + pub async fn get_searchable_attributes(&self) -> Result, Error> { + self.client + .http_client + .request::<(), (), Vec>( + &format!( + "{}/indexes/{}/settings/searchable-attributes", + self.client.host, self.uid + ), + Method::Get { query: () }, + 200, + ) + .await + } + + /// Get [displayed attributes](https://www.meilisearch.com/docs/reference/api/settings#displayed-attributes) of the [Index]. + /// + /// # Example + /// + /// + /// ``` + /// # use meilisearch_sdk::{client::*, indexes::*}; + /// # + /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700"); + /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey"); + /// # + /// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async { + /// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap(); + /// # client.create_index("get_displayed_attributes", None).await.unwrap().wait_for_completion(&client, None, None).await.unwrap(); + /// let index = client.index("get_displayed_attributes"); + /// + /// let displayed_attributes = index.get_displayed_attributes().await.unwrap(); + /// # index.delete().await.unwrap().wait_for_completion(&client, None, None).await.unwrap(); + /// # }); + /// ``` + pub async fn get_displayed_attributes(&self) -> Result, Error> { + self.client + .http_client + .request::<(), (), Vec>( + &format!( + "{}/indexes/{}/settings/displayed-attributes", + self.client.host, self.uid + ), + Method::Get { query: () }, + 200, + ) + .await + } + + /// Get [faceting](https://www.meilisearch.com/docs/reference/api/settings#faceting) settings of the [Index]. 
+ /// + /// # Example + /// + /// + /// ``` + /// # use meilisearch_sdk::{client::*, indexes::*}; + /// # + /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700"); + /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey"); + /// # + /// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async { + /// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap(); + /// # client.create_index("get_faceting", None).await.unwrap().wait_for_completion(&client, None, None).await.unwrap(); + /// let index = client.index("get_faceting"); + /// + /// let faceting = index.get_faceting().await.unwrap(); + /// # index.delete().await.unwrap().wait_for_completion(&client, None, None).await.unwrap(); + /// # }); + /// ``` + pub async fn get_faceting(&self) -> Result { + self.client + .http_client + .request::<(), (), FacetingSettings>( + &format!( + "{}/indexes/{}/settings/faceting", + self.client.host, self.uid + ), + Method::Get { query: () }, + 200, + ) + .await + } + + /// Get [dictionary](https://www.meilisearch.com/docs/reference/api/settings#dictionary) of the [Index]. + /// + /// # Example + /// + /// ``` + /// # use meilisearch_sdk::{client::*, indexes::*}; + /// # + /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700"); + /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey"); + /// # + /// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async { + /// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap(); + /// # client.create_index("get_dictionary", None).await.unwrap().wait_for_completion(&client, None, None).await.unwrap(); + /// let index = client.index("get_dictionary"); + /// + /// let dictionary = index.get_dictionary().await.unwrap(); + /// # index.delete().await.unwrap().wait_for_completion(&client, None, None).await.unwrap(); + /// # }); + /// ``` + pub async fn get_dictionary(&self) -> Result, Error> { + self.client + .http_client + .request::<(), (), Vec>( + &format!( + "{}/indexes/{}/settings/dictionary", + self.client.host, self.uid + ), + Method::Get { query: () }, + 200, + ) + .await + } + + /// Get [proximity_precision](https://www.meilisearch.com/docs/reference/api/settings#proximity-precision) of the [Index]. 
+ /// + /// # Example + /// + /// ``` + /// # use meilisearch_sdk::{client::*, indexes::*}; + /// # + /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700"); + /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey"); + /// # + /// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async { + /// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap(); + /// # client.create_index("get_proximity_precision", None).await.unwrap().wait_for_completion(&client, None, None).await.unwrap(); + /// let index = client.index("get_proximity_precision"); + /// + /// let proximity_precision = index.get_proximity_precision().await.unwrap(); + /// # index.delete().await.unwrap().wait_for_completion(&client, None, None).await.unwrap(); + /// # }); + /// ``` + pub async fn get_proximity_precision(&self) -> Result { + self.client + .http_client + .request::<(), (), String>( + &format!( + "{}/indexes/{}/settings/proximity-precision", + self.client.host, self.uid + ), + Method::Get { query: () }, + 200, + ) + .await + } + + /// Get [typo tolerance](https://www.meilisearch.com/docs/learn/configuration/typo_tolerance#typo-tolerance) of the [Index]. + /// + /// ``` + /// # use meilisearch_sdk::{client::*, indexes::*}; + /// # + /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700"); + /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey"); + /// # + /// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async { + /// let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap(); + /// # client.create_index("get_typo_tolerance", None).await.unwrap().wait_for_completion(&client, None, None).await.unwrap(); + /// let index = client.index("get_typo_tolerance"); + /// + /// let typo_tolerance = index.get_typo_tolerance().await.unwrap(); + /// # index.delete().await.unwrap().wait_for_completion(&client, None, None).await.unwrap(); + /// # }); + /// ``` + pub async fn get_typo_tolerance(&self) -> Result { + self.client + .http_client + .request::<(), (), TypoToleranceSettings>( + &format!( + "{}/indexes/{}/settings/typo-tolerance", + self.client.host, self.uid + ), + Method::Get { query: () }, + 200, + ) + .await + } + + /// Get [search cutoff](https://www.meilisearch.com/docs/reference/api/settings#search-cutoff) settings of the [Index]. 
+ /// + /// # Example + /// + /// ``` + /// # use meilisearch_sdk::{client::*, indexes::*, settings::Settings}; + /// # + /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700"); + /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey"); + /// # + /// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async { + /// let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap(); + /// # client.create_index("get_search_cutoff_ms", None).await.unwrap().wait_for_completion(&client, None, None).await.unwrap(); + /// let mut index = client.index("get_search_cutoff_ms"); + /// + /// let task = index.get_search_cutoff_ms().await.unwrap(); + /// # index.delete().await.unwrap().wait_for_completion(&client, None, None).await.unwrap(); + /// # }); + /// ``` + pub async fn get_search_cutoff_ms(&self) -> Result, Error> { + self.client + .http_client + .request::<(), (), Option>( + &format!( + "{}/indexes/{}/settings/search-cutoff-ms", + self.client.host, self.uid + ), + Method::Get { query: () }, + 200, + ) + .await + } + + /// Get [separator token](https://www.meilisearch.com/docs/reference/api/settings#separator-tokens) of the [Index]. + /// + /// ``` + /// # use meilisearch_sdk::{client::*, indexes::*}; + /// # + /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700"); + /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey"); + /// # + /// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async { + /// let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap(); + /// # client.create_index("get_separator_tokens", None).await.unwrap().wait_for_completion(&client, None, None).await.unwrap(); + /// let index = client.index("get_separator_tokens"); + /// + /// let separator_tokens = index.get_separator_tokens().await.unwrap(); + /// # index.delete().await.unwrap().wait_for_completion(&client, None, None).await.unwrap(); + /// # }); + /// ``` + pub async fn get_separator_tokens(&self) -> Result, Error> { + self.client + .http_client + .request::<(), (), Vec>( + &format!( + "{}/indexes/{}/settings/separator-tokens", + self.client.host, self.uid + ), + Method::Get { query: () }, + 200, + ) + .await + } + + /// Get [non separator token](https://www.meilisearch.com/docs/reference/api/settings#non-separator-tokens) of the [Index]. 
+ /// + /// ``` + /// # use meilisearch_sdk::{client::*, indexes::*}; + /// # + /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700"); + /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey"); + /// # + /// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async { + /// let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap(); + /// # client.create_index("get_non_separator_tokens", None).await.unwrap().wait_for_completion(&client, None, None).await.unwrap(); + /// let index = client.index("get_non_separator_tokens"); + /// + /// let non_separator_tokens = index.get_non_separator_tokens().await.unwrap(); + /// # index.delete().await.unwrap().wait_for_completion(&client, None, None).await.unwrap(); + /// # }); + /// ``` + pub async fn get_non_separator_tokens(&self) -> Result, Error> { + self.client + .http_client + .request::<(), (), Vec>( + &format!( + "{}/indexes/{}/settings/non-separator-tokens", + self.client.host, self.uid + ), + Method::Get { query: () }, + 200, + ) + .await + } + + /// Get [localized attributes](https://www.meilisearch.com/docs/reference/api/settings#localized-attributes-object) settings of the [Index]. + /// + /// ``` + /// # use meilisearch_sdk::{client::*, indexes::*, settings::LocalizedAttributes}; + /// # + /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700"); + /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey"); + /// # + /// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async { + /// let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap(); + /// # client.create_index("get_localized_attributes", None).await.unwrap().wait_for_completion(&client, None, None).await.unwrap(); + /// let index = client.index("get_localized_attributes"); + /// + /// let localized_attributes = index.get_localized_attributes().await.unwrap(); + /// # index.delete().await.unwrap().wait_for_completion(&client, None, None).await.unwrap(); + /// # }); + /// ``` + pub async fn get_localized_attributes( + &self, + ) -> Result>, Error> { + self.client + .http_client + .request::<(), (), Option>>( + &format!( + "{}/indexes/{}/settings/localized-attributes", + self.client.host, self.uid + ), + Method::Get { query: () }, + 200, + ) + .await + } + + /// Update [settings](../settings/struct.Settings) of the [Index]. + /// + /// Updates in the settings are partial. This means that any parameters corresponding to a `None` value will be left unchanged. 
+ /// + /// # Example + /// + /// ``` + /// # use meilisearch_sdk::{client::*, indexes::*, settings::*}; + /// # + /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700"); + /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey"); + /// # + /// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async { + /// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap(); + /// # client.create_index("set_settings", None).await.unwrap().wait_for_completion(&client, None, None).await.unwrap(); + /// let mut index = client.index("set_settings"); + /// + /// let stop_words = vec![String::from("a"), String::from("the"), String::from("of")]; + /// let settings = Settings::new() + /// .with_stop_words(stop_words.clone()) + /// .with_pagination(PaginationSetting {max_total_hits: 100} + /// ); + /// + /// let task = index.set_settings(&settings).await.unwrap(); + /// # index.delete().await.unwrap().wait_for_completion(&client, None, None).await.unwrap(); + /// # }); + /// ``` + pub async fn set_settings(&self, settings: &Settings) -> Result { + self.client + .http_client + .request::<(), &Settings, TaskInfo>( + &format!("{}/indexes/{}/settings", self.client.host, self.uid), + Method::Patch { + query: (), + body: settings, + }, + 202, + ) + .await + } + + /// Update [synonyms](https://www.meilisearch.com/docs/reference/api/settings#synonyms) of the [Index]. + /// + /// # Example + /// + /// ``` + /// # use meilisearch_sdk::{client::*, indexes::*, settings::Settings}; + /// # + /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700"); + /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey"); + /// # + /// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async { + /// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap(); + /// # client.create_index("set_synonyms", None).await.unwrap().wait_for_completion(&client, None, None).await.unwrap(); + /// let mut index = client.index("set_synonyms"); + /// + /// let mut synonyms = std::collections::HashMap::new(); + /// synonyms.insert(String::from("wolverine"), vec![String::from("xmen"), String::from("logan")]); + /// synonyms.insert(String::from("logan"), vec![String::from("xmen"), String::from("wolverine")]); + /// synonyms.insert(String::from("wow"), vec![String::from("world of warcraft")]); + /// + /// let task = index.set_synonyms(&synonyms).await.unwrap(); + /// # index.delete().await.unwrap().wait_for_completion(&client, None, None).await.unwrap(); + /// # }); + /// ``` + pub async fn set_synonyms( + &self, + synonyms: &HashMap>, + ) -> Result { + self.client + .http_client + .request::<(), &HashMap>, TaskInfo>( + &format!( + "{}/indexes/{}/settings/synonyms", + self.client.host, self.uid + ), + Method::Put { + query: (), + body: synonyms, + }, + 202, + ) + .await + } + + /// Update [pagination](https://www.meilisearch.com/docs/reference/api/settings#pagination) of the [Index]. 
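// ----------------------------------------------------------------------------
// Editorial sketch, not part of this diff, illustrating the "updates are
// partial" note above: every `Settings` field is an `Option` skipped during
// serialization when `None`, so a PATCH carrying only one populated field
// leaves all other settings untouched. Same placeholder URL/key/index as the
// sketch above.
use meilisearch_sdk::{client::Client, errors::Error, settings::Settings};

#[tokio::main]
async fn main() -> Result<(), Error> {
    let client = Client::new("http://localhost:7700", Some("masterKey"))?;
    let index = client.index("movies");

    // The PATCH body is just {"stopWords": [...]}; synonyms, ranking rules,
    // etc. keep their current values.
    let settings = Settings::new().with_stop_words(["a", "the", "of"]);
    index
        .set_settings(&settings)
        .await?
        .wait_for_completion(&client, None, None)
        .await?;
    Ok(())
}
// ----------------------------------------------------------------------------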
+ /// + /// # Example + /// + /// ``` + /// # use meilisearch_sdk::{client::*, indexes::*, settings::*}; + /// # + /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700"); + /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey"); + /// # + /// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async { + /// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap(); + /// # client.create_index("set_pagination", None).await.unwrap().wait_for_completion(&client, None, None).await.unwrap(); + /// let mut index = client.index("set_pagination"); + /// + /// let pagination = PaginationSetting {max_total_hits:100}; + /// let task = index.set_pagination(pagination).await.unwrap(); + /// # index.delete().await.unwrap().wait_for_completion(&client, None, None).await.unwrap(); + /// # }); + /// ``` + pub async fn set_pagination(&self, pagination: PaginationSetting) -> Result { + self.client + .http_client + .request::<(), &PaginationSetting, TaskInfo>( + &format!( + "{}/indexes/{}/settings/pagination", + self.client.host, self.uid + ), + Method::Patch { + query: (), + body: &pagination, + }, + 202, + ) + .await + } + + /// Update [stop-words](https://www.meilisearch.com/docs/reference/api/settings#stop-words) of the [Index]. + /// + /// # Example + /// + /// ``` + /// # use meilisearch_sdk::{client::*, indexes::*, settings::Settings}; + /// # + /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700"); + /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey"); + /// # + /// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async { + /// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap(); + /// # client.create_index("set_stop_words", None).await.unwrap().wait_for_completion(&client, None, None).await.unwrap(); + /// let mut index = client.index("set_stop_words"); + /// + /// let stop_words = ["the", "of", "to"]; + /// let task = index.set_stop_words(&stop_words).await.unwrap(); + /// # index.delete().await.unwrap().wait_for_completion(&client, None, None).await.unwrap(); + /// # }); + /// ``` + pub async fn set_stop_words( + &self, + stop_words: impl IntoIterator>, + ) -> Result { + self.client + .http_client + .request::<(), Vec, TaskInfo>( + &format!( + "{}/indexes/{}/settings/stop-words", + self.client.host, self.uid + ), + Method::Put { + query: (), + body: stop_words + .into_iter() + .map(|v| v.as_ref().to_string()) + .collect(), + }, + 202, + ) + .await + } + + /// Update [ranking rules](https://www.meilisearch.com/docs/reference/api/settings#ranking-rules) of the [Index]. 
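// ----------------------------------------------------------------------------
// Editorial sketch, not part of this diff: `set_pagination` is the knob
// behind the `total_hits`/`total_pages` values asserted in the numbered
// pagination test earlier in this patch; Meilisearch only counts hits up to
// `max_total_hits` (default 1000). Placeholder URL/key/index as above.
use meilisearch_sdk::{client::Client, errors::Error, settings::PaginationSetting};

#[tokio::main]
async fn main() -> Result<(), Error> {
    let client = Client::new("http://localhost:7700", Some("masterKey"))?;
    let index = client.index("movies");

    // Allow exhaustive page counts for up to 5000 documents.
    index
        .set_pagination(PaginationSetting { max_total_hits: 5000 })
        .await?
        .wait_for_completion(&client, None, None)
        .await?;
    assert_eq!(index.get_pagination().await?.max_total_hits, 5000);
    Ok(())
}
// ----------------------------------------------------------------------------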
+ /// + /// # Example + /// + /// ``` + /// # use meilisearch_sdk::{client::*, indexes::*, settings::Settings}; + /// # + /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700"); + /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey"); + /// # + /// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async { + /// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap(); + /// # client.create_index("set_ranking_rules", None).await.unwrap().wait_for_completion(&client, None, None).await.unwrap(); + /// let mut index = client.index("set_ranking_rules"); + /// + /// let ranking_rules = [ + /// "words", + /// "typo", + /// "proximity", + /// "attribute", + /// "sort", + /// "exactness", + /// "release_date:asc", + /// "rank:desc", + /// ]; + /// let task = index.set_ranking_rules(ranking_rules).await.unwrap(); + /// # index.delete().await.unwrap().wait_for_completion(&client, None, None).await.unwrap(); + /// # }); + /// ``` + pub async fn set_ranking_rules( + &self, + ranking_rules: impl IntoIterator>, + ) -> Result { + self.client + .http_client + .request::<(), Vec, TaskInfo>( + &format!( + "{}/indexes/{}/settings/ranking-rules", + self.client.host, self.uid + ), + Method::Put { + query: (), + body: ranking_rules + .into_iter() + .map(|v| v.as_ref().to_string()) + .collect(), + }, + 202, + ) + .await + } + + /// Update [filterable attributes](https://www.meilisearch.com/docs/reference/api/settings#filterable-attributes) of the [Index]. + /// + /// # Example + /// + /// ``` + /// # use meilisearch_sdk::{client::*, indexes::*, settings::Settings}; + /// # + /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700"); + /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey"); + /// # + /// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async { + /// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap(); + /// # client.create_index("set_filterable_attributes", None).await.unwrap().wait_for_completion(&client, None, None).await.unwrap(); + /// let mut index = client.index("set_filterable_attributes"); + /// + /// let filterable_attributes = ["genre", "director"]; + /// let task = index.set_filterable_attributes(&filterable_attributes).await.unwrap(); + /// # index.delete().await.unwrap().wait_for_completion(&client, None, None).await.unwrap(); + /// # }); + /// ``` + pub async fn set_filterable_attributes( + &self, + filterable_attributes: impl IntoIterator>, + ) -> Result { + self.client + .http_client + .request::<(), Vec, TaskInfo>( + &format!( + "{}/indexes/{}/settings/filterable-attributes", + self.client.host, self.uid + ), + Method::Put { + query: (), + body: filterable_attributes + .into_iter() + .map(|v| v.as_ref().to_string()) + .collect(), + }, + 202, + ) + .await + } + + /// Update [sortable attributes](https://www.meilisearch.com/docs/reference/api/settings#sortable-attributes) of the [Index]. 
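// ----------------------------------------------------------------------------
// Editorial sketch, not part of this diff: `set_filterable_attributes` is the
// prerequisite for the `with_filter` calls in the search tests above; a
// filter on a field that was never declared filterable is rejected by the
// engine. Placeholder URL/key/index as above.
use meilisearch_sdk::{client::Client, errors::Error, search::SearchResults};
use serde_json::Value;

#[tokio::main]
async fn main() -> Result<(), Error> {
    let client = Client::new("http://localhost:7700", Some("masterKey"))?;
    let index = client.index("movies");

    // Declare the field first and wait until the index has re-processed.
    index
        .set_filterable_attributes(["kind"])
        .await?
        .wait_for_completion(&client, None, None)
        .await?;

    let results: SearchResults<Value> =
        index.search().with_filter("kind = text").execute().await?;
    println!("{} matching documents", results.hits.len());
    Ok(())
}
// ----------------------------------------------------------------------------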
+ /// + /// # Example + /// + /// ``` + /// # use meilisearch_sdk::{client::*, indexes::*, settings::Settings}; + /// # + /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700"); + /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey"); + /// # + /// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async { + /// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap(); + /// # client.create_index("set_sortable_attributes", None).await.unwrap().wait_for_completion(&client, None, None).await.unwrap(); + /// let mut index = client.index("set_sortable_attributes"); + /// + /// let sortable_attributes = ["genre", "director"]; + /// let task = index.set_sortable_attributes(&sortable_attributes).await.unwrap(); + /// # index.delete().await.unwrap().wait_for_completion(&client, None, None).await.unwrap(); + /// # }); + /// ``` + pub async fn set_sortable_attributes( + &self, + sortable_attributes: impl IntoIterator>, + ) -> Result { + self.client + .http_client + .request::<(), Vec, TaskInfo>( + &format!( + "{}/indexes/{}/settings/sortable-attributes", + self.client.host, self.uid + ), + Method::Put { + query: (), + body: sortable_attributes + .into_iter() + .map(|v| v.as_ref().to_string()) + .collect(), + }, + 202, + ) + .await + } + + /// Update the [distinct attribute](https://www.meilisearch.com/docs/reference/api/settings#distinct-attribute) of the [Index]. + /// + /// # Example + /// + /// ``` + /// # use meilisearch_sdk::{client::*, indexes::*, settings::Settings}; + /// # + /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700"); + /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey"); + /// # + /// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async { + /// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap(); + /// # client.create_index("set_distinct_attribute", None).await.unwrap().wait_for_completion(&client, None, None).await.unwrap(); + /// let mut index = client.index("set_distinct_attribute"); + /// + /// let task = index.set_distinct_attribute("movie_id").await.unwrap(); + /// # index.delete().await.unwrap().wait_for_completion(&client, None, None).await.unwrap(); + /// # }); + /// ``` + pub async fn set_distinct_attribute( + &self, + distinct_attribute: impl AsRef, + ) -> Result { + self.client + .http_client + .request::<(), String, TaskInfo>( + &format!( + "{}/indexes/{}/settings/distinct-attribute", + self.client.host, self.uid + ), + Method::Put { + query: (), + body: distinct_attribute.as_ref().to_string(), + }, + 202, + ) + .await + } + + /// Update [searchable attributes](https://www.meilisearch.com/docs/reference/api/settings#searchable-attributes) of the [Index]. 
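// ----------------------------------------------------------------------------
// Editorial sketch, not part of this diff: `set_distinct_attribute` makes the
// deduplication seen in `test_distinct` above (ten test documents collapse to
// one hit per distinct `kind` value) the index default instead of a per-query
// `with_distinct` option. Placeholder URL/key/index as above.
use meilisearch_sdk::{client::Client, errors::Error};

#[tokio::main]
async fn main() -> Result<(), Error> {
    let client = Client::new("http://localhost:7700", Some("masterKey"))?;
    let index = client.index("movies");

    index
        .set_distinct_attribute("kind")
        .await?
        .wait_for_completion(&client, None, None)
        .await?;
    assert_eq!(index.get_distinct_attribute().await?, Some("kind".into()));
    Ok(())
}
// ----------------------------------------------------------------------------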
+ /// + /// # Example + /// + /// ``` + /// # use meilisearch_sdk::{client::*, indexes::*, settings::Settings}; + /// # + /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700"); + /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey"); + /// # + /// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async { + /// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap(); + /// # client.create_index("set_searchable_attributes", None).await.unwrap().wait_for_completion(&client, None, None).await.unwrap(); + /// let mut index = client.index("set_searchable_attributes"); + /// + /// let task = index.set_searchable_attributes(["title", "description", "uid"]).await.unwrap(); + /// # index.delete().await.unwrap().wait_for_completion(&client, None, None).await.unwrap(); + /// # }); + /// ``` + pub async fn set_searchable_attributes( + &self, + searchable_attributes: impl IntoIterator>, + ) -> Result { + self.client + .http_client + .request::<(), Vec, TaskInfo>( + &format!( + "{}/indexes/{}/settings/searchable-attributes", + self.client.host, self.uid + ), + Method::Put { + query: (), + body: searchable_attributes + .into_iter() + .map(|v| v.as_ref().to_string()) + .collect(), + }, + 202, + ) + .await + } + + /// Update [displayed attributes](https://www.meilisearch.com/docs/reference/api/settings#displayed-attributes) of the [Index]. + /// + /// # Example + /// + /// ``` + /// # use meilisearch_sdk::{client::*, indexes::*, settings::Settings}; + /// # + /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700"); + /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey"); + /// # + /// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async { + /// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap(); + /// # client.create_index("set_displayed_attributes", None).await.unwrap().wait_for_completion(&client, None, None).await.unwrap(); + /// let mut index = client.index("set_displayed_attributes"); + /// + /// let task = index.set_displayed_attributes(["title", "description", "release_date", "rank", "poster"]).await.unwrap(); + /// # index.delete().await.unwrap().wait_for_completion(&client, None, None).await.unwrap(); + /// # }); + /// ``` + pub async fn set_displayed_attributes( + &self, + displayed_attributes: impl IntoIterator>, + ) -> Result { + self.client + .http_client + .request::<(), Vec, TaskInfo>( + &format!( + "{}/indexes/{}/settings/displayed-attributes", + self.client.host, self.uid + ), + Method::Put { + query: (), + body: displayed_attributes + .into_iter() + .map(|v| v.as_ref().to_string()) + .collect(), + }, + 202, + ) + .await + } + + /// Update [faceting](https://www.meilisearch.com/docs/reference/api/settings#faceting) settings of the [Index]. 
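// ----------------------------------------------------------------------------
// Editorial sketch, not part of this diff: for `set_searchable_attributes`
// the order is significant, since earlier fields weigh more under the
// `attribute` ranking rule. Placeholder URL/key/index as above; `title` and
// `description` are hypothetical fields.
use meilisearch_sdk::{client::Client, errors::Error};

#[tokio::main]
async fn main() -> Result<(), Error> {
    let client = Client::new("http://localhost:7700", Some("masterKey"))?;
    let index = client.index("movies");

    // Matches in `title` outrank matches in `description`.
    index
        .set_searchable_attributes(["title", "description"])
        .await?
        .wait_for_completion(&client, None, None)
        .await?;
    Ok(())
}
// ----------------------------------------------------------------------------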
+ /// + /// # Example + /// + /// ``` + /// # use meilisearch_sdk::{client::*, indexes::*, settings::Settings, settings::FacetingSettings}; + /// # + /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700"); + /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey"); + /// # + /// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async { + /// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap(); + /// # client.create_index("set_faceting", None).await.unwrap().wait_for_completion(&client, None, None).await.unwrap(); + /// let mut index = client.index("set_faceting"); + /// + /// let mut faceting = FacetingSettings { + /// max_values_per_facet: 12, + /// }; + /// + /// let task = index.set_faceting(&faceting).await.unwrap(); + /// # index.delete().await.unwrap().wait_for_completion(&client, None, None).await.unwrap(); + /// # }); + /// ``` + pub async fn set_faceting(&self, faceting: &FacetingSettings) -> Result { + self.client + .http_client + .request::<(), &FacetingSettings, TaskInfo>( + &format!( + "{}/indexes/{}/settings/faceting", + self.client.host, self.uid + ), + Method::Patch { + query: (), + body: faceting, + }, + 202, + ) + .await + } + + /// Update [dictionary](https://www.meilisearch.com/docs/reference/api/settings#dictionary) of the [Index]. + /// + /// # Example + /// + /// ``` + /// # use meilisearch_sdk::{client::*, indexes::*, settings::Settings}; + /// # + /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700"); + /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey"); + /// # + /// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async { + /// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap(); + /// # client.create_index("set_dictionary", None).await.unwrap().wait_for_completion(&client, None, None).await.unwrap(); + /// let mut index = client.index("set_dictionary"); + /// + /// let task = index.set_dictionary(["J. K.", "J. R. R."]).await.unwrap(); + /// # index.delete().await.unwrap().wait_for_completion(&client, None, None).await.unwrap(); + /// # }); + /// ``` + pub async fn set_dictionary( + &self, + dictionary: impl IntoIterator>, + ) -> Result { + self.client + .http_client + .request::<(), Vec, TaskInfo>( + &format!( + "{}/indexes/{}/settings/dictionary", + self.client.host, self.uid + ), + Method::Put { + query: (), + body: dictionary + .into_iter() + .map(|v| v.as_ref().to_string()) + .collect(), + }, + 202, + ) + .await + } + + /// Update [typo tolerance](https://www.meilisearch.com/docs/learn/configuration/typo_tolerance#typo-tolerance) settings of the [Index]. 
+    /// Update [typo tolerance](https://www.meilisearch.com/docs/learn/configuration/typo_tolerance#typo-tolerance) settings of the [Index].
+    ///
+    /// # Example
+    ///
+    /// ```
+    /// # use meilisearch_sdk::{client::*, indexes::*, settings::Settings, settings::{TypoToleranceSettings, MinWordSizeForTypos}};
+    /// #
+    /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700");
+    /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey");
+    /// #
+    /// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async {
+    /// let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap();
+    /// # client.create_index("set_typo_tolerance", None).await.unwrap().wait_for_completion(&client, None, None).await.unwrap();
+    /// let mut index = client.index("set_typo_tolerance");
+    ///
+    /// let typo_tolerance = TypoToleranceSettings{
+    ///     enabled: Some(true),
+    ///     disable_on_attributes: Some(vec!["title".to_string()]),
+    ///     disable_on_words: Some(vec![]),
+    ///     min_word_size_for_typos: Some(MinWordSizeForTypos::default()),
+    /// };
+    ///
+    /// let task = index.set_typo_tolerance(&typo_tolerance).await.unwrap();
+    /// # index.delete().await.unwrap().wait_for_completion(&client, None, None).await.unwrap();
+    /// # });
+    /// ```
+    pub async fn set_typo_tolerance(
+        &self,
+        typo_tolerance: &TypoToleranceSettings,
+    ) -> Result<TaskInfo, Error> {
+        self.client
+            .http_client
+            .request::<(), &TypoToleranceSettings, TaskInfo>(
+                &format!(
+                    "{}/indexes/{}/settings/typo-tolerance",
+                    self.client.host, self.uid
+                ),
+                Method::Patch {
+                    query: (),
+                    body: typo_tolerance,
+                },
+                202,
+            )
+            .await
+    }
+
+    /// Update [separator tokens](https://www.meilisearch.com/docs/reference/api/settings#separator-tokens) settings of the [Index].
+    ///
+    /// # Example
+    ///
+    /// ```
+    /// # use meilisearch_sdk::{client::*, indexes::*, settings::Settings, settings::{TypoToleranceSettings, MinWordSizeForTypos}};
+    /// #
+    /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700");
+    /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey");
+    /// #
+    /// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async {
+    /// let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap();
+    /// # client.create_index("set_separator_tokens", None).await.unwrap().wait_for_completion(&client, None, None).await.unwrap();
+    /// let mut index = client.index("set_separator_tokens");
+    ///
+    /// let separator_token: Vec<String> = vec!["@".to_string(), "#".to_string()];
+    ///
+    /// let task = index.set_separator_tokens(&separator_token).await.unwrap();
+    /// # index.delete().await.unwrap().wait_for_completion(&client, None, None).await.unwrap();
+    /// # });
+    /// ```
+    pub async fn set_separator_tokens(
+        &self,
+        separator_token: &Vec<String>,
+    ) -> Result<TaskInfo, Error> {
+        self.client
+            .http_client
+            .request::<(), &Vec<String>, TaskInfo>(
+                &format!(
+                    "{}/indexes/{}/settings/separator-tokens",
+                    self.client.host, self.uid
+                ),
+                Method::Put {
+                    query: (),
+                    body: separator_token,
+                },
+                202,
+            )
+            .await
+    }
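Not part of the diff: a sketch of updating typo tolerance partially via `..Default::default()`, the same pattern the tests at the end of this file use, combined with custom separator tokens. The field values and index name are illustrative.

```rust
use meilisearch_sdk::{client::Client, errors::Error, settings::TypoToleranceSettings};

// Sketch: disable typo tolerance on one exact-match field and register custom
// word separators, leaving every other setting untouched.
async fn tune_tokenizer(client: &Client) -> Result<(), Error> {
    let index = client.index("articles"); // hypothetical index name

    let typo_tolerance = TypoToleranceSettings {
        disable_on_attributes: Some(vec!["sku".to_string()]),
        ..Default::default()
    };
    index.set_typo_tolerance(&typo_tolerance).await?
        .wait_for_completion(client, None, None).await?;

    let separators: Vec<String> = vec!["#".to_string(), "@".to_string()];
    index.set_separator_tokens(&separators).await?
        .wait_for_completion(client, None, None).await?;
    Ok(())
}
```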
+    /// Update [non separator tokens](https://www.meilisearch.com/docs/reference/api/settings#non-separator-tokens) settings of the [Index].
+    ///
+    /// # Example
+    ///
+    /// ```
+    /// # use meilisearch_sdk::{client::*, indexes::*, settings::Settings, settings::{TypoToleranceSettings, MinWordSizeForTypos}};
+    /// #
+    /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700");
+    /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey");
+    /// #
+    /// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async {
+    /// let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap();
+    /// # client.create_index("set_non_separator_tokens", None).await.unwrap().wait_for_completion(&client, None, None).await.unwrap();
+    /// let mut index = client.index("set_non_separator_tokens");
+    ///
+    /// let non_separator_token: Vec<String> = vec!["@".to_string(), "#".to_string()];
+    ///
+    /// let task = index.set_non_separator_tokens(&non_separator_token).await.unwrap();
+    /// # index.delete().await.unwrap().wait_for_completion(&client, None, None).await.unwrap();
+    /// # });
+    /// ```
+    pub async fn set_non_separator_tokens(
+        &self,
+        non_separator_token: &Vec<String>,
+    ) -> Result<TaskInfo, Error> {
+        self.client
+            .http_client
+            .request::<(), &Vec<String>, TaskInfo>(
+                &format!(
+                    "{}/indexes/{}/settings/non-separator-tokens",
+                    self.client.host, self.uid
+                ),
+                Method::Put {
+                    query: (),
+                    body: non_separator_token,
+                },
+                202,
+            )
+            .await
+    }
+
+    /// Update [proximity-precision](https://www.meilisearch.com/docs/learn/configuration/proximity-precision) settings of the [Index].
+    ///
+    /// # Example
+    ///
+    /// ```
+    /// # use meilisearch_sdk::{client::*, indexes::*, settings::Settings};
+    /// #
+    /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700");
+    /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey");
+    /// #
+    /// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async {
+    /// let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap();
+    /// # client.create_index("set_proximity_precision", None).await.unwrap().wait_for_completion(&client, None, None).await.unwrap();
+    /// let mut index = client.index("set_proximity_precision");
+    ///
+    /// let task = index.set_proximity_precision("byWord".to_string()).await.unwrap();
+    /// # index.delete().await.unwrap().wait_for_completion(&client, None, None).await.unwrap();
+    /// # });
+    /// ```
+    pub async fn set_proximity_precision(
+        &self,
+        proximity_precision: String,
+    ) -> Result<TaskInfo, Error> {
+        self.client
+            .http_client
+            .request::<(), String, TaskInfo>(
+                &format!(
+                    "{}/indexes/{}/settings/proximity-precision",
+                    self.client.host, self.uid
+                ),
+                Method::Put {
+                    query: (),
+                    body: proximity_precision,
+                },
+                202,
+            )
+            .await
+    }
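Not part of the diff: the proximity-precision endpoint takes a plain string. Judging by the tests at the end of this file, the two values in play are `"byWord"` (the default, more precise ranking) and `"byAttribute"` (faster indexing); a minimal sketch with an illustrative index name follows.

```rust
use meilisearch_sdk::{client::Client, errors::Error};

// Sketch: trade a little ranking accuracy for indexing speed.
async fn speed_up_indexing(client: &Client) -> Result<(), Error> {
    let index = client.index("logs"); // hypothetical index name
    index
        .set_proximity_precision("byAttribute".to_string())
        .await?
        .wait_for_completion(client, None, None)
        .await?;
    Ok(())
}
```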
+    /// Update [search cutoff](https://www.meilisearch.com/docs/reference/api/settings#search-cutoff) settings of the [Index].
+    ///
+    /// # Example
+    ///
+    /// ```
+    /// # use meilisearch_sdk::{client::*, indexes::*, settings::Settings};
+    /// #
+    /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700");
+    /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey");
+    /// #
+    /// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async {
+    /// let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap();
+    /// # client.create_index("update_search_cutoff_ms", None).await.unwrap().wait_for_completion(&client, None, None).await.unwrap();
+    /// let mut index = client.index("update_search_cutoff_ms");
+    ///
+    /// let task = index.set_search_cutoff_ms(Some(150)).await.unwrap();
+    /// # index.delete().await.unwrap().wait_for_completion(&client, None, None).await.unwrap();
+    /// # });
+    /// ```
+    pub async fn set_search_cutoff_ms(&self, ms: Option<u32>) -> Result<TaskInfo, Error> {
+        self.client
+            .http_client
+            .request::<(), Option<u32>, TaskInfo>(
+                &format!(
+                    "{}/indexes/{}/settings/search-cutoff-ms",
+                    self.client.host, self.uid
+                ),
+                Method::Put {
+                    body: ms,
+                    query: (),
+                },
+                202,
+            )
+            .await
+    }
+
+    /// Update [localized attributes](https://www.meilisearch.com/docs/reference/api/settings#localized-attributes-object) settings of the [Index].
+    ///
+    /// # Example
+    ///
+    /// ```
+    /// # use meilisearch_sdk::{client::*, indexes::*, settings::Settings, settings::{LocalizedAttributes}};
+    /// #
+    /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700");
+    /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey");
+    /// #
+    /// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async {
+    /// let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap();
+    /// # client.create_index("set_localized_attributes", None).await.unwrap().wait_for_completion(&client, None, None).await.unwrap();
+    /// let mut index = client.index("set_localized_attributes");
+    ///
+    /// let localized_attributes = vec![LocalizedAttributes {
+    ///     locales: vec!["jpn".to_string()],
+    ///     attribute_patterns: vec!["*_ja".to_string()],
+    /// }];
+    ///
+    /// let task = index.set_localized_attributes(&localized_attributes).await.unwrap();
+    /// # index.delete().await.unwrap().wait_for_completion(&client, None, None).await.unwrap();
+    /// # });
+    /// ```
+    pub async fn set_localized_attributes(
+        &self,
+        localized_attributes: &Vec<LocalizedAttributes>,
+    ) -> Result<TaskInfo, Error> {
+        self.client
+            .http_client
+            .request::<(), &Vec<LocalizedAttributes>, TaskInfo>(
+                &format!(
+                    "{}/indexes/{}/settings/localized-attributes",
+                    self.client.host, self.uid
+                ),
+                Method::Put {
+                    query: (),
+                    body: localized_attributes,
+                },
+                202,
+            )
+            .await
+    }
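Not part of the diff: a sketch combining the two settings above, bounding query latency and pinning language detection for specific fields. The cutoff value, field patterns, and index name are illustrative.

```rust
use meilisearch_sdk::{client::Client, errors::Error, settings::LocalizedAttributes};

// Sketch: cap queries at 100ms and declare *_ja fields as Japanese so
// Meilisearch does not have to guess the locale per document.
async fn localize(client: &Client) -> Result<(), Error> {
    let index = client.index("docs_ja"); // hypothetical index name

    index.set_search_cutoff_ms(Some(100)).await?
        .wait_for_completion(client, None, None).await?;

    let localized = vec![LocalizedAttributes {
        locales: vec!["jpn".to_string()],
        attribute_patterns: vec!["*_ja".to_string()],
    }];
    index.set_localized_attributes(&localized).await?
        .wait_for_completion(client, None, None).await?;
    Ok(())
}
```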
+    /// Reset [Settings] of the [Index].
+    ///
+    /// All settings will be reset to their [default value](https://www.meilisearch.com/docs/reference/api/settings#reset-settings).
+    ///
+    /// # Example
+    ///
+    /// ```
+    /// # use meilisearch_sdk::{client::*, indexes::*, settings::Settings};
+    /// #
+    /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700");
+    /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey");
+    /// #
+    /// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async {
+    /// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap();
+    /// # client.create_index("reset_settings", None).await.unwrap().wait_for_completion(&client, None, None).await.unwrap();
+    /// let mut index = client.index("reset_settings");
+    ///
+    /// let task = index.reset_settings().await.unwrap();
+    /// # index.delete().await.unwrap().wait_for_completion(&client, None, None).await.unwrap();
+    /// # });
+    /// ```
+    pub async fn reset_settings(&self) -> Result<TaskInfo, Error> {
+        self.client
+            .http_client
+            .request::<(), (), TaskInfo>(
+                &format!("{}/indexes/{}/settings", self.client.host, self.uid),
+                Method::Delete { query: () },
+                202,
+            )
+            .await
+    }
+
+    /// Reset [synonyms](https://www.meilisearch.com/docs/reference/api/settings#synonyms) of the [Index].
+    ///
+    /// # Example
+    ///
+    /// ```
+    /// # use meilisearch_sdk::{client::*, indexes::*, settings::Settings};
+    /// #
+    /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700");
+    /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey");
+    /// #
+    /// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async {
+    /// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap();
+    /// # client.create_index("reset_synonyms", None).await.unwrap().wait_for_completion(&client, None, None).await.unwrap();
+    /// let mut index = client.index("reset_synonyms");
+    ///
+    /// let task = index.reset_synonyms().await.unwrap();
+    /// # index.delete().await.unwrap().wait_for_completion(&client, None, None).await.unwrap();
+    /// # });
+    /// ```
+    pub async fn reset_synonyms(&self) -> Result<TaskInfo, Error> {
+        self.client
+            .http_client
+            .request::<(), (), TaskInfo>(
+                &format!(
+                    "{}/indexes/{}/settings/synonyms",
+                    self.client.host, self.uid
+                ),
+                Method::Delete { query: () },
+                202,
+            )
+            .await
+    }
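Not part of the diff: every `reset_*` method in the rest of this file shares one shape, a parameterless DELETE on the corresponding settings route that answers 202 with a `TaskInfo`. Resetting is therefore asynchronous too, so wait for the task before relying on defaults; a minimal sketch with an illustrative index name:

```rust
use meilisearch_sdk::{client::Client, errors::Error};

// Sketch: wipe all index settings back to server defaults in one call.
async fn back_to_defaults(client: &Client) -> Result<(), Error> {
    let index = client.index("movies"); // hypothetical index name
    index
        .reset_settings() // clears every setting at once
        .await?
        .wait_for_completion(client, None, None)
        .await?;
    Ok(())
}
```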
+    /// Reset [pagination](https://www.meilisearch.com/docs/learn/configuration/settings#pagination) of the [Index].
+    ///
+    /// # Example
+    ///
+    /// ```
+    /// # use meilisearch_sdk::{client::*, indexes::*, settings::Settings};
+    /// #
+    /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700");
+    /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey");
+    /// #
+    /// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async {
+    /// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap();
+    /// # client.create_index("reset_pagination", None).await.unwrap().wait_for_completion(&client, None, None).await.unwrap();
+    /// let mut index = client.index("reset_pagination");
+    ///
+    /// let task = index.reset_pagination().await.unwrap();
+    /// # index.delete().await.unwrap().wait_for_completion(&client, None, None).await.unwrap();
+    /// # });
+    /// ```
+    pub async fn reset_pagination(&self) -> Result<TaskInfo, Error> {
+        self.client
+            .http_client
+            .request::<(), (), TaskInfo>(
+                &format!(
+                    "{}/indexes/{}/settings/pagination",
+                    self.client.host, self.uid
+                ),
+                Method::Delete { query: () },
+                202,
+            )
+            .await
+    }
+
+    /// Reset [stop-words](https://www.meilisearch.com/docs/reference/api/settings#stop-words) of the [Index].
+    ///
+    /// # Example
+    ///
+    /// ```
+    /// # use meilisearch_sdk::{client::*, indexes::*, settings::Settings};
+    /// #
+    /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700");
+    /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey");
+    /// #
+    /// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async {
+    /// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap();
+    /// # client.create_index("reset_stop_words", None).await.unwrap().wait_for_completion(&client, None, None).await.unwrap();
+    /// let mut index = client.index("reset_stop_words");
+    ///
+    /// let task = index.reset_stop_words().await.unwrap();
+    /// # index.delete().await.unwrap().wait_for_completion(&client, None, None).await.unwrap();
+    /// # });
+    /// ```
+    pub async fn reset_stop_words(&self) -> Result<TaskInfo, Error> {
+        self.client
+            .http_client
+            .request::<(), (), TaskInfo>(
+                &format!(
+                    "{}/indexes/{}/settings/stop-words",
+                    self.client.host, self.uid
+                ),
+                Method::Delete { query: () },
+                202,
+            )
+            .await
+    }
+    /// Reset [ranking rules](https://www.meilisearch.com/docs/learn/core_concepts/relevancy#ranking-rules) of the [Index] to default value.
+    ///
+    /// **Default value: `["words", "typo", "proximity", "attribute", "sort", "exactness"]`.**
+    ///
+    /// # Example
+    ///
+    /// ```
+    /// # use meilisearch_sdk::{client::*, indexes::*, settings::Settings};
+    /// #
+    /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700");
+    /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey");
+    /// #
+    /// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async {
+    /// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap();
+    /// # client.create_index("reset_ranking_rules", None).await.unwrap().wait_for_completion(&client, None, None).await.unwrap();
+    /// let mut index = client.index("reset_ranking_rules");
+    ///
+    /// let task = index.reset_ranking_rules().await.unwrap();
+    /// # index.delete().await.unwrap().wait_for_completion(&client, None, None).await.unwrap();
+    /// # });
+    /// ```
+    pub async fn reset_ranking_rules(&self) -> Result<TaskInfo, Error> {
+        self.client
+            .http_client
+            .request::<(), (), TaskInfo>(
+                &format!(
+                    "{}/indexes/{}/settings/ranking-rules",
+                    self.client.host, self.uid
+                ),
+                Method::Delete { query: () },
+                202,
+            )
+            .await
+    }
+
+    /// Reset [filterable attributes](https://www.meilisearch.com/docs/reference/api/settings#filterable-attributes) of the [Index].
+    ///
+    /// # Example
+    ///
+    /// ```
+    /// # use meilisearch_sdk::{client::*, indexes::*, settings::Settings};
+    /// #
+    /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700");
+    /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey");
+    /// #
+    /// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async {
+    /// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap();
+    /// # client.create_index("reset_filterable_attributes", None).await.unwrap().wait_for_completion(&client, None, None).await.unwrap();
+    /// let mut index = client.index("reset_filterable_attributes");
+    ///
+    /// let task = index.reset_filterable_attributes().await.unwrap();
+    /// # index.delete().await.unwrap().wait_for_completion(&client, None, None).await.unwrap();
+    /// # });
+    /// ```
+    pub async fn reset_filterable_attributes(&self) -> Result<TaskInfo, Error> {
+        self.client
+            .http_client
+            .request::<(), (), TaskInfo>(
+                &format!(
+                    "{}/indexes/{}/settings/filterable-attributes",
+                    self.client.host, self.uid
+                ),
+                Method::Delete { query: () },
+                202,
+            )
+            .await
+    }
+    /// Reset [sortable attributes](https://www.meilisearch.com/docs/reference/api/settings#sortable-attributes) of the [Index].
+    ///
+    /// # Example
+    ///
+    /// ```
+    /// # use meilisearch_sdk::{client::*, indexes::*, settings::Settings};
+    /// #
+    /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700");
+    /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey");
+    /// #
+    /// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async {
+    /// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap();
+    /// # client.create_index("reset_sortable_attributes", None).await.unwrap().wait_for_completion(&client, None, None).await.unwrap();
+    /// let mut index = client.index("reset_sortable_attributes");
+    ///
+    /// let task = index.reset_sortable_attributes().await.unwrap();
+    /// # index.delete().await.unwrap().wait_for_completion(&client, None, None).await.unwrap();
+    /// # });
+    /// ```
+    pub async fn reset_sortable_attributes(&self) -> Result<TaskInfo, Error> {
+        self.client
+            .http_client
+            .request::<(), (), TaskInfo>(
+                &format!(
+                    "{}/indexes/{}/settings/sortable-attributes",
+                    self.client.host, self.uid
+                ),
+                Method::Delete { query: () },
+                202,
+            )
+            .await
+    }
+
+    /// Reset the [distinct attribute](https://www.meilisearch.com/docs/reference/api/settings#distinct-attribute) of the [Index].
+    ///
+    /// # Example
+    ///
+    /// ```
+    /// # use meilisearch_sdk::{client::*, indexes::*, settings::Settings};
+    /// #
+    /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700");
+    /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey");
+    /// #
+    /// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async {
+    /// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap();
+    /// # client.create_index("reset_distinct_attribute", None).await.unwrap().wait_for_completion(&client, None, None).await.unwrap();
+    /// let mut index = client.index("reset_distinct_attribute");
+    ///
+    /// let task = index.reset_distinct_attribute().await.unwrap();
+    /// # index.delete().await.unwrap().wait_for_completion(&client, None, None).await.unwrap();
+    /// # });
+    /// ```
+    pub async fn reset_distinct_attribute(&self) -> Result<TaskInfo, Error> {
+        self.client
+            .http_client
+            .request::<(), (), TaskInfo>(
+                &format!(
+                    "{}/indexes/{}/settings/distinct-attribute",
+                    self.client.host, self.uid
+                ),
+                Method::Delete { query: () },
+                202,
+            )
+            .await
+    }
+    /// Reset [searchable attributes](https://www.meilisearch.com/docs/learn/configuration/displayed_searchable_attributes#searchable-fields) of
+    /// the [Index] (enable all attributes).
+    ///
+    /// # Example
+    ///
+    /// ```
+    /// # use meilisearch_sdk::{client::*, indexes::*, settings::Settings};
+    /// #
+    /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700");
+    /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey");
+    /// #
+    /// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async {
+    /// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap();
+    /// # client.create_index("reset_searchable_attributes", None).await.unwrap().wait_for_completion(&client, None, None).await.unwrap();
+    /// let mut index = client.index("reset_searchable_attributes");
+    ///
+    /// let task = index.reset_searchable_attributes().await.unwrap();
+    /// # index.delete().await.unwrap().wait_for_completion(&client, None, None).await.unwrap();
+    /// # });
+    /// ```
+    pub async fn reset_searchable_attributes(&self) -> Result<TaskInfo, Error> {
+        self.client
+            .http_client
+            .request::<(), (), TaskInfo>(
+                &format!(
+                    "{}/indexes/{}/settings/searchable-attributes",
+                    self.client.host, self.uid
+                ),
+                Method::Delete { query: () },
+                202,
+            )
+            .await
+    }
+
+    /// Reset [displayed attributes](https://www.meilisearch.com/docs/reference/api/settings#displayed-attributes) of the [Index] (enable all attributes).
+    ///
+    /// # Example
+    ///
+    /// ```
+    /// # use meilisearch_sdk::{client::*, indexes::*, settings::Settings};
+    /// #
+    /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700");
+    /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey");
+    /// #
+    /// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async {
+    /// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap();
+    /// # client.create_index("reset_displayed_attributes", None).await.unwrap().wait_for_completion(&client, None, None).await.unwrap();
+    /// let mut index = client.index("reset_displayed_attributes");
+    ///
+    /// let task = index.reset_displayed_attributes().await.unwrap();
+    /// # index.delete().await.unwrap().wait_for_completion(&client, None, None).await.unwrap();
+    /// # });
+    /// ```
+    pub async fn reset_displayed_attributes(&self) -> Result<TaskInfo, Error> {
+        self.client
+            .http_client
+            .request::<(), (), TaskInfo>(
+                &format!(
+                    "{}/indexes/{}/settings/displayed-attributes",
+                    self.client.host, self.uid
+                ),
+                Method::Delete { query: () },
+                202,
+            )
+            .await
+    }
+    /// Reset [faceting](https://www.meilisearch.com/docs/reference/api/settings#faceting) settings of the [Index].
+    ///
+    /// # Example
+    ///
+    /// ```
+    /// # use meilisearch_sdk::{client::*, indexes::*, settings::Settings};
+    /// #
+    /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700");
+    /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey");
+    /// #
+    /// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async {
+    /// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap();
+    /// # client.create_index("reset_faceting", None).await.unwrap().wait_for_completion(&client, None, None).await.unwrap();
+    /// let mut index = client.index("reset_faceting");
+    ///
+    /// let task = index.reset_faceting().await.unwrap();
+    /// # index.delete().await.unwrap().wait_for_completion(&client, None, None).await.unwrap();
+    /// # });
+    /// ```
+    pub async fn reset_faceting(&self) -> Result<TaskInfo, Error> {
+        self.client
+            .http_client
+            .request::<(), (), TaskInfo>(
+                &format!(
+                    "{}/indexes/{}/settings/faceting",
+                    self.client.host, self.uid
+                ),
+                Method::Delete { query: () },
+                202,
+            )
+            .await
+    }
+
+    /// Reset [dictionary](https://www.meilisearch.com/docs/reference/api/settings#dictionary) of the [Index].
+    ///
+    /// # Example
+    ///
+    /// ```
+    /// # use meilisearch_sdk::{client::*, indexes::*, settings::Settings};
+    /// #
+    /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700");
+    /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey");
+    /// #
+    /// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async {
+    /// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap();
+    /// # client.create_index("reset_dictionary", None).await.unwrap().wait_for_completion(&client, None, None).await.unwrap();
+    /// let mut index = client.index("reset_dictionary");
+    ///
+    /// let task = index.reset_dictionary().await.unwrap();
+    /// # index.delete().await.unwrap().wait_for_completion(&client, None, None).await.unwrap();
+    /// # });
+    /// ```
+    pub async fn reset_dictionary(&self) -> Result<TaskInfo, Error> {
+        self.client
+            .http_client
+            .request::<(), (), TaskInfo>(
+                &format!(
+                    "{}/indexes/{}/settings/dictionary",
+                    self.client.host, self.uid
+                ),
+                Method::Delete { query: () },
+                202,
+            )
+            .await
+    }
+    /// Reset [typo tolerance](https://www.meilisearch.com/docs/learn/configuration/typo_tolerance#typo-tolerance) settings of the [Index].
+    ///
+    /// # Example
+    ///
+    /// ```
+    /// # use meilisearch_sdk::{client::*, indexes::*, settings::Settings};
+    /// #
+    /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700");
+    /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey");
+    /// #
+    /// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async {
+    /// let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap();
+    /// # client.create_index("reset_typo_tolerance", None).await.unwrap().wait_for_completion(&client, None, None).await.unwrap();
+    /// let mut index = client.index("reset_typo_tolerance");
+    ///
+    /// let task = index.reset_typo_tolerance().await.unwrap();
+    /// # index.delete().await.unwrap().wait_for_completion(&client, None, None).await.unwrap();
+    /// # });
+    /// ```
+    pub async fn reset_typo_tolerance(&self) -> Result<TaskInfo, Error> {
+        self.client
+            .http_client
+            .request::<(), (), TaskInfo>(
+                &format!(
+                    "{}/indexes/{}/settings/typo-tolerance",
+                    self.client.host, self.uid
+                ),
+                Method::Delete { query: () },
+                202,
+            )
+            .await
+    }
+
+    /// Reset [proximity precision](https://www.meilisearch.com/docs/learn/configuration/proximity-precision) settings of the [Index].
+    ///
+    /// # Example
+    ///
+    /// ```
+    /// # use meilisearch_sdk::{client::*, indexes::*, settings::Settings};
+    /// #
+    /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700");
+    /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey");
+    /// #
+    /// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async {
+    /// let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap();
+    /// # client.create_index("reset_proximity_precision", None).await.unwrap().wait_for_completion(&client, None, None).await.unwrap();
+    /// let mut index = client.index("reset_proximity_precision");
+    ///
+    /// let task = index.reset_proximity_precision().await.unwrap();
+    /// # index.delete().await.unwrap().wait_for_completion(&client, None, None).await.unwrap();
+    /// # });
+    /// ```
+    pub async fn reset_proximity_precision(&self) -> Result<TaskInfo, Error> {
+        self.client
+            .http_client
+            .request::<(), (), TaskInfo>(
+                &format!(
+                    "{}/indexes/{}/settings/proximity-precision",
+                    self.client.host, self.uid
+                ),
+                Method::Delete { query: () },
+                202,
+            )
+            .await
+    }
+    /// Reset [search cutoff](https://www.meilisearch.com/docs/reference/api/settings#search-cutoff) settings of the [Index].
+    ///
+    /// # Example
+    ///
+    /// ```
+    /// # use meilisearch_sdk::{client::*, indexes::*, settings::Settings};
+    /// #
+    /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700");
+    /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey");
+    /// #
+    /// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async {
+    /// let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap();
+    /// # client.create_index("reset_search_cutoff_ms", None).await.unwrap().wait_for_completion(&client, None, None).await.unwrap();
+    /// let mut index = client.index("reset_search_cutoff_ms");
+    ///
+    /// let task = index.reset_search_cutoff_ms().await.unwrap();
+    /// # index.delete().await.unwrap().wait_for_completion(&client, None, None).await.unwrap();
+    /// # });
+    /// ```
+    pub async fn reset_search_cutoff_ms(&self) -> Result<TaskInfo, Error> {
+        self.client
+            .http_client
+            .request::<(), (), TaskInfo>(
+                &format!(
+                    "{}/indexes/{}/settings/search-cutoff-ms",
+                    self.client.host, self.uid
+                ),
+                Method::Delete { query: () },
+                202,
+            )
+            .await
+    }
+
+    /// Reset [separator tokens](https://www.meilisearch.com/docs/reference/api/settings#separator-tokens) settings of the [Index].
+    ///
+    /// # Example
+    ///
+    /// ```
+    /// # use meilisearch_sdk::{client::*, indexes::*, settings::Settings};
+    /// #
+    /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700");
+    /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey");
+    /// #
+    /// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async {
+    /// let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap();
+    /// # client.create_index("reset_separator_tokens", None).await.unwrap().wait_for_completion(&client, None, None).await.unwrap();
+    /// let mut index = client.index("reset_separator_tokens");
+    ///
+    /// let task = index.reset_separator_tokens().await.unwrap();
+    /// # index.delete().await.unwrap().wait_for_completion(&client, None, None).await.unwrap();
+    /// # });
+    /// ```
+    pub async fn reset_separator_tokens(&self) -> Result<TaskInfo, Error> {
+        self.client
+            .http_client
+            .request::<(), (), TaskInfo>(
+                &format!(
+                    "{}/indexes/{}/settings/separator-tokens",
+                    self.client.host, self.uid
+                ),
+                Method::Delete { query: () },
+                202,
+            )
+            .await
+    }
+    /// Reset [non separator tokens](https://www.meilisearch.com/docs/reference/api/settings#non-separator-tokens) settings of the [Index].
+    ///
+    /// # Example
+    ///
+    /// ```
+    /// # use meilisearch_sdk::{client::*, indexes::*, settings::Settings};
+    /// #
+    /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700");
+    /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey");
+    /// #
+    /// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async {
+    /// let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap();
+    /// # client.create_index("reset_non_separator_tokens", None).await.unwrap().wait_for_completion(&client, None, None).await.unwrap();
+    /// let mut index = client.index("reset_non_separator_tokens");
+    ///
+    /// let task = index.reset_non_separator_tokens().await.unwrap();
+    /// # index.delete().await.unwrap().wait_for_completion(&client, None, None).await.unwrap();
+    /// # });
+    /// ```
+    pub async fn reset_non_separator_tokens(&self) -> Result<TaskInfo, Error> {
+        self.client
+            .http_client
+            .request::<(), (), TaskInfo>(
+                &format!(
+                    "{}/indexes/{}/settings/non-separator-tokens",
+                    self.client.host, self.uid
+                ),
+                Method::Delete { query: () },
+                202,
+            )
+            .await
+    }
+
+    /// Reset [localized attributes](https://www.meilisearch.com/docs/reference/api/settings#localized-attributes-object) settings of the [Index].
+    ///
+    /// # Example
+    ///
+    /// ```
+    /// # use meilisearch_sdk::{client::*, indexes::*, settings::Settings};
+    /// #
+    /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700");
+    /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey");
+    /// #
+    /// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async {
+    /// let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap();
+    /// # client.create_index("reset_localized_attributes", None).await.unwrap().wait_for_completion(&client, None, None).await.unwrap();
+    /// let index = client.index("reset_localized_attributes");
+    ///
+    /// let task = index.reset_localized_attributes().await.unwrap();
+    /// # index.delete().await.unwrap().wait_for_completion(&client, None, None).await.unwrap();
+    /// # });
+    /// ```
+    pub async fn reset_localized_attributes(&self) -> Result<TaskInfo, Error> {
+        self.client
+            .http_client
+            .request::<(), (), TaskInfo>(
+                &format!(
+                    "{}/indexes/{}/settings/localized-attributes",
+                    self.client.host, self.uid
+                ),
+                Method::Delete { query: () },
+                202,
+            )
+            .await
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    use crate::client::*;
+    use meilisearch_test_macro::meilisearch_test;
+
+    #[meilisearch_test]
+    async fn test_set_faceting_settings(client: Client, index: Index) {
+        let faceting = FacetingSettings {
+            max_values_per_facet: 5,
+        };
+        let settings = Settings::new().with_faceting(&faceting);
+
+        let task_info = index.set_settings(&settings).await.unwrap();
+        client.wait_for_task(task_info, None, None).await.unwrap();
+
+        let res = index.get_faceting().await.unwrap();
+
+        assert_eq!(faceting, res);
+    }
+
+    #[meilisearch_test]
+    async fn test_get_faceting(index: Index) {
+        let faceting = FacetingSettings {
+            max_values_per_facet: 100,
+        };
+
+        let res = index.get_faceting().await.unwrap();
+
+        assert_eq!(faceting, res);
+    }
+
+    #[meilisearch_test]
+    async fn test_set_faceting(client: Client, index: Index) {
+        let faceting = FacetingSettings {
+            max_values_per_facet: 5,
+        };
+        let task_info = index.set_faceting(&faceting).await.unwrap();
+        client.wait_for_task(task_info, None, None).await.unwrap();
+
+        let res = index.get_faceting().await.unwrap();
+
+        assert_eq!(faceting, res);
+    }
+
+    #[meilisearch_test]
+    async fn test_reset_faceting(client: Client, index: Index) {
+        let task_info = index.reset_faceting().await.unwrap();
+        client.wait_for_task(task_info, None, None).await.unwrap();
+        let faceting = FacetingSettings {
+            max_values_per_facet: 100,
+        };
+
+        let res = index.get_faceting().await.unwrap();
+
+        assert_eq!(faceting, res);
+    }
+
+    #[meilisearch_test]
+    async fn test_get_dictionary(index: Index) {
+        let dictionary: Vec<String> = vec![];
+
+        let res = index.get_dictionary().await.unwrap();
+
+        assert_eq!(dictionary, res);
+    }
+
+    #[meilisearch_test]
+    async fn test_set_dictionary(client: Client, index: Index) {
+        let dictionary: Vec<&str> = vec!["J. K.", "J. R. R."];
+        let task_info = index.set_dictionary(&dictionary).await.unwrap();
+        client.wait_for_task(task_info, None, None).await.unwrap();
+
+        let res = index.get_dictionary().await.unwrap();
+
+        assert_eq!(dictionary, res);
+    }
+
+    #[meilisearch_test]
+    async fn test_set_empty_dictionary(client: Client, index: Index) {
+        let dictionary: Vec<&str> = vec![];
+        let task_info = index.set_dictionary(&dictionary).await.unwrap();
+        client.wait_for_task(task_info, None, None).await.unwrap();
+
+        let res = index.get_dictionary().await.unwrap();
+
+        assert_eq!(dictionary, res);
+    }
+
+    #[meilisearch_test]
+    async fn test_reset_dictionary(client: Client, index: Index) {
+        let dictionary: Vec<&str> = vec![];
+        let task_info = index.reset_dictionary().await.unwrap();
+        client.wait_for_task(task_info, None, None).await.unwrap();
+
+        let res = index.get_dictionary().await.unwrap();
+
+        assert_eq!(dictionary, res);
+    }
+
+    #[meilisearch_test]
+    async fn test_get_pagination(index: Index) {
+        let pagination = PaginationSetting {
+            max_total_hits: 1000,
+        };
+
+        let res = index.get_pagination().await.unwrap();
+
+        assert_eq!(pagination, res);
+    }
+
+    #[meilisearch_test]
+    async fn test_set_pagination(index: Index) {
+        let pagination = PaginationSetting { max_total_hits: 11 };
+        let task = index.set_pagination(pagination).await.unwrap();
+        index.wait_for_task(task, None, None).await.unwrap();
+
+        let res = index.get_pagination().await.unwrap();
+
+        assert_eq!(pagination, res);
+    }
+
+    #[meilisearch_test]
+    async fn test_reset_pagination(index: Index) {
+        let pagination = PaginationSetting { max_total_hits: 10 };
+        let default = PaginationSetting {
+            max_total_hits: 1000,
+        };
+
+        let task = index.set_pagination(pagination).await.unwrap();
+        index.wait_for_task(task, None, None).await.unwrap();
+
+        let reset_task = index.reset_pagination().await.unwrap();
+        index.wait_for_task(reset_task, None, None).await.unwrap();
+
+        let res = index.get_pagination().await.unwrap();
+
+        assert_eq!(default, res);
+    }
+
+    #[meilisearch_test]
+    async fn test_get_typo_tolerance(index: Index) {
+        let expected = TypoToleranceSettings {
+            enabled: Some(true),
+            disable_on_attributes: Some(vec![]),
+            disable_on_words: Some(vec![]),
+            min_word_size_for_typos: Some(MinWordSizeForTypos {
+                one_typo: Some(5),
+                two_typos: Some(9),
+            }),
+        };
+
+        let res = index.get_typo_tolerance().await.unwrap();
+
+        assert_eq!(expected, res);
+    }
+
+    #[meilisearch_test]
+    async fn test_set_typo_tolerance(client: Client, index: Index) {
+        let expected = TypoToleranceSettings {
+            enabled: Some(true),
+            disable_on_attributes: Some(vec!["title".to_string()]),
+            disable_on_words: Some(vec![]),
+            min_word_size_for_typos: Some(MinWordSizeForTypos {
+                one_typo: Some(5),
+                two_typos: Some(9),
+            }),
+        };
+
+        let typo_tolerance = TypoToleranceSettings {
+            disable_on_attributes: Some(vec!["title".to_string()]),
+            ..Default::default()
+        };
+
+        let task_info = index.set_typo_tolerance(&typo_tolerance).await.unwrap();
+        client.wait_for_task(task_info, None, None).await.unwrap();
+
+        let res = index.get_typo_tolerance().await.unwrap();
+
+        assert_eq!(expected, res);
+    }
+
+    #[meilisearch_test]
+    async fn test_reset_typo_tolerance(index: Index) {
+        let expected = TypoToleranceSettings {
+            enabled: Some(true),
+            disable_on_attributes: Some(vec![]),
+            disable_on_words: Some(vec![]),
+            min_word_size_for_typos: Some(MinWordSizeForTypos {
+                one_typo: Some(5),
+                two_typos: Some(9),
+            }),
+        };
+
+        let typo_tolerance = TypoToleranceSettings {
+            disable_on_attributes: Some(vec!["title".to_string()]),
+            ..Default::default()
+        };
+
+        let task = index.set_typo_tolerance(&typo_tolerance).await.unwrap();
+        index.wait_for_task(task, None, None).await.unwrap();
+
+        let reset_task = index.reset_typo_tolerance().await.unwrap();
+        index.wait_for_task(reset_task, None, None).await.unwrap();
+
+        let default = index.get_typo_tolerance().await.unwrap();
+
+        assert_eq!(expected, default);
+    }
+
+    #[meilisearch_test]
+    async fn test_get_proximity_precision(index: Index) {
+        let expected = "byWord".to_string();
+
+        let res = index.get_proximity_precision().await.unwrap();
+
+        assert_eq!(expected, res);
+    }
+
+    #[meilisearch_test]
+    async fn test_set_proximity_precision(client: Client, index: Index) {
+        let expected = "byAttribute".to_string();
+
+        let task_info = index
+            .set_proximity_precision("byAttribute".to_string())
+            .await
+            .unwrap();
+        client.wait_for_task(task_info, None, None).await.unwrap();
+
+        let res = index.get_proximity_precision().await.unwrap();
+
+        assert_eq!(expected, res);
+    }
+
+    #[meilisearch_test]
+    async fn test_reset_proximity_precision(index: Index) {
+        let expected = "byWord".to_string();
+
+        let task = index
+            .set_proximity_precision("byAttribute".to_string())
+            .await
+            .unwrap();
+        index.wait_for_task(task, None, None).await.unwrap();
+
+        let reset_task = index.reset_proximity_precision().await.unwrap();
+        index.wait_for_task(reset_task, None, None).await.unwrap();
+
+        let default = index.get_proximity_precision().await.unwrap();
+
+        assert_eq!(expected, default);
+    }
+
+    #[meilisearch_test]
+    async fn test_get_search_cutoff_ms(index: Index) {
+        let expected = None;
+
+        let res = index.get_search_cutoff_ms().await.unwrap();
+
+        assert_eq!(expected, res);
+    }
+
+    #[meilisearch_test]
+    async fn test_set_search_cutoff_ms(client: Client, index: Index) {
+        let expected = Some(150);
+
+        let task_info = index.set_search_cutoff_ms(Some(150)).await.unwrap();
+        client.wait_for_task(task_info, None, None).await.unwrap();
+
+        let res = index.get_search_cutoff_ms().await.unwrap();
+
+        assert_eq!(expected, res);
+    }
+
+    #[meilisearch_test]
+    async fn test_get_separator_tokens(index: Index) {
+        let separator: Vec<&str> = vec![];
+        let res = index.get_separator_tokens().await.unwrap();
+
+        assert_eq!(separator, res);
+    }
+
+    #[meilisearch_test]
+    async fn test_set_separator_tokens(client: Client, index: Index) {
+        let expected: Vec<String> = vec!["#".to_string(), "@".to_string()];
+
+        let task_info = index.set_separator_tokens(&expected).await.unwrap();
+        client.wait_for_task(task_info, None, None).await.unwrap();
+
+        let res = index.get_separator_tokens().await.unwrap();
+
+        assert_eq!(expected, res);
+    }
+
+    #[meilisearch_test]
+    async fn test_reset_search_cutoff_ms(index: Index) {
+        let expected = None;
+
+        let task = index.set_search_cutoff_ms(Some(150)).await.unwrap();
+        index.wait_for_task(task, None, None).await.unwrap();
+
+        let reset_task = index.reset_search_cutoff_ms().await.unwrap();
+        index.wait_for_task(reset_task, None, None).await.unwrap();
+
+        let default = index.get_search_cutoff_ms().await.unwrap();
+
+        assert_eq!(expected, default);
+    }
+
+    #[meilisearch_test]
+    async fn test_reset_separator_tokens(client: Client, index: Index) {
+        let separator: Vec<&str> = vec![];
+        let task_info = index.reset_separator_tokens().await.unwrap();
+        client.wait_for_task(task_info, None, None).await.unwrap();
+
+        let res = index.get_separator_tokens().await.unwrap();
+        assert_eq!(separator, res);
+    }
+
+    #[meilisearch_test]
+    async fn test_get_non_separator_tokens(index: Index) {
+        let separator: Vec<&str> = vec![];
+        let res = index.get_non_separator_tokens().await.unwrap();
+
+        assert_eq!(separator, res);
+    }
+
+    #[meilisearch_test]
+    async fn test_set_non_separator_tokens(client: Client, index: Index) {
+        let expected: Vec<String> = vec!["#".to_string(), "@".to_string()];
+
+        let task_info = index.set_non_separator_tokens(&expected).await.unwrap();
+        client.wait_for_task(task_info, None, None).await.unwrap();
+
+        let res = index.get_non_separator_tokens().await.unwrap();
+
+        assert_eq!(expected, res);
+    }
+
+    #[meilisearch_test]
+    async fn test_reset_non_separator_tokens(client: Client, index: Index) {
+        let separator: Vec<&str> = vec![];
+        let task_info = index.reset_non_separator_tokens().await.unwrap();
+        client.wait_for_task(task_info, None, None).await.unwrap();
+
+        let res = index.get_non_separator_tokens().await.unwrap();
+        assert_eq!(separator, res);
+    }
+
+    #[meilisearch_test]
+    async fn test_get_localized_attributes(index: Index) {
+        let res = index.get_localized_attributes().await.unwrap();
+        assert_eq!(None, res);
+    }
+
+    #[meilisearch_test]
+    async fn test_set_localized_attributes(client: Client, index: Index) {
+        let localized_attributes = vec![LocalizedAttributes {
+            locales: vec!["jpn".to_string()],
+            attribute_patterns: vec!["*_ja".to_string()],
+        }];
+        let task_info = index
+            .set_localized_attributes(&localized_attributes)
+            .await
+            .unwrap();
+        client.wait_for_task(task_info, None, None).await.unwrap();
+
+        let res = index.get_localized_attributes().await.unwrap();
+        assert_eq!(Some(localized_attributes), res);
+    }
+
+    #[meilisearch_test]
+    async fn test_reset_localized_attributes(client: Client, index: Index) {
+        let localized_attributes = vec![LocalizedAttributes {
+            locales: vec!["jpn".to_string()],
+            attribute_patterns: vec!["*_ja".to_string()],
+        }];
+        let task_info = index
+            .set_localized_attributes(&localized_attributes)
+            .await
+            .unwrap();
+        client.wait_for_task(task_info, None, None).await.unwrap();
+
+        let reset_task = index.reset_localized_attributes().await.unwrap();
+        client.wait_for_task(reset_task, None, None).await.unwrap();
+
+        let res = index.get_localized_attributes().await.unwrap();
+        assert_eq!(None, res);
+    }
+}
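Not part of the diff: the tests above lean on the `meilisearch_test` macro, which provisions a fresh `Client`/`Index` per test, and all follow the same set, wait, read back, assert shape. A sketch of that shape outside the macro, with an illustrative index name:

```rust
use meilisearch_sdk::{client::Client, errors::Error, settings::PaginationSetting};

// Sketch of the set -> wait -> read-back pattern, without the test macro's
// automatic index provisioning and teardown.
async fn check_pagination_roundtrip(client: &Client) -> Result<(), Error> {
    let index = client.index("pagination_check"); // hypothetical index name
    let wanted = PaginationSetting { max_total_hits: 500 };

    let task = index.set_pagination(wanted).await?;
    index.wait_for_task(task, None, None).await?;

    assert_eq!(index.get_pagination().await?, wanted);
    Ok(())
}
```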
diff --git a/backend/vendor/meilisearch-sdk/src/snapshots.rs b/backend/vendor/meilisearch-sdk/src/snapshots.rs
new file mode 100644
index 000000000..271322772
--- /dev/null
+++ b/backend/vendor/meilisearch-sdk/src/snapshots.rs
@@ -0,0 +1,116 @@
+//! The `snapshots` module allows the creation of database snapshots.
+//!
+//! - snapshots are `.snapshot` files that can be used to launch Meilisearch.
+//!
+//! - snapshots are not compatible between Meilisearch versions.
+//!
+//! # Example
+//!
+//! ```
+//! # use meilisearch_sdk::{client::*, errors::*, snapshots::*, task_info::*, tasks::*};
+//! # use futures_await_test::async_test;
+//! # use std::{thread::sleep, time::Duration};
+//! # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async {
+//! #
+//! # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700");
+//! # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey");
+//! #
+//! # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap();
+//!
+//! // Create a snapshot
+//! let task_info = client.create_snapshot().await.unwrap();
+//! assert!(matches!(
+//!     task_info,
+//!     TaskInfo {
+//!         update_type: TaskType::SnapshotCreation { .. },
+//!         ..
+//!     }
+//! ));
+//! # });
+//! ```
+
+use crate::{client::Client, errors::Error, request::*, task_info::TaskInfo};
+
+/// Snapshots related methods.
+/// See the [snapshots](crate::snapshots) module.
+impl<Http: HttpClient> Client<Http> {
+    /// Triggers a snapshot creation process.
+    ///
+    /// Once the process is complete, a snapshot is created in the snapshots directory.
+    /// If the snapshots directory does not exist yet, it will be created.
+    ///
+    /// # Example
+    ///
+    /// ```
+    /// # use meilisearch_sdk::{client::*, errors::*, snapshots::*, task_info::*, tasks::*};
+    /// # use futures_await_test::async_test;
+    /// # use std::{thread::sleep, time::Duration};
+    /// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async {
+    /// #
+    /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700");
+    /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey");
+    /// #
+    /// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap();
+    /// #
+    /// let task_info = client.create_snapshot().await.unwrap();
+    ///
+    /// assert!(matches!(
+    ///     task_info,
+    ///     TaskInfo {
+    ///         update_type: TaskType::SnapshotCreation { .. },
+    ///         ..
+    ///     }
+    /// ));
+    /// # });
+    /// ```
+    pub async fn create_snapshot(&self) -> Result<TaskInfo, Error> {
+        self.http_client
+            .request::<(), (), TaskInfo>(
+                &format!("{}/snapshots", self.host),
+                Method::Post {
+                    query: (),
+                    body: (),
+                },
+                202,
+            )
+            .await
+    }
+}
+
+/// Alias for [`create_snapshot`](Client::create_snapshot).
+pub async fn create_snapshot<Http: HttpClient>(client: &Client<Http>) -> Result<TaskInfo, Error> {
+    client.create_snapshot().await
+}
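Not part of the diff: a sketch that triggers a snapshot and blocks until the task finishes, mirroring the tests that follow. The snapshot lands in the server-side snapshots directory; the client only ever sees the task status.

```rust
use meilisearch_sdk::{client::Client, errors::Error, tasks::Task};

// Sketch: create a snapshot and wait for the enqueued task to succeed.
async fn snapshot_now(client: &Client) -> Result<(), Error> {
    let task = client
        .create_snapshot()
        .await?
        .wait_for_completion(client, None, None)
        .await?;
    assert!(matches!(task, Task::Succeeded { .. }));
    Ok(())
}
```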
+ } + )); + Ok(()) + } +} diff --git a/backend/vendor/meilisearch-sdk/src/task_info.rs b/backend/vendor/meilisearch-sdk/src/task_info.rs new file mode 100644 index 000000000..c65c2186a --- /dev/null +++ b/backend/vendor/meilisearch-sdk/src/task_info.rs @@ -0,0 +1,181 @@ +use serde::Deserialize; +use std::time::Duration; +use time::OffsetDateTime; + +use crate::{client::Client, errors::Error, request::HttpClient, tasks::*}; + +#[derive(Debug, Clone, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct TaskInfo { + #[serde(with = "time::serde::rfc3339")] + pub enqueued_at: OffsetDateTime, + pub index_uid: Option, + pub status: String, + #[serde(flatten)] + pub update_type: TaskType, + pub task_uid: u32, +} + +impl AsRef for TaskInfo { + fn as_ref(&self) -> &u32 { + &self.task_uid + } +} + +impl TaskInfo { + #[must_use] + pub fn get_task_uid(&self) -> u32 { + self.task_uid + } + + /// Wait until Meilisearch processes a task provided by [`TaskInfo`], and get its status. + /// + /// `interval` = The frequency at which the server should be polled. **Default = 50ms** + /// + /// `timeout` = The maximum time to wait for processing to complete. **Default = 5000ms** + /// + /// If the waited time exceeds `timeout` then an [`Error::Timeout`] will be returned. + /// + /// See also [`Client::wait_for_task`, `Index::wait_for_task`]. + /// + /// # Example + /// + /// ``` + /// # use meilisearch_sdk::{client::*, indexes::*, tasks::*}; + /// # use serde::{Serialize, Deserialize}; + /// # + /// # #[derive(Debug, Serialize, Deserialize, PartialEq)] + /// # struct Document { + /// # id: usize, + /// # value: String, + /// # kind: String, + /// # } + /// # + /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700"); + /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey"); + /// # + /// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async { + /// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap(); + /// let movies = client.index("movies_wait_for_completion"); + /// + /// let status = movies.add_documents(&[ + /// Document { id: 0, kind: "title".into(), value: "The Social Network".to_string() }, + /// Document { id: 1, kind: "title".into(), value: "Harry Potter and the Sorcerer's Stone".to_string() }, + /// ], None) + /// .await + /// .unwrap() + /// .wait_for_completion(&client, None, None) + /// .await + /// .unwrap(); + /// + /// assert!(matches!(status, Task::Succeeded { .. 
+
+impl TaskInfo {
+    #[must_use]
+    pub fn get_task_uid(&self) -> u32 {
+        self.task_uid
+    }
+
+    /// Wait until Meilisearch processes a task provided by [`TaskInfo`], and get its status.
+    ///
+    /// `interval` = The frequency at which the server should be polled. **Default = 50ms**
+    ///
+    /// `timeout` = The maximum time to wait for processing to complete. **Default = 5000ms**
+    ///
+    /// If the waited time exceeds `timeout` then an [`Error::Timeout`] will be returned.
+    ///
+    /// See also [`Client::wait_for_task`, `Index::wait_for_task`].
+    ///
+    /// # Example
+    ///
+    /// ```
+    /// # use meilisearch_sdk::{client::*, indexes::*, tasks::*};
+    /// # use serde::{Serialize, Deserialize};
+    /// #
+    /// # #[derive(Debug, Serialize, Deserialize, PartialEq)]
+    /// # struct Document {
+    /// #     id: usize,
+    /// #     value: String,
+    /// #     kind: String,
+    /// # }
+    /// #
+    /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700");
+    /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey");
+    /// #
+    /// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async {
+    /// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap();
+    /// let movies = client.index("movies_wait_for_completion");
+    ///
+    /// let status = movies.add_documents(&[
+    ///     Document { id: 0, kind: "title".into(), value: "The Social Network".to_string() },
+    ///     Document { id: 1, kind: "title".into(), value: "Harry Potter and the Sorcerer's Stone".to_string() },
+    /// ], None)
+    ///     .await
+    ///     .unwrap()
+    ///     .wait_for_completion(&client, None, None)
+    ///     .await
+    ///     .unwrap();
+    ///
+    /// assert!(matches!(status, Task::Succeeded { .. }));
+    /// # movies.delete().await.unwrap().wait_for_completion(&client, None, None).await.unwrap();
+    /// # });
+    /// ```
+    pub async fn wait_for_completion<Http: HttpClient>(
+        self,
+        client: &Client<Http>,
+        interval: Option<Duration>,
+        timeout: Option<Duration>,
+    ) -> Result<Task, Error> {
+        client.wait_for_task(self, interval, timeout).await
+    }
+}
+
+#[cfg(test)]
+mod test {
+    use super::*;
+    use crate::{
+        client::*,
+        errors::{ErrorCode, ErrorType},
+        indexes::Index,
+    };
+    use big_s::S;
+    use meilisearch_test_macro::meilisearch_test;
+    use serde::{Deserialize, Serialize};
+    use std::time::Duration;
+
+    #[derive(Debug, Serialize, Deserialize, PartialEq)]
+    struct Document {
+        id: usize,
+        value: String,
+        kind: String,
+    }
+
+    #[test]
+    fn test_deserialize_task_info() {
+        let datetime = OffsetDateTime::parse(
+            "2022-02-03T13:02:38.369634Z",
+            &time::format_description::well_known::Rfc3339,
+        )
+        .unwrap();
+
+        let task_info: TaskInfo = serde_json::from_str(
+            r#"
+{
+    "enqueuedAt": "2022-02-03T13:02:38.369634Z",
+    "indexUid": "meili",
+    "status": "enqueued",
+    "type": "documentAdditionOrUpdate",
+    "taskUid": 12
+}"#,
+        )
+        .unwrap();
+
+        assert!(matches!(
+            task_info,
+            TaskInfo {
+                enqueued_at,
+                index_uid: Some(index_uid),
+                task_uid: 12,
+                update_type: TaskType::DocumentAdditionOrUpdate { details: None },
+                status,
+            }
+            if enqueued_at == datetime && index_uid == "meili" && status == "enqueued"));
+    }
+
+    #[meilisearch_test]
+    async fn test_wait_for_task_with_args(client: Client, movies: Index) -> Result<(), Error> {
+        let task_info = movies
+            .add_documents(
+                &[
+                    Document {
+                        id: 0,
+                        kind: "title".into(),
+                        value: S("The Social Network"),
+                    },
+                    Document {
+                        id: 1,
+                        kind: "title".into(),
+                        value: S("Harry Potter and the Sorcerer's Stone"),
+                    },
+                ],
+                None,
+            )
+            .await?;
+
+        let task = client
+            .get_task(task_info)
+            .await?
+            .wait_for_completion(
+                &client,
+                Some(Duration::from_millis(1)),
+                Some(Duration::from_millis(6000)),
+            )
+            .await?;
+
+        assert!(matches!(task, Task::Succeeded { .. }));
+        Ok(())
+    }
+
+    #[meilisearch_test]
+    async fn test_failing_task(client: Client, index: Index) -> Result<(), Error> {
+        let task_info = client.create_index(index.uid, None).await.unwrap();
+        let task = client.wait_for_task(task_info, None, None).await?;
+
+        let error = task.unwrap_failure();
+        assert_eq!(error.error_code, ErrorCode::IndexAlreadyExists);
+        assert_eq!(error.error_type, ErrorType::InvalidRequest);
+        Ok(())
+    }
+}
diff --git a/backend/vendor/meilisearch-sdk/src/tasks.rs b/backend/vendor/meilisearch-sdk/src/tasks.rs
new file mode 100644
index 000000000..37babd2f4
--- /dev/null
+++ b/backend/vendor/meilisearch-sdk/src/tasks.rs
@@ -0,0 +1,1143 @@
+use serde::{Deserialize, Deserializer, Serialize};
+use std::time::Duration;
+use time::OffsetDateTime;
+
+use crate::{
+    client::Client, client::SwapIndexes, errors::Error, errors::MeilisearchError, indexes::Index,
+    request::HttpClient, settings::Settings, task_info::TaskInfo,
+};
+
+#[derive(Debug, Clone, Deserialize)]
+#[serde(rename_all = "camelCase", tag = "type")]
+pub enum TaskType {
+    Customs,
+    DocumentAdditionOrUpdate {
+        details: Option<DocumentAdditionOrUpdate>,
+    },
+    DocumentDeletion {
+        details: Option<DocumentDeletion>,
+    },
+    IndexCreation {
+        details: Option<IndexCreation>,
+    },
+    IndexUpdate {
+        details: Option<IndexUpdate>,
+    },
+    IndexDeletion {
+        details: Option<IndexDeletion>,
+    },
+    SettingsUpdate {
+        details: Box<Option<Settings>>,
+    },
+    DumpCreation {
+        details: Option<DumpCreation>,
+    },
+    IndexSwap {
+        details: Option<IndexSwap>,
+    },
+    TaskCancelation {
+        details: Option<TaskCancelation>,
+    },
+    TaskDeletion {
+        details: Option<TaskDeletion>,
+    },
+    SnapshotCreation {
+        details: Option<SnapshotCreation>,
+    },
+}
+
+#[derive(Debug, Clone, Deserialize)]
+pub struct TasksResults {
+    pub results: Vec<Task>,
+    pub total: u64,
+    pub limit: u32,
+    pub from: Option<u32>,
+    pub next: Option<u32>,
+}
+
+#[derive(Debug, Clone, Deserialize)]
+#[serde(rename_all = "camelCase")]
+pub struct DocumentAdditionOrUpdate {
+    pub indexed_documents: Option<usize>,
+    pub received_documents: usize,
+}
+
+#[derive(Debug, Clone, Deserialize)]
+#[serde(rename_all = "camelCase")]
+pub struct DocumentDeletion {
+    pub provided_ids: Option<usize>,
+    pub deleted_documents: Option<usize>,
+    pub original_filter: Option<String>,
+}
+
+#[derive(Debug, Clone, Deserialize)]
+#[serde(rename_all = "camelCase")]
+pub struct IndexCreation {
+    pub primary_key: Option<String>,
+}
+
+#[derive(Debug, Clone, Deserialize)]
+#[serde(rename_all = "camelCase")]
+pub struct IndexUpdate {
+    pub primary_key: Option<String>,
+}
+
+#[derive(Debug, Clone, Deserialize)]
+#[serde(rename_all = "camelCase")]
+pub struct IndexDeletion {
+    pub deleted_documents: Option<usize>,
+}
+
+#[derive(Debug, Clone, Deserialize)]
+#[serde(rename_all = "camelCase")]
+pub struct SnapshotCreation {}
+
+#[derive(Debug, Clone, Deserialize)]
+#[serde(rename_all = "camelCase")]
+pub struct DumpCreation {
+    pub dump_uid: Option<String>,
+}
+
+#[derive(Debug, Clone, Deserialize)]
+#[serde(rename_all = "camelCase")]
+pub struct IndexSwap {
+    pub swaps: Vec<SwapIndexes>,
+}
+
+#[derive(Debug, Clone, Deserialize)]
+#[serde(rename_all = "camelCase")]
+pub struct TaskCancelation {
+    pub matched_tasks: usize,
+    pub canceled_tasks: usize,
+    pub original_filter: String,
+}
+
+#[derive(Debug, Clone, Deserialize)]
+#[serde(rename_all = "camelCase")]
+pub struct TaskDeletion {
+    pub matched_tasks: usize,
+    pub deleted_tasks: usize,
+    pub original_filter: String,
+}
+
+#[derive(Deserialize, Debug, Clone)]
+#[serde(rename_all = "camelCase")]
+pub struct FailedTask {
+    pub error: MeilisearchError,
+    #[serde(flatten)]
+    pub task: SucceededTask,
+}
+
+impl AsRef<u32> for FailedTask {
+    fn as_ref(&self) -> &u32 {
+        &self.task.uid
+    }
+}
+
+fn deserialize_duration<'de, D>(deserializer: D) -> Result<Duration, D::Error>
+where
+    D: Deserializer<'de>,
+{
+    let s = String::deserialize(deserializer)?;
+    let iso_duration = iso8601::duration(&s).map_err(serde::de::Error::custom)?;
+    Ok(iso_duration.into())
+}
+
+#[derive(Deserialize, Debug, Clone)]
+#[serde(rename_all = "camelCase")]
+pub struct SucceededTask {
+    #[serde(deserialize_with = "deserialize_duration")]
+    pub duration: Duration,
+    #[serde(with = "time::serde::rfc3339")]
+    pub enqueued_at: OffsetDateTime,
+    #[serde(with = "time::serde::rfc3339")]
+    pub started_at: OffsetDateTime,
+    #[serde(with = "time::serde::rfc3339")]
+    pub finished_at: OffsetDateTime,
+    pub canceled_by: Option<u32>,
+    pub index_uid: Option<String>,
+    pub error: Option<MeilisearchError>,
+    #[serde(flatten)]
+    pub update_type: TaskType,
+    pub uid: u32,
+}
+
+impl AsRef<u32> for SucceededTask {
+    fn as_ref(&self) -> &u32 {
+        &self.uid
+    }
+}
+
+#[derive(Debug, Clone, Deserialize)]
+#[serde(rename_all = "camelCase")]
+pub struct EnqueuedTask {
+    #[serde(with = "time::serde::rfc3339")]
+    pub enqueued_at: OffsetDateTime,
+    pub index_uid: Option<String>,
+    #[serde(flatten)]
+    pub update_type: TaskType,
+    pub uid: u32,
+}
+
+impl AsRef<u32> for EnqueuedTask {
+    fn as_ref(&self) -> &u32 {
+        &self.uid
+    }
+}
+
+#[derive(Debug, Clone, Deserialize)]
+#[serde(rename_all = "camelCase")]
+pub struct ProcessingTask {
+    #[serde(with = "time::serde::rfc3339")]
+    pub enqueued_at: OffsetDateTime,
+    #[serde(with = "time::serde::rfc3339")]
+    pub started_at: OffsetDateTime,
+    pub index_uid: Option<String>,
+    #[serde(flatten)]
+    pub update_type: TaskType,
+    pub uid: u32,
+}
+
+impl AsRef<u32> for ProcessingTask {
+    fn as_ref(&self) -> &u32 {
+        &self.uid
+    }
+}
+
+#[derive(Debug, Clone, Deserialize)]
+#[serde(rename_all = "camelCase", tag = "status")]
+pub enum Task {
+    Enqueued {
+        #[serde(flatten)]
+        content: EnqueuedTask,
+    },
+    Processing {
+        #[serde(flatten)]
+        content: ProcessingTask,
+    },
+    Failed {
+        #[serde(flatten)]
+        content: FailedTask,
+    },
+    Succeeded {
+        #[serde(flatten)]
+        content: SucceededTask,
+    },
+}
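Not part of the diff: because the enum is tagged with `#[serde(tag = "status")]`, the JSON `status` field picks the variant, and exhaustive matching is the idiomatic way to branch on a task's lifecycle stage. A minimal sketch:

```rust
use meilisearch_sdk::tasks::Task;

// Sketch: map a deserialized task onto its lifecycle label.
fn status_label(task: &Task) -> &'static str {
    match task {
        Task::Enqueued { .. } => "enqueued",
        Task::Processing { .. } => "processing",
        Task::Succeeded { .. } => "succeeded",
        Task::Failed { .. } => "failed",
    }
}
```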
+ /// + /// # Example + /// + /// ``` + /// # use meilisearch_sdk::{client::*, indexes::*, tasks::Task}; + /// # use serde::{Serialize, Deserialize}; + /// # + /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700"); + /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey"); + /// # + /// # #[derive(Debug, Serialize, Deserialize, PartialEq)] + /// # struct Document { + /// # id: usize, + /// # value: String, + /// # kind: String, + /// # } + /// # + /// # + /// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async { + /// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap(); + /// let movies = client.index("movies_wait_for_completion"); + /// + /// let status = movies.add_documents(&[ + /// Document { id: 0, kind: "title".into(), value: "The Social Network".to_string() }, + /// Document { id: 1, kind: "title".into(), value: "Harry Potter and the Sorcerer's Stone".to_string() }, + /// ], None) + /// .await + /// .unwrap() + /// .wait_for_completion(&client, None, None) + /// .await + /// .unwrap(); + /// + /// assert!(matches!(status, Task::Succeeded { .. })); + /// # movies.delete().await.unwrap().wait_for_completion(&client, None, None).await.unwrap(); + /// # }); + /// ``` + pub async fn wait_for_completion<Http: HttpClient>( + self, + client: &Client<Http>, + interval: Option<Duration>, + timeout: Option<Duration>, + ) -> Result<Self, Error> { + client.wait_for_task(self, interval, timeout).await + } + + /// Extract the [Index] from a successful `IndexCreation` task. + /// + /// If the task failed or was not an `IndexCreation` task it returns itself. + /// + /// # Example + /// + /// ``` + /// # use meilisearch_sdk::{client::*, indexes::*}; + /// # + /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700"); + /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey"); + /// # + /// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async { + /// # // create the client + /// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap(); + /// let task = client.create_index("try_make_index", None).await.unwrap(); + /// let index = client.wait_for_task(task, None, None).await.unwrap().try_make_index(&client).unwrap(); + /// + /// // and safely access it + /// assert_eq!(index.as_ref(), "try_make_index"); + /// # index.delete().await.unwrap().wait_for_completion(&client, None, None).await.unwrap(); + /// # }); + /// ``` + #[allow(clippy::result_large_err)] // Since `self` has been consumed, this is not an issue + pub fn try_make_index<Http: HttpClient>( + self, + client: &Client<Http>, + ) -> Result<Index<Http>, Self> { + match self { + Self::Succeeded { + content: + SucceededTask { + index_uid, + update_type: TaskType::IndexCreation { .. }, + .. + }, + } => Ok(client.index(index_uid.unwrap())), + _ => Err(self), + } + } + + /// Unwrap the [`MeilisearchError`] from a [`Self::Failed`] [Task]. + /// + /// Will panic if the task was not [`Self::Failed`]. 
+ /// + /// # Example + /// + /// ``` + /// # use meilisearch_sdk::{client::*, indexes::*, errors::ErrorCode}; + /// # + /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700"); + /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey"); + /// # + /// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async { + /// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap(); + /// let task = client.create_index("unwrap_failure", None).await.unwrap(); + /// let task = client + /// .create_index("unwrap_failure", None) + /// .await + /// .unwrap() + /// .wait_for_completion(&client, None, None) + /// .await + /// .unwrap(); + /// + /// assert!(task.is_failure()); + /// + /// let failure = task.unwrap_failure(); + /// + /// assert_eq!(failure.error_code, ErrorCode::IndexAlreadyExists); + /// # client.index("unwrap_failure").delete().await.unwrap().wait_for_completion(&client, None, None).await.unwrap(); + /// # }); + /// ``` + #[must_use] + pub fn unwrap_failure(self) -> MeilisearchError { + match self { + Self::Failed { + content: FailedTask { error, .. }, + } => error, + _ => panic!("Called `unwrap_failure` on a non `Failed` task."), + } + } + + /// Returns `true` if the [Task] is [`Self::Failed`]. + /// + /// # Example + /// + /// ``` + /// # use meilisearch_sdk::{client::*, indexes::*, errors::ErrorCode}; + /// # + /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700"); + /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey"); + /// # + /// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async { + /// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap(); + /// let task = client.create_index("is_failure", None).await.unwrap(); + /// // create an index with a conflicting uid + /// let task = client + /// .create_index("is_failure", None) + /// .await + /// .unwrap() + /// .wait_for_completion(&client, None, None) + /// .await + /// .unwrap(); + /// + /// assert!(task.is_failure()); + /// # client.index("is_failure").delete().await.unwrap().wait_for_completion(&client, None, None).await.unwrap(); + /// # }); + /// ``` + #[must_use] + pub fn is_failure(&self) -> bool { + matches!(self, Self::Failed { .. }) + } + + /// Returns `true` if the [Task] is [`Self::Succeeded`]. + /// + /// # Example + /// + /// ``` + /// # use meilisearch_sdk::{client::*, indexes::*, errors::ErrorCode}; + /// # + /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700"); + /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey"); + /// # + /// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async { + /// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap(); + /// let task = client + /// .create_index("is_success", None) + /// .await + /// .unwrap() + /// .wait_for_completion(&client, None, None) + /// .await + /// .unwrap(); + /// + /// assert!(task.is_success()); + /// # task.try_make_index(&client).unwrap().delete().await.unwrap().wait_for_completion(&client, None, None).await.unwrap(); + /// # }); + /// ``` + #[must_use] + pub fn is_success(&self) -> bool { + matches!(self, Self::Succeeded { .. }) + } + + /// Returns `true` if the [Task] is pending ([`Self::Enqueued`] or [`Self::Processing`]). 
+ /// + /// # Example + /// ```no_run + /// # // The test is not run because it checks for an enqueued or processed status + /// # // and the task might already be processed when checking the status after the get_task call + /// # use meilisearch_sdk::{client::*, indexes::*, errors::ErrorCode}; + /// # + /// # let MEILISEARCH_URL = option_env!("MEILISEARCH_URL").unwrap_or("http://localhost:7700"); + /// # let MEILISEARCH_API_KEY = option_env!("MEILISEARCH_API_KEY").unwrap_or("masterKey"); + /// # + /// # tokio::runtime::Builder::new_current_thread().enable_all().build().unwrap().block_on(async { + /// # let client = Client::new(MEILISEARCH_URL, Some(MEILISEARCH_API_KEY)).unwrap(); + /// let task_info = client + /// .create_index("is_pending", None) + /// .await + /// .unwrap(); + /// let task = client.get_task(task_info).await.unwrap(); + /// + /// assert!(task.is_pending()); + /// # task.wait_for_completion(&client, None, None).await.unwrap().try_make_index(&client).unwrap().delete().await.unwrap().wait_for_completion(&client, None, None).await.unwrap(); + /// # }); + /// ``` + #[must_use] + pub fn is_pending(&self) -> bool { + matches!(self, Self::Enqueued { .. } | Self::Processing { .. }) + } +} + +impl AsRef<u32> for Task { + fn as_ref(&self) -> &u32 { + match self { + Self::Enqueued { content } => content.as_ref(), + Self::Processing { content } => content.as_ref(), + Self::Succeeded { content } => content.as_ref(), + Self::Failed { content } => content.as_ref(), + } + } +} + +#[derive(Debug, Serialize, Clone)] +pub struct TasksPaginationFilters { + // Maximum number of tasks to return. + #[serde(skip_serializing_if = "Option::is_none")] + limit: Option<u32>, + // The first task uid that should be returned. + #[serde(skip_serializing_if = "Option::is_none")] + from: Option<u32>, +} + +#[derive(Debug, Serialize, Clone)] +pub struct TasksCancelFilters {} + +#[derive(Debug, Serialize, Clone)] +pub struct TasksDeleteFilters {} + +pub type TasksSearchQuery<'a, Http> = TasksQuery<'a, TasksPaginationFilters, Http>; +pub type TasksCancelQuery<'a, Http> = TasksQuery<'a, TasksCancelFilters, Http>; +pub type TasksDeleteQuery<'a, Http> = TasksQuery<'a, TasksDeleteFilters, Http>; + +#[derive(Debug, Serialize, Clone)] +#[serde(rename_all = "camelCase")] +pub struct TasksQuery<'a, T, Http: HttpClient> { + #[serde(skip_serializing)] + client: &'a Client<Http>, + // Index uids array to only retrieve the tasks of the indexes. + #[serde(skip_serializing_if = "Option::is_none")] + index_uids: Option<Vec<&'a str>>, + // Statuses array to only retrieve the tasks with these statuses. + #[serde(skip_serializing_if = "Option::is_none")] + statuses: Option<Vec<&'a str>>, + // Types array to only retrieve the tasks with these [TaskType]. + #[serde(skip_serializing_if = "Option::is_none", rename = "types")] + task_types: Option<Vec<&'a str>>, + // Uids of the tasks to retrieve. + #[serde(skip_serializing_if = "Option::is_none")] + uids: Option<Vec<&'a u32>>, + // Uids of the tasks that canceled other tasks. + #[serde(skip_serializing_if = "Option::is_none")] + canceled_by: Option<Vec<&'a u32>>, + // Date to retrieve all tasks that were enqueued before it. + #[serde( + skip_serializing_if = "Option::is_none", + serialize_with = "time::serde::rfc3339::option::serialize" + )] + before_enqueued_at: Option<OffsetDateTime>, + // Date to retrieve all tasks that were enqueued after it. + #[serde( + skip_serializing_if = "Option::is_none", + serialize_with = "time::serde::rfc3339::option::serialize" + )] + after_enqueued_at: Option<OffsetDateTime>, + // Date to retrieve all tasks that were started before it. 
+ #[serde( + skip_serializing_if = "Option::is_none", + serialize_with = "time::serde::rfc3339::option::serialize" + )] + before_started_at: Option<OffsetDateTime>, + // Date to retrieve all tasks that were started after it. + #[serde( + skip_serializing_if = "Option::is_none", + serialize_with = "time::serde::rfc3339::option::serialize" + )] + after_started_at: Option<OffsetDateTime>, + // Date to retrieve all tasks that were finished before it. + #[serde( + skip_serializing_if = "Option::is_none", + serialize_with = "time::serde::rfc3339::option::serialize" + )] + before_finished_at: Option<OffsetDateTime>, + // Date to retrieve all tasks that were finished after it. + #[serde( + skip_serializing_if = "Option::is_none", + serialize_with = "time::serde::rfc3339::option::serialize" + )] + after_finished_at: Option<OffsetDateTime>, + + #[serde(flatten)] + pagination: T, +} + +#[allow(missing_docs)] +impl<'a, T, Http: HttpClient> TasksQuery<'a, T, Http> { + pub fn with_index_uids<'b>( + &'b mut self, + index_uids: impl IntoIterator<Item = &'a str>, + ) -> &'b mut TasksQuery<'a, T, Http> { + self.index_uids = Some(index_uids.into_iter().collect()); + self + } + pub fn with_statuses<'b>( + &'b mut self, + statuses: impl IntoIterator<Item = &'a str>, + ) -> &'b mut TasksQuery<'a, T, Http> { + self.statuses = Some(statuses.into_iter().collect()); + self + } + pub fn with_types<'b>( + &'b mut self, + task_types: impl IntoIterator<Item = &'a str>, + ) -> &'b mut TasksQuery<'a, T, Http> { + self.task_types = Some(task_types.into_iter().collect()); + self + } + pub fn with_uids<'b>( + &'b mut self, + uids: impl IntoIterator<Item = &'a u32>, + ) -> &'b mut TasksQuery<'a, T, Http> { + self.uids = Some(uids.into_iter().collect()); + self + } + pub fn with_before_enqueued_at<'b>( + &'b mut self, + before_enqueued_at: &'a OffsetDateTime, + ) -> &'b mut TasksQuery<'a, T, Http> { + self.before_enqueued_at = Some(*before_enqueued_at); + self + } + pub fn with_after_enqueued_at<'b>( + &'b mut self, + after_enqueued_at: &'a OffsetDateTime, + ) -> &'b mut TasksQuery<'a, T, Http> { + self.after_enqueued_at = Some(*after_enqueued_at); + self + } + pub fn with_before_started_at<'b>( + &'b mut self, + before_started_at: &'a OffsetDateTime, + ) -> &'b mut TasksQuery<'a, T, Http> { + self.before_started_at = Some(*before_started_at); + self + } + pub fn with_after_started_at<'b>( + &'b mut self, + after_started_at: &'a OffsetDateTime, + ) -> &'b mut TasksQuery<'a, T, Http> { + self.after_started_at = Some(*after_started_at); + self + } + pub fn with_before_finished_at<'b>( + &'b mut self, + before_finished_at: &'a OffsetDateTime, + ) -> &'b mut TasksQuery<'a, T, Http> { + self.before_finished_at = Some(*before_finished_at); + self + } + pub fn with_after_finished_at<'b>( + &'b mut self, + after_finished_at: &'a OffsetDateTime, + ) -> &'b mut TasksQuery<'a, T, Http> { + self.after_finished_at = Some(*after_finished_at); + self + } + pub fn with_canceled_by<'b>( + &'b mut self, + task_uids: impl IntoIterator<Item = &'a u32>, + ) -> &'b mut TasksQuery<'a, T, Http> { + self.canceled_by = Some(task_uids.into_iter().collect()); + self + } +} + +impl<'a, Http: HttpClient> TasksQuery<'a, TasksCancelFilters, Http> { + #[must_use] + pub fn new(client: &'a Client<Http>) -> TasksQuery<'a, TasksCancelFilters, Http> { + TasksQuery { + client, + index_uids: None, + statuses: None, + task_types: None, + uids: None, + canceled_by: None, + before_enqueued_at: None, + after_enqueued_at: None, + before_started_at: None, + after_started_at: None, + before_finished_at: None, + after_finished_at: None, + pagination: TasksCancelFilters {}, + } + } + + pub async fn execute(&'a self) -> 
Result<TaskInfo, Error> { + self.client.cancel_tasks_with(self).await + } +} + +impl<'a, Http: HttpClient> TasksQuery<'a, TasksDeleteFilters, Http> { + #[must_use] + pub fn new(client: &'a Client<Http>) -> TasksQuery<'a, TasksDeleteFilters, Http> { + TasksQuery { + client, + index_uids: None, + statuses: None, + task_types: None, + uids: None, + canceled_by: None, + before_enqueued_at: None, + after_enqueued_at: None, + before_started_at: None, + after_started_at: None, + before_finished_at: None, + after_finished_at: None, + pagination: TasksDeleteFilters {}, + } + } + + pub async fn execute(&'a self) -> Result<TaskInfo, Error> { + self.client.delete_tasks_with(self).await + } +} + +impl<'a, Http: HttpClient> TasksQuery<'a, TasksPaginationFilters, Http> { + #[must_use] + pub fn new(client: &'a Client<Http>) -> TasksQuery<'a, TasksPaginationFilters, Http> { + TasksQuery { + client, + index_uids: None, + statuses: None, + task_types: None, + uids: None, + canceled_by: None, + before_enqueued_at: None, + after_enqueued_at: None, + before_started_at: None, + after_started_at: None, + before_finished_at: None, + after_finished_at: None, + pagination: TasksPaginationFilters { + limit: None, + from: None, + }, + } + } + pub fn with_limit<'b>( + &'b mut self, + limit: u32, + ) -> &'b mut TasksQuery<'a, TasksPaginationFilters, Http> { + self.pagination.limit = Some(limit); + self + } + pub fn with_from<'b>( + &'b mut self, + from: u32, + ) -> &'b mut TasksQuery<'a, TasksPaginationFilters, Http> { + self.pagination.from = Some(from); + self + } + pub async fn execute(&'a self) -> Result<TasksResults, Error> { + self.client.get_tasks_with(self).await + } +} + +#[cfg(test)] +mod test { + use super::*; + use crate::{ + client::*, + errors::{ErrorCode, ErrorType}, + }; + use big_s::S; + use meilisearch_test_macro::meilisearch_test; + use serde::{Deserialize, Serialize}; + use std::time::Duration; + + #[derive(Debug, Serialize, Deserialize, PartialEq)] + struct Document { + id: usize, + value: String, + kind: String, + } + + #[test] + fn test_deserialize_task() { + let datetime = OffsetDateTime::parse( + "2022-02-03T13:02:38.369634Z", + &time::format_description::well_known::Rfc3339, + ) + .unwrap(); + + let task: Task = serde_json::from_str( + r#" +{ + "enqueuedAt": "2022-02-03T13:02:38.369634Z", + "indexUid": "meili", + "status": "enqueued", + "type": "documentAdditionOrUpdate", + "uid": 12 +}"#, + ) + .unwrap(); + + assert!(matches!( + task, + Task::Enqueued { + content: EnqueuedTask { + enqueued_at, + index_uid: Some(index_uid), + update_type: TaskType::DocumentAdditionOrUpdate { details: None }, + uid: 12, + } + } + if enqueued_at == datetime && index_uid == "meili")); + + let task: Task = serde_json::from_str( + r#" +{ + "details": { + "indexedDocuments": null, + "receivedDocuments": 19547 + }, + "duration": null, + "enqueuedAt": "2022-02-03T15:17:02.801341Z", + "finishedAt": null, + "indexUid": "meili", + "startedAt": "2022-02-03T15:17:02.812338Z", + "status": "processing", + "type": "documentAdditionOrUpdate", + "uid": 14 +}"#, + ) + .unwrap(); + + assert!(matches!( + task, + Task::Processing { + content: ProcessingTask { + started_at, + update_type: TaskType::DocumentAdditionOrUpdate { + details: Some(DocumentAdditionOrUpdate { + received_documents: 19547, + indexed_documents: None, + }) + }, + uid: 14, + .. 
+ } + } + if started_at == OffsetDateTime::parse( + "2022-02-03T15:17:02.812338Z", + &time::format_description::well_known::Rfc3339 + ).unwrap() + )); + + let task: Task = serde_json::from_str( + r#" +{ + "details": { + "indexedDocuments": 19546, + "receivedDocuments": 19547 + }, + "duration": "PT10.848957S", + "enqueuedAt": "2022-02-03T15:17:02.801341Z", + "finishedAt": "2022-02-03T15:17:13.661295Z", + "indexUid": "meili", + "startedAt": "2022-02-03T15:17:02.812338Z", + "status": "succeeded", + "type": "documentAdditionOrUpdate", + "uid": 14 +}"#, + ) + .unwrap(); + + assert!(matches!( + task, + Task::Succeeded { + content: SucceededTask { + update_type: TaskType::DocumentAdditionOrUpdate { + details: Some(DocumentAdditionOrUpdate { + received_documents: 19547, + indexed_documents: Some(19546), + }) + }, + uid: 14, + duration, + .. + } + } + if duration == Duration::from_millis(10_848) + )); + } + + #[meilisearch_test] + async fn test_wait_for_task_with_args(client: Client, movies: Index) -> Result<(), Error> { + let task = movies + .add_documents( + &[ + Document { + id: 0, + kind: "title".into(), + value: S("The Social Network"), + }, + Document { + id: 1, + kind: "title".into(), + value: S("Harry Potter and the Sorcerer's Stone"), + }, + ], + None, + ) + .await? + .wait_for_completion( + &client, + Some(Duration::from_millis(1)), + Some(Duration::from_millis(6000)), + ) + .await?; + + assert!(matches!(task, Task::Succeeded { .. })); + Ok(()) + } + + #[meilisearch_test] + async fn test_get_tasks_no_params() -> Result<(), Error> { + let mut s = mockito::Server::new_async().await; + let mock_server_url = s.url(); + let client = Client::new(mock_server_url, Some("masterKey")).unwrap(); + let path = "/tasks"; + + let mock_res = s.mock("GET", path).with_status(200).create_async().await; + let _ = client.get_tasks().await; + mock_res.assert_async().await; + + Ok(()) + } + + #[meilisearch_test] + async fn test_get_tasks_with_params() -> Result<(), Error> { + let mut s = mockito::Server::new_async().await; + let mock_server_url = s.url(); + let client = Client::new(mock_server_url, Some("masterKey")).unwrap(); + let path = + "/tasks?indexUids=movies,test&statuses=equeued&types=documentDeletion&uids=1&limit=0&from=1"; + + let mock_res = s.mock("GET", path).with_status(200).create_async().await; + + let mut query = TasksSearchQuery::new(&client); + query + .with_index_uids(["movies", "test"]) + .with_statuses(["equeued"]) + .with_types(["documentDeletion"]) + .with_from(1) + .with_limit(0) + .with_uids([&1]); + + let _ = client.get_tasks_with(&query).await; + + mock_res.assert_async().await; + + Ok(()) + } + + #[meilisearch_test] + async fn test_get_tasks_with_date_params() -> Result<(), Error> { + let mut s = mockito::Server::new_async().await; + let mock_server_url = s.url(); + let client = Client::new(mock_server_url, Some("masterKey")).unwrap(); + let path = "/tasks?\ + beforeEnqueuedAt=2022-02-03T13%3A02%3A38.369634Z\ + &afterEnqueuedAt=2023-02-03T13%3A02%3A38.369634Z\ + &beforeStartedAt=2024-02-03T13%3A02%3A38.369634Z\ + &afterStartedAt=2025-02-03T13%3A02%3A38.369634Z\ + &beforeFinishedAt=2026-02-03T13%3A02%3A38.369634Z\ + &afterFinishedAt=2027-02-03T13%3A02%3A38.369634Z"; + + let mock_res = s.mock("GET", path).with_status(200).create_async().await; + + let before_enqueued_at = OffsetDateTime::parse( + "2022-02-03T13:02:38.369634Z", + &time::format_description::well_known::Rfc3339, + ) + .unwrap(); + let after_enqueued_at = OffsetDateTime::parse( + "2023-02-03T13:02:38.369634Z", + 
&time::format_description::well_known::Rfc3339, + ) + .unwrap(); + let before_started_at = OffsetDateTime::parse( + "2024-02-03T13:02:38.369634Z", + &time::format_description::well_known::Rfc3339, + ) + .unwrap(); + + let after_started_at = OffsetDateTime::parse( + "2025-02-03T13:02:38.369634Z", + &time::format_description::well_known::Rfc3339, + ) + .unwrap(); + + let before_finished_at = OffsetDateTime::parse( + "2026-02-03T13:02:38.369634Z", + &time::format_description::well_known::Rfc3339, + ) + .unwrap(); + + let after_finished_at = OffsetDateTime::parse( + "2027-02-03T13:02:38.369634Z", + &time::format_description::well_known::Rfc3339, + ) + .unwrap(); + + let mut query = TasksSearchQuery::new(&client); + query + .with_before_enqueued_at(&before_enqueued_at) + .with_after_enqueued_at(&after_enqueued_at) + .with_before_started_at(&before_started_at) + .with_after_started_at(&after_started_at) + .with_before_finished_at(&before_finished_at) + .with_after_finished_at(&after_finished_at); + + let _ = client.get_tasks_with(&query).await; + + mock_res.assert_async().await; + + Ok(()) + } + + #[meilisearch_test] + async fn test_get_tasks_on_struct_with_params() -> Result<(), Error> { + let mut s = mockito::Server::new_async().await; + let mock_server_url = s.url(); + let client = Client::new(mock_server_url, Some("masterKey")).unwrap(); + let path = + "/tasks?indexUids=movies,test&statuses=equeued&types=documentDeletion&canceledBy=9"; + + let mock_res = s.mock("GET", path).with_status(200).create_async().await; + + let mut query = TasksSearchQuery::new(&client); + let _ = query + .with_index_uids(["movies", "test"]) + .with_statuses(["equeued"]) + .with_types(["documentDeletion"]) + .with_canceled_by([&9]) + .execute() + .await; + + mock_res.assert_async().await; + + Ok(()) + } + + #[meilisearch_test] + async fn test_get_tasks_with_none_existant_index_uids(client: Client) -> Result<(), Error> { + let mut query = TasksSearchQuery::new(&client); + query.with_index_uids(["no_name"]); + let tasks = client.get_tasks_with(&query).await.unwrap(); + + assert_eq!(tasks.results.len(), 0); + Ok(()) + } + + #[meilisearch_test] + async fn test_get_tasks_with_execute(client: Client) -> Result<(), Error> { + let tasks = TasksSearchQuery::new(&client) + .with_index_uids(["no_name"]) + .execute() + .await + .unwrap(); + + assert_eq!(tasks.results.len(), 0); + Ok(()) + } + + #[meilisearch_test] + async fn test_failing_task(client: Client, index: Index) -> Result<(), Error> { + let task_info = client.create_index(index.uid, None).await.unwrap(); + let task = client.get_task(task_info).await?; + let task = client.wait_for_task(task, None, None).await?; + + let error = task.unwrap_failure(); + assert_eq!(error.error_code, ErrorCode::IndexAlreadyExists); + assert_eq!(error.error_type, ErrorType::InvalidRequest); + Ok(()) + } + + #[meilisearch_test] + async fn test_cancel_tasks_with_params() -> Result<(), Error> { + let mut s = mockito::Server::new_async().await; + let mock_server_url = s.url(); + let client = Client::new(mock_server_url, Some("masterKey")).unwrap(); + let path = + "/tasks/cancel?indexUids=movies,test&statuses=equeued&types=documentDeletion&uids=1"; + + let mock_res = s.mock("POST", path).with_status(200).create_async().await; + + let mut query = TasksCancelQuery::new(&client); + query + .with_index_uids(["movies", "test"]) + .with_statuses(["equeued"]) + .with_types(["documentDeletion"]) + .with_uids([&1]); + + let _ = client.cancel_tasks_with(&query).await; + + mock_res.assert_async().await; + 
+ Ok(()) + } + + #[meilisearch_test] + async fn test_cancel_tasks_with_params_execute() -> Result<(), Error> { + let mut s = mockito::Server::new_async().await; + let mock_server_url = s.url(); + let client = Client::new(mock_server_url, Some("masterKey")).unwrap(); + let path = + "/tasks/cancel?indexUids=movies,test&statuses=equeued&types=documentDeletion&uids=1"; + + let mock_res = s.mock("POST", path).with_status(200).create_async().await; + + let mut query = TasksCancelQuery::new(&client); + let _ = query + .with_index_uids(["movies", "test"]) + .with_statuses(["equeued"]) + .with_types(["documentDeletion"]) + .with_uids([&1]) + .execute() + .await; + + mock_res.assert_async().await; + + Ok(()) + } + + #[meilisearch_test] + async fn test_delete_tasks_with_params() -> Result<(), Error> { + let mut s = mockito::Server::new_async().await; + let mock_server_url = s.url(); + let client = Client::new(mock_server_url, Some("masterKey")).unwrap(); + let path = "/tasks?indexUids=movies,test&statuses=equeued&types=documentDeletion&uids=1"; + + let mock_res = s.mock("DELETE", path).with_status(200).create_async().await; + + let mut query = TasksDeleteQuery::new(&client); + query + .with_index_uids(["movies", "test"]) + .with_statuses(["equeued"]) + .with_types(["documentDeletion"]) + .with_uids([&1]); + + let _ = client.delete_tasks_with(&query).await; + + mock_res.assert_async().await; + + Ok(()) + } + + #[meilisearch_test] + async fn test_delete_tasks_with_params_execute() -> Result<(), Error> { + let mut s = mockito::Server::new_async().await; + let mock_server_url = s.url(); + let client = Client::new(mock_server_url, Some("masterKey")).unwrap(); + let path = "/tasks?indexUids=movies,test&statuses=equeued&types=documentDeletion&uids=1"; + + let mock_res = s.mock("DELETE", path).with_status(200).create_async().await; + + let mut query = TasksDeleteQuery::new(&client); + let _ = query + .with_index_uids(["movies", "test"]) + .with_statuses(["equeued"]) + .with_types(["documentDeletion"]) + .with_uids([&1]) + .execute() + .await; + + mock_res.assert_async().await; + + Ok(()) + } +} diff --git a/backend/vendor/meilisearch-sdk/src/tenant_tokens.rs b/backend/vendor/meilisearch-sdk/src/tenant_tokens.rs new file mode 100644 index 000000000..bb561f712 --- /dev/null +++ b/backend/vendor/meilisearch-sdk/src/tenant_tokens.rs @@ -0,0 +1,177 @@ +use crate::errors::Error; +use jsonwebtoken::{encode, EncodingKey, Header}; +use serde::{Deserialize, Serialize}; +use serde_json::Value; +use time::OffsetDateTime; +#[cfg(not(target_arch = "wasm32"))] +use uuid::Uuid; + +#[derive(Debug, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +struct TenantTokenClaim { + api_key_uid: String, + search_rules: Value, + #[serde(with = "time::serde::timestamp::option")] + exp: Option<OffsetDateTime>, +} + +pub fn generate_tenant_token( + api_key_uid: String, + search_rules: Value, + api_key: impl AsRef<str>, + expires_at: Option<OffsetDateTime>, +) -> Result<String, Error> { + // Validate uuid format + let uid = Uuid::try_parse(&api_key_uid)?; + + // Validate uuid version + if uid.get_version_num() != 4 { + return Err(Error::InvalidUuid4Version); + } + + if expires_at.map_or(false, |expires_at| OffsetDateTime::now_utc() > expires_at) { + return Err(Error::TenantTokensExpiredSignature); + } + + let claims = TenantTokenClaim { + api_key_uid, + exp: expires_at, + search_rules, + }; + + let token = encode( + &Header::default(), + &claims, + &EncodingKey::from_secret(api_key.as_ref().as_bytes()), + ); + + Ok(token?) 
+} + +#[cfg(test)] +mod tests { + use crate::tenant_tokens::*; + use big_s::S; + use jsonwebtoken::{decode, Algorithm, DecodingKey, Validation}; + use serde_json::json; + use std::collections::HashSet; + + const SEARCH_RULES: [&str; 1] = ["*"]; + const VALID_KEY: &str = "a19b6ec84ee31324efa560cd1f7e6939"; + + fn build_validation() -> Validation { + let mut validation = Validation::new(Algorithm::HS256); + validation.validate_exp = false; + validation.required_spec_claims = HashSet::new(); + + validation + } + + #[test] + fn test_generate_token_with_given_key() { + let api_key_uid = S("76cf8b87-fd12-4688-ad34-260d930ca4f4"); + let token = + generate_tenant_token(api_key_uid, json!(SEARCH_RULES), VALID_KEY, None).unwrap(); + + let valid_key = decode::<TenantTokenClaim>( + &token, + &DecodingKey::from_secret(VALID_KEY.as_ref()), + &build_validation(), + ); + let invalid_key = decode::<TenantTokenClaim>( + &token, + &DecodingKey::from_secret("not-the-same-key".as_ref()), + &build_validation(), + ); + + assert!(valid_key.is_ok()); + assert!(invalid_key.is_err()); + } + + #[test] + fn test_generate_token_without_uid() { + let api_key_uid = S(""); + let key = S(""); + let token = generate_tenant_token(api_key_uid, json!(SEARCH_RULES), key, None); + + assert!(token.is_err()); + } + + #[test] + fn test_generate_token_with_expiration() { + let api_key_uid = S("76cf8b87-fd12-4688-ad34-260d930ca4f4"); + let exp = OffsetDateTime::now_utc() + time::Duration::HOUR; + let token = + generate_tenant_token(api_key_uid, json!(SEARCH_RULES), VALID_KEY, Some(exp)).unwrap(); + + let decoded = decode::<TenantTokenClaim>( + &token, + &DecodingKey::from_secret(VALID_KEY.as_ref()), + &Validation::new(Algorithm::HS256), + ); + + assert!(decoded.is_ok()); + } + + #[test] + fn test_generate_token_with_expires_at_in_the_past() { + let api_key_uid = S("76cf8b87-fd12-4688-ad34-260d930ca4f4"); + let exp = OffsetDateTime::now_utc() - time::Duration::HOUR; + let token = generate_tenant_token(api_key_uid, json!(SEARCH_RULES), VALID_KEY, Some(exp)); + + assert!(token.is_err()); + } + + #[test] + fn test_generate_token_contains_claims() { + let api_key_uid = S("76cf8b87-fd12-4688-ad34-260d930ca4f4"); + let token = + generate_tenant_token(api_key_uid.clone(), json!(SEARCH_RULES), VALID_KEY, None) + .unwrap(); + + let decoded = decode::<TenantTokenClaim>( + &token, + &DecodingKey::from_secret(VALID_KEY.as_ref()), + &build_validation(), + ) + .expect("Cannot decode the token"); + + assert_eq!(decoded.claims.api_key_uid, api_key_uid); + assert_eq!(decoded.claims.search_rules, json!(SEARCH_RULES)); + } + + #[test] + fn test_generate_token_with_multi_byte_chars() { + let api_key_uid = S("76cf8b87-fd12-4688-ad34-260d930ca4f4"); + let key = "Ëa1ทt9bVcL-vãUทtP3OpXW5qPc%bWH5ทvw09"; + let token = + generate_tenant_token(api_key_uid.clone(), json!(SEARCH_RULES), key, None).unwrap(); + + let decoded = decode::<TenantTokenClaim>( + &token, + &DecodingKey::from_secret(key.as_ref()), + &build_validation(), + ) + .expect("Cannot decode the token"); + + assert_eq!(decoded.claims.api_key_uid, api_key_uid); + } + + #[test] + fn test_generate_token_with_wrongly_formatted_uid() { + let api_key_uid = S("xxx"); + let key = "Ëa1ทt9bVcL-vãUทtP3OpXW5qPc%bWH5ทvw09"; + let token = generate_tenant_token(api_key_uid, json!(SEARCH_RULES), key, None); + + assert!(token.is_err()); + } + + #[test] + fn test_generate_token_with_wrong_uid_version() { + let api_key_uid = S("6a11eb96-2485-11ed-861d-0242ac120002"); + let key = "Ëa1ทt9bVcL-vãUทtP3OpXW5qPc%bWH5ทvw09"; + let token = generate_tenant_token(api_key_uid, json!(SEARCH_RULES), key, None); + + 
assert!(token.is_err()); + } +} diff --git a/backend/vendor/meilisearch-sdk/src/utils.rs b/backend/vendor/meilisearch-sdk/src/utils.rs new file mode 100644 index 000000000..65f93ff78 --- /dev/null +++ b/backend/vendor/meilisearch-sdk/src/utils.rs @@ -0,0 +1,45 @@ +use std::time::Duration; + +#[cfg(not(target_arch = "wasm32"))] +pub(crate) async fn async_sleep(interval: Duration) { + let (sender, receiver) = futures::channel::oneshot::channel::<()>(); + std::thread::spawn(move || { + std::thread::sleep(interval); + let _ = sender.send(()); + }); + let _ = receiver.await; +} + +#[cfg(target_arch = "wasm32")] +pub(crate) async fn async_sleep(interval: Duration) { + use std::convert::TryInto; + use wasm_bindgen_futures::JsFuture; + + JsFuture::from(web_sys::js_sys::Promise::new(&mut |yes, _| { + web_sys::window() + .unwrap() + .set_timeout_with_callback_and_timeout_and_arguments_0( + &yes, + interval.as_millis().try_into().unwrap(), + ) + .unwrap(); + })) + .await + .unwrap(); +} + +#[cfg(test)] +mod test { + use super::*; + use meilisearch_test_macro::meilisearch_test; + + #[meilisearch_test] + async fn test_async_sleep() { + let sleep_duration = Duration::from_millis(10); + let now = std::time::Instant::now(); + + async_sleep(sleep_duration).await; + + assert!(now.elapsed() >= sleep_duration); + } +} diff --git a/docs/docs/setup/auth/user/index.md b/docs/docs/setup/auth/user/index.md index 0b3d0c136..87e4f29b3 100644 --- a/docs/docs/setup/auth/user/index.md +++ b/docs/docs/setup/auth/user/index.md @@ -40,6 +40,15 @@ Tobira only gets new data about a user at login, and it's impossible to implemen If you can't use the built-in session management, use `"callback:..."`, which gives you full flexibility. `"trust-auth-headers"` should be avoided as it has some disadvantages compared to `"callback:..."` (header length limits, easier to configure, ...), but you can still use it if it works well within your system. +:::tip + +For debugging your integration, configure the following log filter: + +```toml +[log] +filters."tobira::http" = "trace" +``` +::: ## User information Tobira needs diff --git a/docs/docs/setup/config.toml b/docs/docs/setup/config.toml index 7293a71ea..b4276dbc2 100644 --- a/docs/docs/setup/config.toml +++ b/docs/docs/setup/config.toml @@ -5,11 +5,12 @@ # units: 'ms', 's', 'min', 'h' and 'd'. # # All user-facing texts you can configure here have to be specified per -# language, with two letter language key. Only English ('en') is required. -# Take `general.site_title` for example: +# language, with two letter language key. The special key 'default' is +# required and used as fallback for languages that are not specified +# explicitly. Take `general.site_title` for example: # # [general] -# site_title.en = "My university" +# site_title.default = "My university" # site_title.de = "Meine Universität" # @@ -22,7 +23,7 @@ # Public URL to Tobira (without path). # Used for RSS feeds, as those require specifying absolute URLs to resources. -# +# # Example: "https://tobira.my-uni.edu". # # Required! This value must be specified. @@ -34,16 +35,16 @@ # These can be specified in multiple languages. # Consent is prompted upon first use and only if this is configured. It is # re-prompted when any of these values change. -# +# # We recommend not to configure this unless absolutely necessary, # in order to not degrade the user experience needlessly. 
-# +# Example: -# +# ``` -# initial_consent.title.en = "Terms & Conditions" -# initial_consent.button.en = "Agree" -# initial_consent.text.en = """ +# initial_consent.title.default = "Terms & Conditions" +# initial_consent.button.default = "Agree" +# initial_consent.text.default = """ # To use Tobira, you need to agree to our terms and conditions: # - [Terms](https://www.our-terms.de) # - [Conditions](https://www.our-conditions.de) @@ -62,13 +63,13 @@ # add custom ones. Note that these two default links are special and can # be specified with only the shown string. To add custom ones, you need # to define a label and a link. The link is either the same for every language -# or can be specified for each language in the same manner as the label. +# or can be specified for each language in the same manner as the label. # Example: # # ``` # footer_links = [ -# { label = { en = "Example 1" }, link = "https://example.com" }, -# { label = { en = "Example 2" }, link = { en = "https://example.com/en" } }, +# { label = { default = "Example 1" }, link = "https://example.com" }, +# { label = { default = "Example 2" }, link = { default = "https://example.com/en" } }, # "about", # ] # ``` @@ -79,8 +80,8 @@ # Additional metadata that is shown below a video. Example: # # [general.metadata] -# dcterms.spatial = { en = "Location", de = "Ort" } -# "http://my.domain/xml/namespace".courseLink = { en = "Course", de = "Kurs"} +# dcterms.spatial = { default = "Location", de = "Ort" } +# "http://my.domain/xml/namespace".courseLink = { default = "Course", de = "Kurs"} # # As you can see, this is a mapping of a metadata location (the XML # namespace and the name) to a translated label. For the XML namespace @@ -123,6 +124,25 @@ # Default value: false #users_searchable = false +# This allows users to edit the ACL of events they have write access to. +# Doing so will update the ACL in Opencast and start the `republish-metadata` +# workflow to propagate the changes to other publications as well. +# Instead of waiting for the workflow, however, Tobira will also immediately +# store the updated ACL in its database. +# +# Note that this might lead to situations where the event ACL in Tobira is different +# from that in other publications, mainly if the aforementioned workflow fails +# or takes an unusually long time to complete. +# +# Default value: true +#allow_acl_edit = true + +# Activating this will disable ACL editing for events that are part of a series. +# For the uploader, this means that the ACL of the series will be used. +# +# Default value: false +#lock_acl_to_series = false + [db] # The username of the database user. @@ -379,12 +399,6 @@ # Default value: true #stdout = true -# If set to `true`, HTTP header of each incoming request are logged -# (with 'trace' level). -# -# Default value: false -#log_http_headers = false - [opencast] # URL to Opencast. Currently used for all purposes (syncing, Studio, @@ -421,9 +435,14 @@ # Example: "https://admin.oc.my-uni.edu/editor-ui/index.html". #editor_url = +# Extra Opencast hosts that are not listed in any other value above but +# should also be trusted. +# +# Default value: [] +#other_hosts = [] -[sync] -# Username of the user used to communicate with Opencast for data syncing. +# Username of the user used to communicate with Opencast for data syncing +# and external API authentication. # This user has to have access to all events and series. Currently, that # user has to be admin. # @@ -435,6 +454,8 @@ # Required! This value must be specified. 
#password = + +[sync] # A rough estimate of how many items (events & series) are transferred in # each HTTP request while harvesting (syncing) with the Opencast # instance. @@ -460,6 +481,19 @@ # Default value: "30s" #poll_period = "30s" +# Whether SHA1-hashed series passwords (as assignable by ETH's admin UI +# build) are interpreted in Tobira. +# +# Default value: false +#interpret_eth_passwords = false + +# Number of concurrent tasks with which Tobira downloads assets from +# Opencast. The default should be a good sweet spot. Decrease to reduce +# load on Opencast, increase to speed up download a bit. +# +# Default value: 8 +#concurrent_download_tasks = 8 + [meili] # The access key. This can be the master key, but ideally should be an API @@ -500,30 +534,22 @@ # Required! This value must be specified. #favicon = - # Logo used in the top left corner of the page. Using SVG logos is recommended. -# See the documentation on theming/logos for more info! -[theme.logo] -# The normal, usually wide logo that is shown on desktop screens. The -# value is a map with a `path` and `resolution` key: +# You can configure specific logos for small and large screens, dark and light mode, +# and any number of languages. Example: # -# large = { path = "logo.svg", resolution = [20, 8] } +# ``` +# logos = [ +# { path = "logo-wide-light.svg", mode = "light", size = "wide", resolution = [425, 182] }, +# { path = "logo-wide-dark.svg", mode = "dark", size = "wide", resolution = [425, 182] }, +# { path = "logo-small.svg", size = "narrow", resolution = [212, 182] }, +# ] +# ``` # -# The resolution is only an aspect ratio. It is used to avoid layout -# shifts in the frontend by allocating the correct size for the logo -# before the browser loaded the file. +# See the documentation on theming/logos for more info and additional examples! # # Required! This value must be specified. -#large = - -# A less wide logo used for narrow screens. -#small = - -# Large logo for dark mode usage. -#large_dark = - -# Small logo for dark mode usage. -#small_dark = +#logos = # Colors used in the UI. Specified in sRGB. diff --git a/docs/docs/setup/requirements.md b/docs/docs/setup/requirements.md index 5d1a2bbe9..eebc787ca 100644 --- a/docs/docs/setup/requirements.md +++ b/docs/docs/setup/requirements.md @@ -8,7 +8,7 @@ To run, Tobira requires: - A Unix system. - A **PostgreSQL** (≥12) database (see below for further requirements). -- [**Meilisearch**](https://www.meilisearch.com/) (≥ v1.4). For installation, see [Meili's docs](https://docs.meilisearch.com/learn/getting_started/quick_start.html#step-1-setup-and-installation). +- [**Meilisearch**](https://www.meilisearch.com/) (≥ v1.12). For installation, see [Meili's docs](https://docs.meilisearch.com/learn/getting_started/quick_start.html#step-1-setup-and-installation). - An **Opencast** that satisfies certain condition. See below. diff --git a/docs/docs/setup/theme.md b/docs/docs/setup/theme.md index f56776f2f..171b9cb0c 100644 --- a/docs/docs/setup/theme.md +++ b/docs/docs/setup/theme.md @@ -21,14 +21,57 @@ Once the logo file is created and configured, adjust `header_height` to your lik This is the height of the header (and thus also your logo) in pixels. Only the logo is stretched, all other elements are vertically centered within the header. -You can also configure a second logo file as `logo.small` which is used for narrow screens (e.g. phones). -This is usually roughly square. 
-We strongly recommend setting this smaller logo, as otherwise, the main logo (especially if it is very wide) might get shrunk on narrow screens in order to still show the other elements in the header. - -You should also test if the logo is properly configured for dark mode: -- To use a different image for dark mode, set `logo.large_dark` and `logo.small_dark` appropriately. -- If your normal logo already works well for dark mode, set `logo.large_dark` and `logo.small_dark` to the same values as `large` and `small`, respectively. -- If `logo.large_dark` and `logo.small_dark` are not set, `large` and `small` are used, but with all colors inverted. This might work for you in special cases, e.g. if your logo is fully black or transparent. + +You can configure different logo files for different cases, depending on device size (mobile vs desktop), color scheme (light vs dark) and language. In the simplest case, a single logo for everything, your config looks like this: + +```toml title=config.toml +[theme] +logos = [ + { path = "logo.svg", resolution = [20, 8] }, +] +``` + +Note that the resolution is only an aspect ratio that is used to prevent layout shifts. + +Most likely, you will want to configure different logos for desktop and mobile, for example. +You can do that by duplicating the line and adding `size = "narrow"` and `size = "wide"` to the entries: + +```toml title=config.toml +[theme] +logos = [ + { size = "wide", path = "logo-desktop.svg", resolution = [20, 8] }, + { size = "narrow", path = "logo-mobile.svg", resolution = [1, 1] }, +] +``` + +You can split these entries further by adding `mode = "light"` and `mode = "dark"`. +Let's say you only need to configure a dark version of your large logo, as your small one works well in dark mode already: + +```toml title=config.toml +[theme] +logos = [ + { size = "wide", mode = "light", path = "logo-desktop-light.svg", resolution = [20, 8] }, + { size = "wide", mode = "dark", path = "logo-desktop-dark.svg", resolution = [20, 8] }, + { size = "narrow", path = "logo-mobile.svg", resolution = [1, 1] }, +] +``` + +Finally, you can add the `lang = ".."` field to specify different logos for different languages (e.g. `"en"` and `"de"`). +Here, `"*"` marks the default logo, used whenever no logo is specified for a specific language. +Continuing the example, let's say the wide logos are language-specific: there is a dedicated logo for German, and all other languages use another one. + +```toml title=config.toml +[theme] +logos = [ + { size = "wide", mode = "light", lang = "*", path = "logo-desktop-light.svg", resolution = [20, 8] }, + { size = "wide", mode = "light", lang = "de", path = "logo-desktop-light-de.svg", resolution = [20, 8] }, + { size = "wide", mode = "dark", lang = "*", path = "logo-desktop-dark.svg", resolution = [20, 8] }, + { size = "wide", mode = "dark", lang = "de", path = "logo-desktop-dark-de.svg", resolution = [20, 8] }, + { size = "narrow", path = "logo-mobile.svg", resolution = [1, 1] }, +] +``` + +The order in which these distinguishing fields (`size`, `mode`, `lang`) are added is up to you, and you can split and merge these entries however you like, as long as for each specific case (i.e. each tuple of `(size, mode, lang)`), there is exactly one applicable logo definition. 
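For instance, you could split only the narrow logo by language while a single wide entry keeps covering all languages and both color schemes. A sketch of such a config (the file names are made up):

```toml title=config.toml
[theme]
logos = [
    # One wide logo, applying to all languages and both light and dark mode.
    { size = "wide", path = "logo-desktop.svg", resolution = [20, 8] },
    # The narrow logo is language-specific: German gets its own file,
    # all other languages fall back to the `"*"` entry.
    { size = "narrow", lang = "*", path = "logo-mobile.svg", resolution = [1, 1] },
    { size = "narrow", lang = "de", path = "logo-mobile-de.svg", resolution = [1, 1] },
]
```

Omitting a field (like `mode` above) simply makes an entry apply to all possible values of that field, so every `(size, mode, lang)` tuple is still covered by exactly one entry.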
## Favicon diff --git a/frontend/package-lock.json b/frontend/package-lock.json index 5154b2ef2..03a97ea10 100644 --- a/frontend/package-lock.json +++ b/frontend/package-lock.json @@ -24,11 +24,12 @@ "hls.js": "^1.5.15", "i18next": "^23.15.2", "i18next-browser-languagedetector": "^8.0.0", + "lucide-react": "^0.439.0", "paella-basic-plugins": "1.44.10", - "paella-core": "1.49.6", + "paella-core": "1.50.2", "paella-mp4multiquality-plugin": "1.47.1", - "paella-skins": "1.48.0", - "paella-slide-plugins": "1.48.1", + "paella-skins": "1.48.1", + "paella-slide-plugins": "1.50.1", "paella-user-tracking": "1.42.5", "paella-zoom-plugin": "1.41.3", "qrcode.react": "^4.0.1", @@ -6981,6 +6982,14 @@ "yallist": "^3.0.2" } }, + "node_modules/lucide-react": { + "version": "0.439.0", + "resolved": "https://registry.npmjs.org/lucide-react/-/lucide-react-0.439.0.tgz", + "integrity": "sha512-PafSWvDTpxdtNEndS2HIHxcNAbd54OaqSYJO90/b63rab2HWYqDbH194j0i82ZFdWOAcf0AHinRykXRRK2PJbw==", + "peerDependencies": { + "react": "^16.5.1 || ^17.0.0 || ^18.0.0 || ^19.0.0-rc" + } + }, "node_modules/make-error": { "version": "1.3.6", "resolved": "https://registry.npmjs.org/make-error/-/make-error-1.3.6.tgz", @@ -7909,9 +7918,9 @@ } }, "node_modules/paella-core": { - "version": "1.49.6", - "resolved": "https://registry.npmjs.org/paella-core/-/paella-core-1.49.6.tgz", - "integrity": "sha512-90tZ+0AuAukCrTMykvCx4FbVhzSsS96nEgGvGr6DHp/hf7fW87ncye0GzntaAaB+ndmAs+FQl8oHRiZtUuYgbw==", + "version": "1.50.2", + "resolved": "https://registry.npmjs.org/paella-core/-/paella-core-1.50.2.tgz", + "integrity": "sha512-uh6WvxgyKkoIQ4vwe0TIlvaG7zVFyZhYVLmTG+8+qLd2eiWPoobH7N+t9Om41i2apbuqq6jGAv8UH+ZIUzZ5Rg==", "dependencies": { "core-js": "^3.8.2", "hls.js": "^1.0.4" @@ -7926,16 +7935,18 @@ } }, "node_modules/paella-skins": { - "version": "1.48.0", - "resolved": "https://registry.npmjs.org/paella-skins/-/paella-skins-1.48.0.tgz", - "integrity": "sha512-ABnRvt95adrg20TBmWNoUlSVCJEzGOnKJqrmNabxilF2/dulWs4STdrShBE8EsdxHDonQqC/fGei5lg9IarCxQ==" + "version": "1.48.1", + "resolved": "https://registry.npmjs.org/paella-skins/-/paella-skins-1.48.1.tgz", + "integrity": "sha512-Fttg9g2zF2mH+ao6Nigs0l3BcEt2pZTv70uJBXSpqb2e+/j9x0N/s3oVtQBxhXucdn5I1SUPxp40UYSO/oiFMA==", + "license": "ECL-2.0" }, "node_modules/paella-slide-plugins": { - "version": "1.48.1", - "resolved": "https://registry.npmjs.org/paella-slide-plugins/-/paella-slide-plugins-1.48.1.tgz", - "integrity": "sha512-R92Y7+++Kq7PFaJMrWYc1bzUyo882TmGPJsh2g3UNiHS7O+9NOzWgeO/jZl/izWj9rNjdt/+GIHiVWBz62FqNA==", + "version": "1.50.1", + "resolved": "https://registry.npmjs.org/paella-slide-plugins/-/paella-slide-plugins-1.50.1.tgz", + "integrity": "sha512-HJcQCTIIUL9Hc+ibwJH+ElOqu1xHNvAjJ7OsiLk8gGKbpUzFe4/DzqjbomHa/wQr39a+LwY6dODi7guM/XRbSA==", + "license": "SEE LICENSE IN license.txt", "dependencies": { - "paella-core": "^1.48.2" + "paella-core": "^1.50.2" } }, "node_modules/paella-user-tracking": { diff --git a/frontend/package.json b/frontend/package.json index 6c030d203..b1555061b 100644 --- a/frontend/package.json +++ b/frontend/package.json @@ -40,11 +40,12 @@ "hls.js": "^1.5.15", "i18next": "^23.15.2", "i18next-browser-languagedetector": "^8.0.0", + "lucide-react": "^0.439.0", "paella-basic-plugins": "1.44.10", - "paella-core": "1.49.6", + "paella-core": "1.50.2", "paella-mp4multiquality-plugin": "1.47.1", - "paella-skins": "1.48.0", - "paella-slide-plugins": "1.48.1", + "paella-skins": "1.48.1", + "paella-slide-plugins": "1.50.1", "paella-user-tracking": "1.42.5", "paella-zoom-plugin": "1.41.3", 
"qrcode.react": "^4.0.1", diff --git a/frontend/relay.config.js b/frontend/relay.config.js index 1da690d83..a513132a6 100644 --- a/frontend/relay.config.js +++ b/frontend/relay.config.js @@ -8,10 +8,11 @@ module.exports = { schema: path.join(APP_PATH, "schema.graphql"), language: "typescript", customScalarTypes: { - "DateTimeUtc": "string", + "DateTime": "string", "Cursor": "string", + "ByteSpan": "string", "ExtraMetadata": "Record>", - "TranslatedString": "Record", + "TranslatedString": "{ default: string } & Record", }, schemaExtensions: [APP_PATH], }; diff --git a/frontend/src/App.tsx b/frontend/src/App.tsx index 83fde1f1d..cea82f4ac 100644 --- a/frontend/src/App.tsx +++ b/frontend/src/App.tsx @@ -1,4 +1,4 @@ -import React, { ReactNode, StrictMode } from "react"; +import React, { ReactNode, StrictMode, Suspense } from "react"; import { RelayEnvironmentProvider } from "react-relay/hooks"; import { CacheProvider } from "@emotion/react"; import createEmotionCache from "@emotion/cache"; @@ -7,7 +7,7 @@ import { GlobalErrorBoundary } from "./util/err"; import { environment } from "./relay"; import { GlobalStyle } from "./GlobalStyle"; import { ActiveRoute, Router } from "./router"; -import { MatchedRoute } from "./rauta"; +import { RouteMatchInfo } from "./rauta"; import { MenuProvider } from "./layout/MenuState"; import { GraphQLErrorBoundary } from "./relay/boundary"; import { LoadingIndicator } from "./ui/LoadingIndicator"; @@ -19,10 +19,11 @@ import { } from "@opencast/appkit"; import { COLORS } from "./color"; import { InitialConsent } from "./ui/InitialConsent"; +import { InitialLoading } from "./layout/Root"; type Props = { - initialRoute: MatchedRoute; + initialRoute: RouteMatchInfo; consentGiven: boolean | null; }; @@ -39,7 +40,9 @@ export const App: React.FC = ({ initialRoute, consentGiven }) => ( - + }> + + diff --git a/frontend/src/config.ts b/frontend/src/config.ts index 92ce23da3..2ef434e59 100644 --- a/frontend/src/config.ts +++ b/frontend/src/config.ts @@ -27,10 +27,12 @@ type Config = { initialConsent: InitialConsent | null; showDownloadButton: boolean; usersSearchable: boolean; + allowAclEdit: boolean; + lockAclToSeries: boolean; opencast: OpencastConfig; footerLinks: FooterLink[]; metadataLabels: Record>; - logo: LogoConfig; + logos: LogoConfig; plyr: PlyrConfig; upload: UploadConfig; paellaPluginConfig: object; @@ -62,16 +64,12 @@ type AuthConfig = { }; type LogoConfig = { - large: SingleLogoConfig; - small: SingleLogoConfig | null; - largeDark: SingleLogoConfig | null; - smallDark: SingleLogoConfig | null; -}; - -type SingleLogoConfig = { + size: "wide" | "narrow"| null; + mode: "light" | "dark"| null; + lang: string | null; path: string; resolution: number[]; -}; +}[]; type PlyrConfig = { blankVideo: string; @@ -104,7 +102,7 @@ type SyncConfig = { type MetadataLabel = "builtin:license" | "builtin:source" | TranslatedString; -export type TranslatedString = { en: string } & Record<"de", string | undefined>; +export type TranslatedString = { default: string } & Record<"en" | "de", string | undefined>; const CONFIG: Config = parseConfig(); export default CONFIG; diff --git a/frontend/src/i18n/locales/de.yaml b/frontend/src/i18n/locales/de.yaml index 401bc2b9b..e6762a117 100644 --- a/frontend/src/i18n/locales/de.yaml +++ b/frontend/src/i18n/locales/de.yaml @@ -48,6 +48,7 @@ errors: might-need-to-login-link: Sie müssen sich möglicherweise <1>einloggen. invalid-input: Ungültige Eingabe. opencast-unavailable: Opencast ist momentan nicht erreichbar. 
+ opencast-error: Unerwartete Opencast-Antwort. internal-server-error: Interner Server-Fehler (es ist ein Problem mit dem Server aufgetreten). are-you-connected-to-internet: Sind Sie mit dem Internet verbunden? unknown: Unbekannter Fehler. @@ -114,7 +115,7 @@ user: manage-content: Verwalten login-page: - heading: Anmeldung + heading: Anmeldung user-id: Nutzerkennung password: Passwort bad-credentials: 'Anmeldung fehlgeschlagen: Falsche Anmeldedaten.' @@ -130,9 +131,11 @@ video: link: Zur Videoseite part-of-series: Teil der Serie more-from-series: Mehr von „{{series}}“ - more-from-playlist: Mehr von “{{playlist}}” + more-from-playlist: Mehr von „{{playlist}}“ deleted-video-block: Das hier referenzierte Video wurde gelöscht. not-allowed-video-block: Sie sind nicht autorisiert, das hier eingebettete Video zu sehen. + preview: Vorschau + preview-only: Sie brauchen zusätzliche Berechtigungen, um dieses Video anzuschauen. not-ready: title: Video noch nicht verarbeitet text: > @@ -178,6 +181,17 @@ video: embed: Einbetten rss: RSS show-qr-code: QR Code anzeigen + password: + heading: Geschütztes Video + sub-heading: Zugriff auf dieses Video ist beschränkt. + body: > + Bitte geben Sie die Zugangsdaten ein, die Sie für dieses Video erhalten haben. + Beachten Sie, dass diese nicht Ihren Logindaten für dieses Portal entsprechen. + label: + id: Kennung + password: Passwort + submit: Freischalten + invalid-credentials: Ungültige Zugangsdaten. playlist: deleted-playlist-block: Die hier referenzierte Playlist wurde gelöscht. @@ -307,6 +321,7 @@ upload: opencast-server-error: Opencast-Server-Fehler (unerwartete Antwort). opencast-unreachable: 'Netzwerkfehler: Opencast kann nicht erreicht werden.' jwt-invalid: 'Interner Fremdauthentifizierungsfehler: Opencast hat das Hochladen nicht autorisiert.' + failed-fetching-series-acl: Abruf der Serienzugangsberechtigungen fehlgeschlagen. acl: unknown-user-note: unbekannt @@ -322,6 +337,13 @@ manage: access: authorized-groups: Autorisierte Gruppen authorized-users: Autorisierte Personen + editing-disabled: Bearbeitung der Zugangsberechtigungen ist deaktiviert. + workflow-active: > + Änderung der Berechtigungen ist zurzeit nicht möglich, da das Video im Hintergrund verarbeitet wird. + <br />
+ Bitte versuchen Sie es später nochmal. + locked-to-series: > + Die Berechtigungen dieses Videos werden durch seine Serie bestimmt und können daher nicht bearbeitet werden. users-no-options: initial-searchable: Nach Name suchen oder exakten Nutzernamen/exakte E-Mail angeben none-found-searchable: Keine Personen gefunden @@ -337,8 +359,9 @@ manage: yourself: Sie subset-warning: 'Diese Auswahl ist bereits in den folgenden Gruppen enthalten: {{groups}}.' inherited: 'Geerbte Berechtigungen:' - inherited-tooltip: > - Die folgenden Berechtigungen wurden auf einer übergeordneten Seite gewährt und werden auf alle Unterseiten vererbt. + inherited-tooltip: > + Die folgenden Berechtigungen wurden auf einer übergeordneten Seite gewährt und werden auf alle Unterseiten + vererbt. actions: title: Berechtigung read: Lesen @@ -483,11 +506,9 @@ manage: name-from-block-description: > Video/Serie-Element von dieser Seite verknüpfen, sodass der Seitenname immer dem Titel des verknüpften Elements entspricht. + no-name: Keinen Namen anzeigen no-blocks: Auf dieser Seite befinden sich keine verknüpfbaren Videos/Serien. rename-failed: Änderung des Namen fehlgeschlagen. - no-rename-root: > - Der Name der Startseite kann nicht geändert werden. Die Überschrift wird - von der globalen Seitentitel-Einstellung kontrolliert. children: heading: Reihenfolge Unterseiten @@ -648,6 +669,12 @@ api-remote-errors: delete: not-found: Das zu löschende Video existiert nicht. Möglicherweise wurde es bereits entfernt. not-allowed: Sie haben nicht die Berechtigung, dieses Video zu löschen. + acl: + not-found: "Zugriffsrechte konnten nicht geändert werden: Video nicht gefunden." + not-allowed: Sie haben nicht die Berechtigung, die Zugriffsrechte dieses Videos zu ändern. + workflow: + not-allowed: Sie haben nicht die Berechtigung, die Workflowaktivität für dieses Video abzufragen. + active: $t(manage.access.workflow-active) embed: not-supported: Diese Seite kann nicht eingebettet werden. diff --git a/frontend/src/i18n/locales/en.yaml b/frontend/src/i18n/locales/en.yaml index 3dd91daf6..8afc3eed2 100644 --- a/frontend/src/i18n/locales/en.yaml +++ b/frontend/src/i18n/locales/en.yaml @@ -47,6 +47,7 @@ errors: might-need-to-login-link: You might need to <1>login</1>. invalid-input: Invalid input. opencast-unavailable: Opencast is currently not available. + opencast-error: Opencast returned an unexpected response. internal-server-error: Internal server error (something is wrong with the server). are-you-connected-to-internet: Are you connected to the internet? unknown: Unknown error. @@ -112,7 +113,7 @@ user: manage-content: Manage login-page: - heading: Login + heading: Login user-id: User ID password: Password bad-credentials: 'Login failed: invalid credentials.' @@ -131,6 +132,8 @@ video: more-from-playlist: More from “{{playlist}}” deleted-video-block: The video referenced here was deleted. not-allowed-video-block: You are not allowed to view the video embedded here. + preview: Preview + preview-only: You need additional permissions to watch this video. not-ready: title: Video not processed yet text: > @@ -161,7 +164,7 @@ video: title: Download presenter: Video (speaker) slides: Video (presentation) - info: > + info: > Here you can download the video(s) in different formats/qualities (Right click - Save as). manage: Manage start: Start @@ -175,6 +178,18 @@ video: embed: Embed rss: RSS show-qr-code: Show QR code + password: + heading: Protected Video + sub-heading: Access to this video is restricted. 
+      no-preview-permission: $t(api-remote-errors.view.event)
+      body: >
+        Please enter the username and password you received to access this video;
+        note that these are not your login credentials for this site.
+      label:
+        id: Identifier
+        password: Password
+      submit: Verify
+      invalid-credentials: Invalid credentials.
 playlist:
   deleted-playlist-block: The playlist referenced here was deleted.
@@ -303,6 +318,7 @@ upload:
     opencast-server-error: Opencast server error (unexpected response).
     opencast-unreachable: 'Network error: Opencast cannot be reached.'
     jwt-invalid: 'Internal cross-authentication error: Opencast did not authorize the upload.'
+    failed-fetching-series-acl: Failed to fetch the series ACL.
 acl:
   unknown-user-note: unknown
@@ -318,6 +334,13 @@ manage:
   access:
     authorized-groups: Authorized groups
     authorized-users: Authorized users
+    editing-disabled: Access policy editing is disabled.
+    workflow-active: >
+      Changing the access policy is not possible at this time, since the video is being processed in the background.
+
+      Please try again later.
+    locked-to-series: >
+      The access policy of this video is determined by its series and can't be edited.
     users-no-options:
       initial-searchable: Type to search for users by name (or enter exact email/username)
       none-found-searchable: No user found
@@ -462,11 +485,9 @@ manage:
     name-from-block: Derive name from video or series
     name-from-block-description: >
      Link a video/series-element from this page: the page name will always be the title of the linked element.
+    no-name: Omit name
     no-blocks: There are no linkable video/series on this page.
     rename-failed: Failed to change the name.
-    no-rename-root: >
-      The homepage cannot be renamed. The heading is controlled by the global
-      site title setting.
   children:
     heading: Order of subpages
@@ -623,6 +644,12 @@ api-remote-errors:
       The video you are trying to delete does not exist.
       It might have been removed already.
     not-allowed: You are not allowed to delete this video.
+  acl:
+    not-found: "Access policy update failed: video not found."
+    not-allowed: You are not allowed to update the access policies of this video.
+  workflow:
+    not-allowed: You are not allowed to inquire about the workflow activity of this video.
+    active: $t(manage.access.workflow-active)
 embed:
   not-supported: This page can't be embedded.
diff --git a/frontend/src/icons/series.svg b/frontend/src/icons/series.svg
new file mode 100644
index 000000000..a61722615
--- /dev/null
+++ b/frontend/src/icons/series.svg
@@ -0,0 +1,2 @@
+
+
diff --git a/frontend/src/layout/Root.tsx b/frontend/src/layout/Root.tsx
index 3b9acf8ec..081fa03a3 100644
--- a/frontend/src/layout/Root.tsx
+++ b/frontend/src/layout/Root.tsx
@@ -1,4 +1,4 @@
-import React, { ReactNode, Suspense } from "react";
+import React, { ReactNode, useEffect, useMemo, useRef } from "react";
 import { keyframes } from "@emotion/react";
 import { useTranslation } from "react-i18next";
 import { screenWidthAtMost } from "@opencast/appkit";
@@ -15,6 +15,7 @@ import { GraphQLTaggedNode, PreloadedQuery, useFragment, usePreloadedQuery } fro
 import { OperationType } from "relay-runtime";
 import { UserData$key } from "../__generated__/UserData.graphql";
 import { useNoindexTag } from "../util";
+import { useRouter } from "../router";

 export const MAIN_PADDING = 16;

@@ -129,13 +130,7 @@ type RootLoaderProps = {
 };

 /** Entry point for almost all routes: loads the GraphQL query and renders the main page layout */
-export const RootLoader = (props: RootLoaderProps) => (
-    <Suspense fallback={<InitialLoading />}>
-        <RootLoaderImpl {...props} />
-    </Suspense>
-);
-
-export const RootLoaderImpl = ({
+export const RootLoader = ({
     query,
     queryRef,
     nav,
@@ -146,9 +141,31 @@
     const data = usePreloadedQuery(query, queryRef);
     const userData = useFragment(userDataFragment, data);

+    // We use a counter to force rerendering of the main part whenever the
+    // user navigates. This is an unfortunate hack for some cases where
+    // routes are not rerendered: for example, on the upload route, clicking
+    // "upload video" in the user menu again after uploading a video does
+    // nothing without this hack.
+    const counter = useRef(0);
+    const router = useRouter();
+    useEffect(() => router.listenBeforeNav(() => {
+        counter.current += 1;
+        return undefined;
+    }));
+
+    // Unfortunately, `` and `` are still rendered more often than they need
+    // to be on router navigation. I could not figure out how to fix that. So
+    // here, we at least memoize the rendering of the whole page, so that we
+    // don't rerun expensive rendering.
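+    // (This relies on `render`, `nav`, and `data` being referentially stable
+    // across those redundant renders; they are also the dependencies of the
+    // memo below, so an actual change still recomputes the content.)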
+ const content = useMemo(() => ( + + {render(data)} + + ), [render, nav, data]); + return ( - {render(data)} + {content} ); }; diff --git a/frontend/src/layout/header/Logo.tsx b/frontend/src/layout/header/Logo.tsx index 3ed52e892..2defa9c24 100644 --- a/frontend/src/layout/header/Logo.tsx +++ b/frontend/src/layout/header/Logo.tsx @@ -1,18 +1,20 @@ import { useTranslation } from "react-i18next"; -import { screenWidthAbove, screenWidthAtMost, useColorScheme } from "@opencast/appkit"; +import { screenWidthAbove, screenWidthAtMost } from "@opencast/appkit"; import CONFIG from "../../config"; import { BREAKPOINT_SMALL } from "../../GlobalStyle"; import { Link } from "../../router"; import { focusStyle } from "../../ui"; -import { translatedConfig } from "../../util"; +import { translatedConfig, useLogoConfig } from "../../util"; import { HEADER_BASE_PADDING } from "./ui"; import { COLORS } from "../../color"; export const Logo: React.FC = () => { const { t, i18n } = useTranslation(); - const isDark = useColorScheme().scheme === "dark"; + const logos = useLogoConfig(); + + const alt = t("general.logo-alt", { title: translatedConfig(CONFIG.siteTitle, i18n) }); // This is a bit tricky: we want to specify the `width` and `height` // attributes on the `img` elements in order to avoid layout shift. That @@ -32,24 +34,6 @@ export const Logo: React.FC = () => { // The solution is to calculate the correct `flex-basis` for the `` // element manually. - const large = CONFIG.logo.large; - const small = CONFIG.logo.small ?? CONFIG.logo.large; - const largeDark = CONFIG.logo.largeDark ?? CONFIG.logo.large; - const smallDark = CONFIG.logo.smallDark - ?? CONFIG.logo.largeDark - ?? CONFIG.logo.small - ?? CONFIG.logo.large; - - // If the dark logos are not specified, we default to the white ones but - // inverting them. - const invertLargeDark = CONFIG.logo.largeDark === null; - const invertSmallDark = CONFIG.logo.smallDark === null && CONFIG.logo.largeDark === null; - - const alt = t("general.logo-alt", { title: translatedConfig(CONFIG.siteTitle, i18n) }); - const invertCss = { - filter: "invert(100%) hue-rotate(180deg)", - }; - return ( { }, }}> {alt} {alt} = ({ variant }) => { const { t } = useTranslation(); const router = useRouter(); const ref = useRef(null); - const { debounce } = useDebounce(); // If the user is unknown, then we are still in the initial loading phase. 
// We don't want users to input anything into the search field in that @@ -71,9 +69,6 @@ export const SearchField: React.FC = ({ variant }) => { const iconStyle = { position: "absolute", right: paddingSpinner, top: paddingSpinner } as const; - const lastTimeout = useRef | undefined>(undefined); - useEffect(() => () => clearTimeout(lastTimeout.current)); - const onSearchRoute = isSearchActive(); const getSearchParam = (searchParameter: string) => { const searchParams = new URLSearchParams(document.location.search); @@ -84,14 +79,18 @@ export const SearchField: React.FC = ({ variant }) => { const defaultValue = getSearchParam("q"); - const search = useCallback(debounce((expression: string) => { + const search = (q: string) => { + if (!(q in SEARCH_TIMINGS)) { + SEARCH_TIMINGS[q] = {}; + } + SEARCH_TIMINGS[q].startSearch = window.performance.now(); const filters = { itemType: isValidSearchItemType(getSearchParam("f")), start: getSearchParam("start"), end: getSearchParam("end"), }; - router.goto(SearchRoute.url({ query: expression, ...filters }), onSearchRoute); - }, 250), []); + router.goto(SearchRoute.url({ query: q, ...filters })); + }; return (
= ({ variant }) => { }} />
{
    event.preventDefault();
-   clearTimeout(lastTimeout.current);
    search(currentRef(ref).value);
+
+   // Hide the mobile keyboard on enter. The mobile keyboard hides lots of
+   // results, and intuitively, pressing "enter" on it should close the
+   // keyboard. We don't want to remove focus for desktop users though,
+   // since that doesn't do any good. The check is not perfect, but should
+   // detect virtual keyboards very reliably.
+   const visualHeight = window.visualViewport?.height;
+   if (visualHeight && visualHeight < window.innerHeight) {
+       ref.current?.blur();
+   }
}}>
- {router.isTransitioning && isSearchActive() && } - {!router.isTransitioning && isSearchActive() && handleNavigation(router, ref)} + { + const input = currentRef(ref); + input.value = ""; + input.focus(); + }} css={{ ":hover, :focus": { color: COLORS.neutral90, @@ -177,7 +181,7 @@ export const SearchField: React.FC = ({ variant }) => { color: COLORS.neutral60, ...iconStyle, }} - >} + >
); }; diff --git a/frontend/src/layout/header/UserBox.tsx b/frontend/src/layout/header/UserBox.tsx index e542bac19..400ea1701 100644 --- a/frontend/src/layout/header/UserBox.tsx +++ b/frontend/src/layout/header/UserBox.tsx @@ -25,6 +25,7 @@ import { UploadRoute } from "../../routes/Upload"; import { ManageRoute } from "../../routes/manage"; import { ManageVideosRoute } from "../../routes/manage/Video"; import { LoginLink } from "../../routes/util"; +import { CREDENTIALS_STORAGE_KEY } from "../../routes/Video"; @@ -175,6 +176,10 @@ const LoggedIn: React.FC = ({ user }) => { return; } + Object.keys(window.localStorage) + .filter(item => item.startsWith(CREDENTIALS_STORAGE_KEY)) + .forEach(item => window.localStorage.removeItem(item)); + setLogoutState("pending"); fetch("/~session", { method: "DELETE", keepalive: true }) .then(() => { diff --git a/frontend/src/layout/header/index.tsx b/frontend/src/layout/header/index.tsx index 1fe28ca74..eed1c51c7 100644 --- a/frontend/src/layout/header/index.tsx +++ b/frontend/src/layout/header/index.tsx @@ -21,7 +21,6 @@ type Props = { export const Header: React.FC = ({ hideNavIcon = false, loginMode = false }) => { const menu = useMenu(); - const content = match(menu.state, { "closed": () => , "search": () => , diff --git a/frontend/src/rauta.tsx b/frontend/src/rauta.tsx index bb5fe354e..fe891a435 100644 --- a/frontend/src/rauta.tsx +++ b/frontend/src/rauta.tsx @@ -1,4 +1,4 @@ -import React, { useEffect, useRef, useState, useTransition } from "react"; +import React, { useCallback, useEffect, useMemo, useRef, useState, useTransition } from "react"; import { bug } from "@opencast/appkit"; @@ -30,6 +30,12 @@ export type MatchedRoute = { dispose?: () => void; }; +export type RouteMatchInfo = { + url: URL; + route: Route | FallbackRoute; + matchedRoute: MatchedRoute; +} + // /** Creates the internal representation of the given route. */ // export const makeRoute = (match: (url: URL) => MatchedRoute | null): Route => ({ match }); @@ -55,7 +61,7 @@ type LinkProps = { /** Props of the `` component. */ type RouterProps = { - initialRoute: MatchedRoute; + initialRoute: RouteMatchInfo; children: JSX.Element; }; @@ -64,17 +70,20 @@ export type RouterLib = { * Matches the given full href against all routes, returning the first * matched route or throwing an error if no route matches. */ - matchRoute: (href: string) => MatchedRoute; + matchRoute: (href: string) => RouteMatchInfo; /** * Like `matchRoute(window.location.href)`. Intended to be called before * `React.render` to obtain the initial route for the application. */ - matchInitialRoute: () => MatchedRoute; + matchInitialRoute: () => RouteMatchInfo; /** Hook to obtain a reference to the router. */ useRouter: () => RouterControl; + /** Hook to obtain the router state. */ + useRouterState: () => RouterState; + /** * An internal link, using the defined routes. Should be used instead of * `
`. Has to be mounted below a ``! @@ -96,7 +105,8 @@ export type RouterLib = { }; /** Helper class: a list of listeners */ -class Listeners unknown> { +// eslint-disable-next-line @typescript-eslint/no-explicit-any +class Listeners unknown> { private list: { listener: F }[] = []; /** Pass through the iterable protocol to the inner list */ @@ -118,12 +128,15 @@ class Listeners unknown> { /** Call all listeners with the same arguments. */ public callAll(args: Parameters) { for (const { listener } of this.list) { - listener(args); + listener(...args); } } } -export type AtNavListener = () => void; +export type AtNavListener = (info: { + newRoute: Route | FallbackRoute; + newUrl: URL; +}) => void; export type BeforeNavListener = () => "prevent-nav" | undefined; /** Obtained via `useRouter`, allowing you to perform some routing-related actions. */ @@ -167,18 +180,20 @@ export interface RouterControl { */ listenBeforeNav(listener: BeforeNavListener): () => void; - /** - * Indicates whether we are currently transitioning to a new route. Intended - * to show a loading indicator. - */ - isTransitioning: boolean; - /** * Indicates whether a user navigated to the current route from outside Tobira. */ internalOrigin: boolean; } +export type RouterState = { + /** + * Indicates whether we are currently transitioning to a new route. Intended + * to show a loading indicator. + */ + isTransitioning: boolean; +}; + export const makeRouter = (config: C): RouterLib => { // Helper to log debug messages if `config.debug` is true. const debugLog = (...args: unknown[]) => { @@ -244,7 +259,6 @@ export const makeRouter = (config: C): RouterLib => { } return { - isTransitioning: context.isTransitioning, push, replace, listenAtNav: (listener: AtNavListener) => @@ -280,6 +294,9 @@ export const makeRouter = (config: C): RouterLib => { const handleClick = (e: React.MouseEvent) => { // If the caller specified a handler, we will call it first. onClick?.(e); + if (e.isDefaultPrevented()) { + return; + } // We only want to react to simple mouse clicks. 
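The `Listeners` change above is easy to gloss over: `callAll` receives a `Parameters<F>` tuple, so it has to spread it when invoking, and the new variadic bound on `F` is what makes that type-check. A condensed, self-contained sketch; the `add` method and the simplified listener shape are illustrative stand-ins for the elided parts:

```ts
// eslint-disable-next-line @typescript-eslint/no-explicit-any
class Listeners<F extends (...args: any[]) => unknown> {
    private list: { listener: F }[] = [];

    // Hypothetical registration method, standing in for the elided code.
    public add(listener: F): void {
        this.list.push({ listener });
    }

    public callAll(args: Parameters<F>) {
        for (const { listener } of this.list) {
            // `args` is a tuple, so it must be spread: the old `listener(args)`
            // passed the whole tuple as the first argument, which only worked
            // while listeners took no arguments at all.
            listener(...args);
        }
    }
}

// Simplified: the real `AtNavListener` also carries `newRoute`.
type AtNavListener = (info: { newUrl: URL }) => void;

const atNav = new Listeners<AtNavListener>();
atNav.add(({ newUrl }) => console.log("navigated to", newUrl.pathname));
atNav.callAll([{ newUrl: new URL("https://tobira.opencast.org/~search") }]);
```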
if (e.ctrlKey || e.metaKey || e.altKey || e.shiftKey || e.button !== 0) { @@ -294,23 +311,27 @@ export const makeRouter = (config: C): RouterLib => { }, ); - const matchRoute = (href: string): MatchedRoute => { + const matchRoute = (href: string): RouteMatchInfo => { const url = new URL(href); for (const route of config.routes) { - const matched: MatchedRoute | null = route.match(url); + const matched = route.match(url); if (matched !== null) { - return matched; + return { url, route, matchedRoute: matched }; } } - return config.fallback.prepare(url); + return { + url, + route: config.fallback, + matchedRoute: config.fallback.prepare(url), + }; }; - const matchInitialRoute = (): MatchedRoute => matchRoute(window.location.href); + const matchInitialRoute = (): RouteMatchInfo => matchRoute(window.location.href); type ActiveRoute = { - route: MatchedRoute; + route: RouteMatchInfo; /** A scroll position that should be restored when the route is first rendered */ initialScroll: number | null; @@ -323,12 +344,24 @@ export const makeRouter = (config: C): RouterLib => { atNav: Listeners; beforeNav: Listeners; }; - isTransitioning: boolean; }; const Context = React.createContext(null); + type StateContextData = { + isTransitioning: boolean; + }; + const StateContext = React.createContext(null); + const useRouter = (): RouterControl => useRouterImpl("`useRouter`"); + const useRouterState = (): RouterState => { + const context = React.useContext(StateContext); + if (context === null) { + return bug("useRouterState used without a parent ! That's not allowed."); + } + + return context; + }; /** Provides the required context for `` and `` components. */ const Router = ({ initialRoute, children }: RouterProps) => { @@ -347,13 +380,16 @@ export const makeRouter = (config: C): RouterLib => { // `StrictMode` work, as with that, this component might be unmounted // for reasons other than a route change. const navigatedAway = useRef(false); - const setActiveRoute = (newRoute: ActiveRoute) => { + const setActiveRoute = useCallback((newRoute: ActiveRoute) => { navigatedAway.current = true; startTransition(() => { setActiveRouteRaw(() => newRoute); - listeners.current.atNav.callAll([]); + listeners.current.atNav.callAll([{ + newUrl: newRoute.route.url, + newRoute: newRoute.route.route, + }]); }); - }; + }, [navigatedAway, setActiveRouteRaw, listeners]); // Register some event listeners and set global values. useEffect(() => { @@ -440,20 +476,23 @@ export const makeRouter = (config: C): RouterLib => { // Dispose of routes when they are no longer needed. 
useEffect(() => () => { - if (navigatedAway.current && activeRoute.route.dispose) { + if (navigatedAway.current && activeRoute.route.matchedRoute.dispose) { debugLog("Disposing of route: ", activeRoute); - activeRoute.route.dispose(); + activeRoute.route.matchedRoute.dispose(); } }, [activeRoute, navigatedAway]); - const contextData = { + const contextData = useMemo(() => ({ setActiveRoute, activeRoute, listeners: listeners.current, - isTransitioning: isPending, - }; + }), [activeRoute, setActiveRoute, listeners]); - return {children}; + return + + {children} + + ; }; const ActiveRoute = () => { @@ -470,9 +509,8 @@ export const makeRouter = (config: C): RouterLib => { } }, [context.activeRoute]); - return - {context.activeRoute.route.render()} - ; + // Rendered via JSX, as just calling `render()` causes unnecessary rerenders + return ; }; return { @@ -480,6 +518,7 @@ export const makeRouter = (config: C): RouterLib => { matchRoute, matchInitialRoute, useRouter, + useRouterState, ActiveRoute, Router, }; diff --git a/frontend/src/relay/errors.ts b/frontend/src/relay/errors.ts index 2a2f9eb0c..8d3dba33f 100644 --- a/frontend/src/relay/errors.ts +++ b/frontend/src/relay/errors.ts @@ -115,4 +115,5 @@ export type ErrorKind = "INVALID_INPUT" | "NOT_AUTHORIZED" | "INTERNAL_SERVER_ERROR" | "OPENCAST_UNAVAILABLE" + | "OPENCAST_ERROR" ; diff --git a/frontend/src/router.tsx b/frontend/src/router.tsx index 86d39631f..9cae3edc2 100644 --- a/frontend/src/router.tsx +++ b/frontend/src/router.tsx @@ -13,7 +13,12 @@ import { OpencastVideoRoute, VideoRoute, } from "./routes/Video"; -import { DirectSeriesOCRoute, DirectSeriesRoute } from "./routes/Series"; +import { + DirectSeriesOCRoute, + DirectSeriesRoute, + OpencastSeriesRoute, + SeriesRoute, +} from "./routes/Series"; import { ManageVideosRoute } from "./routes/manage/Video"; import { UploadRoute } from "./routes/Upload"; import { SearchRoute } from "./routes/Search"; @@ -34,6 +39,7 @@ const { matchRoute, Router, useRouter, + useRouterState, } = makeRouter({ fallback: NotFoundRoute, routes: [ @@ -45,6 +51,8 @@ const { SearchRoute, OpencastVideoRoute, VideoRoute, + OpencastSeriesRoute, + SeriesRoute, DirectVideoRoute, DirectOpencastVideoRoute, DirectSeriesRoute, @@ -65,7 +73,7 @@ const { ], }); -export { ActiveRoute, Link, matchInitialRoute, matchRoute, Router, useRouter }; +export { ActiveRoute, Link, matchInitialRoute, matchRoute, Router, useRouter, useRouterState }; type LinkProps = { to: string; diff --git a/frontend/src/routes/Embed.tsx b/frontend/src/routes/Embed.tsx index 79908e81b..05752af07 100644 --- a/frontend/src/routes/Embed.tsx +++ b/frontend/src/routes/Embed.tsx @@ -2,11 +2,12 @@ import { ReactNode, Suspense } from "react"; import { LuFrown, LuAlertTriangle } from "react-icons/lu"; import { Translation, useTranslation } from "react-i18next"; import { - graphql, GraphQLTaggedNode, PreloadedQuery, useFragment, usePreloadedQuery, + graphql, useFragment, usePreloadedQuery, + GraphQLTaggedNode, PreloadedQuery, } from "react-relay"; import { unreachable } from "@opencast/appkit"; -import { eventId, isSynced, keyOfId } from "../util"; +import { eventId, getCredentials, isSynced, keyOfId } from "../util"; import { GlobalErrorBoundary } from "../util/err"; import { loadQuery } from "../relay"; import { makeRoute, MatchedRoute } from "../rauta"; @@ -18,6 +19,7 @@ import { EmbedQuery } from "./__generated__/EmbedQuery.graphql"; import { EmbedDirectOpencastQuery } from "./__generated__/EmbedDirectOpencastQuery.graphql"; import { EmbedEventData$key } 
from "./__generated__/EmbedEventData.graphql"; import { PlayerContextProvider } from "../ui/player/PlayerContext"; +import { PreviewPlaceholder, useEventWithAuthData } from "./Video"; export const EmbedVideoRoute = makeRoute({ url: ({ videoId }: { videoId: string }) => `/~embed/!v/${keyOfId(videoId)}`, @@ -27,15 +29,21 @@ export const EmbedVideoRoute = makeRoute({ if (params === null) { return null; } - const videoId = decodeURIComponent(params[1]); + const id = eventId(decodeURIComponent(params[1])); const query = graphql` - query EmbedQuery($id: ID!) { + query EmbedQuery($id: ID!, $eventUser: String, $eventPassword: String) { event: eventById(id: $id) { ... EmbedEventData } } - `; + `; + + const creds = getCredentials("event", id); + const queryRef = loadQuery(query, { + id, + eventUser: creds?.user, + eventPassword: creds?.password, + }); - const queryRef = loadQuery(query, { id: eventId(videoId) }); return matchedEmbedRoute(query, queryRef); }, @@ -51,13 +59,22 @@ export const EmbedOpencastVideoRoute = makeRoute({ } const query = graphql` - query EmbedDirectOpencastQuery($id: String!) { + query EmbedDirectOpencastQuery( + $id: String!, + $eventUser: String, + $eventPassword: String) + { event: eventByOpencastId(id: $id) { ... EmbedEventData } } `; const videoId = decodeURIComponent(matches[1]); - const queryRef = loadQuery(query, { id: videoId }); + const creds = getCredentials("oc-event", videoId); + const queryRef = loadQuery(query, { + id: videoId, + eventUser: creds?.user, + eventPassword: creds?.password, + }); return matchedEmbedRoute(query, queryRef); }, @@ -90,6 +107,7 @@ const embedEventFragment = graphql` __typename ... on NotAllowed { dummy } ... on AuthorizedEvent { + id title created isLive @@ -97,17 +115,18 @@ const embedEventFragment = graphql` creators metadata description - series { title opencastId } + canWrite + hasPassword + series { title id opencastId } syncedData { updated startTime endTime duration thumbnail - tracks { uri flavor mimetype resolution isMaster } - captions { uri lang } - segments { uri startTime } } + ... VideoPageAuthorizedData + @arguments(eventUser: $eventUser, eventPassword: $eventPassword) } } `; @@ -120,10 +139,11 @@ type EmbedProps = { const Embed: React.FC = ({ query, queryRef }) => { const fragmentRef = usePreloadedQuery(query, queryRef); - const event = useFragment( + const protoEvent = useFragment( embedEventFragment, fragmentRef.event, ); + const [event, refetch] = useEventWithAuthData(protoEvent); const { t } = useTranslation(); if (!event) { @@ -151,7 +171,9 @@ const Embed: React.FC = ({ query, queryRef }) => { ; } - return ; + return event.authorizedData + ? 
+ : ; }; export const BlockEmbedRoute = makeRoute({ diff --git a/frontend/src/routes/Login.tsx b/frontend/src/routes/Login.tsx index 5b8f206b1..82d88631d 100644 --- a/frontend/src/routes/Login.tsx +++ b/frontend/src/routes/Login.tsx @@ -1,4 +1,4 @@ -import React, { ReactNode, useId, useState } from "react"; +import React, { PropsWithChildren, ReactNode, useId, useState } from "react"; import { useTranslation } from "react-i18next"; import { graphql, usePreloadedQuery } from "react-relay"; import type { PreloadedQuery } from "react-relay"; @@ -22,6 +22,7 @@ import { Breadcrumbs } from "../ui/Breadcrumbs"; import { OUTER_CONTAINER_MARGIN } from "../layout"; import { COLORS } from "../color"; import { focusStyle } from "../ui"; +import { IconType } from "react-icons"; export const REDIRECT_STORAGE_KEY = "tobira-redirect-after-login"; @@ -118,24 +119,16 @@ const BackButton: React.FC = () => { >{t("general.action.back")}; }; -type FormData = { +export type FormData = { userid: string; password: string; }; +export type AuthenticationFormState = "idle" | "pending" | "success"; + const LoginBox: React.FC = () => { const { t, i18n } = useTranslation(); - const isDark = useColorScheme().scheme === "dark"; - const { register, handleSubmit, watch, formState: { errors } } = useForm(); - const userId = watch("userid", ""); - const password = watch("password", ""); - const userFieldId = useId(); - const passwordFieldId = useId(); - - const validation = { required: t("general.form.this-field-is-required") }; - - type State = "idle" | "pending" | "success"; - const [state, setState] = useState("idle"); + const [state, setState] = useState("idle"); const [loginError, setLoginError] = useState(null); const onSubmit = async (data: FormData) => { @@ -171,15 +164,20 @@ const LoginBox: React.FC = () => { }; return ( -
+ {CONFIG.auth.loginPageNote && (
{ padding: "8px 16px", }}>{translatedConfig(CONFIG.auth.loginPageNote, i18n)}
)} +
+ ); +}; +type AuthenticationFormProps = PropsWithChildren & { + onSubmit: (data: FormData) => Promise | void; + state: AuthenticationFormState; + error: string | null; + className?: string; + SubmitIcon: IconType; + labels: { + user: string; + password: string; + submit: string; + }; +} + +export const AuthenticationForm: React.FC = ({ + onSubmit, + state, + error, + className, + SubmitIcon, + labels, + children, +}) => { + const { t } = useTranslation(); + const isDark = useColorScheme().scheme === "dark"; + const { register, handleSubmit, watch, formState: { errors } } = useForm(); + const userId = watch("userid", ""); + const password = watch("password", ""); + const userFieldId = useId(); + const passwordFieldId = useId(); + + const validation = { required: t("general.form.this-field-is-required") }; + + return ( +
+ {children}
{
{
{ ...focusStyle({ offset: 1 }), }} > - - {t("user.login")} + + {labels.submit} {match(state, { "idle": () => null, "pending": () => , @@ -262,7 +302,7 @@ const LoginBox: React.FC = () => { })} - {loginError &&
{loginError}
} + {error && {error}}
); diff --git a/frontend/src/routes/Realm.tsx b/frontend/src/routes/Realm.tsx index d0faed771..c6921919c 100644 --- a/frontend/src/routes/Realm.tsx +++ b/frontend/src/routes/Realm.tsx @@ -13,7 +13,6 @@ import { RootLoader } from "../layout/Root"; import { NotFound } from "./NotFound"; import { Nav } from "../layout/Navigation"; import { LinkList, LinkWithIcon } from "../ui"; -import CONFIG from "../config"; import { characterClass, useTitle, useTranslatedConfig } from "../util"; import { makeRoute } from "../rauta"; import { MissingRealmName } from "./util"; @@ -27,6 +26,7 @@ import { COLORS } from "../color"; import { useMenu } from "../layout/MenuState"; import { ManageNav } from "./manage"; import { BREAKPOINT as NAV_BREAKPOINT } from "../layout/Navigation"; +import CONFIG from "../config"; // eslint-disable-next-line @typescript-eslint/quotes @@ -143,17 +143,16 @@ type Props = { const RealmPage: React.FC = ({ realm }) => { const { t } = useTranslation(); - const siteTitle = useTranslatedConfig(CONFIG.siteTitle); const breadcrumbs = realmBreadcrumbs(t, realm.ancestors); + const siteTitle = useTranslatedConfig(CONFIG.siteTitle); - const title = realm.isMainRoot ? siteTitle : realm.name; - useTitle(title, realm.isMainRoot); + useTitle(realm.name); return <> {!realm.isMainRoot && ( } /> )} - {title && ( + {realm.name ? (
= ({ realm }) => { columnGap: 12, rowGap: 6, }}> -

{title}

+

{realm.name}

{realm.isUserRealm && }
+ ) : ( + // If there is no heading, this visually hidden

is added for screen readers. + realm.isMainRoot &&

{siteTitle}

)} {realm.blocks.length === 0 && realm.isMainRoot ? diff --git a/frontend/src/routes/Search.tsx b/frontend/src/routes/Search.tsx index b48fe1d16..7f7ff8c1b 100644 --- a/frontend/src/routes/Search.tsx +++ b/frontend/src/routes/Search.tsx @@ -1,24 +1,38 @@ import { Trans, useTranslation } from "react-i18next"; -import { graphql } from "react-relay"; +import { graphql, PreloadedQuery, usePreloadedQuery, useQueryLoader } from "react-relay"; import { + LuCalendar, LuCalendarRange, LuLayout, - LuLibrary, - LuPlayCircle, LuRadio, + LuVolume2, LuX, } from "react-icons/lu"; -import { IconType } from "react-icons"; -import { ReactNode, RefObject, useEffect, useRef } from "react"; +import { LetterText } from "lucide-react"; import { + ReactNode, + startTransition, + Suspense, + useCallback, + useEffect, + useRef, + useState, +} from "react"; +import { + Button, + Card, Floating, FloatingContainer, FloatingTrigger, ProtoButton, + Spinner, + WithTooltip, + match, screenWidthAtMost, unreachable, useColorScheme, } from "@opencast/appkit"; +import { CSSObject } from "@emotion/react"; import { RootLoader } from "../layout/Root"; import { @@ -36,18 +50,25 @@ import { ThumbnailOverlay, ThumbnailOverlayContainer, ThumbnailReplacement, + formatDuration, } from "../ui/Video"; import { SmallDescription } from "../ui/metadata"; import { Breadcrumbs, BreadcrumbsContainer, BreadcrumbSeparator } from "../ui/Breadcrumbs"; import { MissingRealmName } from "./util"; import { ellipsisOverflowCss, focusStyle } from "../ui"; import { COLORS } from "../color"; -import { BREAKPOINT_MEDIUM, BREAKPOINT_SMALL } from "../GlobalStyle"; -import { isExperimentalFlagSet } from "../util"; -import { Button, Card } from "@opencast/appkit"; +import { BREAKPOINT_MEDIUM } from "../GlobalStyle"; +import { + eventId, + isExperimentalFlagSet, + keyOfId, + secondsToTimeString, +} from "../util"; import { DirectVideoRoute, VideoRoute } from "./Video"; -import { DirectSeriesRoute } from "./Series"; +import { DirectSeriesRoute, SeriesRoute } from "./Series"; import { PartOfSeriesLink } from "../ui/Blocks/VideoList"; +import { SearchSlidePreviewQuery } from "./__generated__/SearchSlidePreviewQuery.graphql"; +import { RelativeDate } from "../ui/time"; export const isSearchActive = (): boolean => document.location.pathname === "/~search"; @@ -82,8 +103,13 @@ export const SearchRoute = makeRoute({ } const q = url.searchParams.get("q") ?? ""; - const filters = prepareFilters(url); + if (!(q in SEARCH_TIMINGS)) { + SEARCH_TIMINGS[q] = {}; + } + SEARCH_TIMINGS[q].routeMatch = window.performance.now(); + + const filters = prepareFilters(url); const queryRef = loadQuery(query, { q, filters }); return { @@ -91,7 +117,10 @@ export const SearchRoute = makeRoute({ {...{ query, queryRef }} noindex nav={() => []} - render={data => } + render={data => { + SEARCH_TIMINGS[q].queryReturned = window.performance.now(); + return ; + }} />, dispose: () => queryRef.dispose(), }; @@ -126,9 +155,9 @@ const query = graphql` ... on SearchUnavailable { dummy } ... on SearchResults { items { - id __typename ... on SearchEvent { + id title description thumbnail @@ -141,16 +170,39 @@ const query = graphql` startTime endTime created - hostRealms { path } + hostRealms { path ancestorNames } + textMatches { + start + duration + text + ty + highlights + } + matches { + title + description + seriesTitle + creators { index span } + } } ... 
on SearchSeries { + id title description thumbnails { thumbnail isLive audioOnly } + hostRealms { path ancestorNames } + matches { title description } + } + ... on SearchRealm { + id + name + path + ancestorNames + matches { name } } - ... on SearchRealm { name path ancestorNames } } totalHits + duration } } } @@ -166,21 +218,31 @@ const SearchPage: React.FC = ({ q, outcome }) => { const router = useRouter(); useEffect(() => { - const handleEscape = ((ev: KeyboardEvent) => { - if (ev.key === "Escape") { - handleNavigation(router); - } - }); - document.addEventListener("keyup", handleEscape); - return () => document.removeEventListener("keyup", handleEscape); + if (!isExperimentalFlagSet() || LAST_PRINTED_TIMINGS_QUERY === q) { + return; + } + + const info = SEARCH_TIMINGS[q]; + info.rendered = window.performance.now(); + const diff = (a?: number, b?: number) => !a || !b ? null : Math.round(b - a); + // eslint-disable-next-line no-console + console.table([{ + q: q, + routing: diff(info.startSearch, info.routeMatch), + query: diff(info.routeMatch, info.queryReturned), + backend: outcome.__typename === "SearchResults" ? outcome.duration : null, + render: diff(info.queryReturned, info.rendered), + }]); + LAST_PRINTED_TIMINGS_QUERY = q; }); + let body; if (outcome.__typename === "EmptyQuery") { body = {t("search.too-few-characters")}; } else if (outcome.__typename === "SearchUnavailable") { body =
- {t("search.unavailable")} + {t("search.unavailable")}
; } else if (outcome.__typename === "SearchResults") { body = outcome.items.length === 0 @@ -191,15 +253,21 @@ const SearchPage: React.FC = ({ q, outcome }) => { } const hits = outcome.__typename === "SearchResults" ? outcome.totalHits : 0; + const timingInfo = isExperimentalFlagSet() && outcome.__typename === "SearchResults" + ? <>{` • ${outcome.duration}ms`} + : null; return <> - {{ query: q }} - + ? <> + + {{ query: q }} + + {timingInfo} + : t("search.no-query") } /> -
+
{isExperimentalFlagSet() && <> {/* Filters */}
@@ -353,113 +421,84 @@ const CenteredNote: React.FC<{ children: ReactNode }> = ({ children }) => ( ); type Results = Extract; +type Item = Results["items"][number]; +type EventItem = Omit, "__typename">; +type SeriesItem = Omit, "__typename">; +type RealmItem = Omit, "__typename">; type SearchResultsProps = { items: Results["items"]; }; -const unwrapUndefined = (value: T | undefined): T => typeof value === "undefined" - ? unreachable("type dependent field for search item is not set") - : value; - -const SearchResults: React.FC = ({ items }) => ( -
-    {items.map(item => {
-        if (item.__typename === "SearchEvent") {
-            return ;
-        } else if (item.__typename === "SearchSeries") {
-            return ;
-        } else if (item.__typename === "SearchRealm") {
-            return ;
+const SearchResults: React.FC<SearchResultsProps> = ({ items }) => {
+    // Make search results navigable by arrow keys. For this we don't use any
+    // React state, but DOM methods directly, which makes it much easier to
+    // properly deal with focus changes caused by the tab key, for example.
+    // Mixing tab and arrow keys also works with this approach. For this to
+    // work, we add marker class names to two nodes below.
+    useEffect(() => {
+        const focus = (e: Element | null) => {
+            const a = e?.querySelector("a.search-result-item-overlay-link");
+            if (a && a instanceof HTMLElement) {
+                a.focus();
+            }
+        };
+
+        const handler = (e: KeyboardEvent) => {
+            let dir: "up" | "down";
+            if (e.key === "ArrowDown") {
+                dir = "down";
+            } else if (e.key === "ArrowUp") {
+                dir = "up";
+            } else {
-                // eslint-disable-next-line no-console
-                console.warn("Unknown search item type: ", item.__typename);
-                return null;
+                return;
             }
-    })}
-
-); -type WithIconProps = React.PropsWithChildren<{ - Icon: IconType; - iconSize?: number; - hideIconOnMobile?: boolean; -}>; + e.preventDefault(); -const WithIcon: React.FC = ({ Icon, iconSize = 30, children, hideIconOnMobile }) => ( -
- - {children} -
-); + const selected = document.querySelector(".search-result-item:focus-within"); + if (selected == null) { + if (dir === "down") { + focus(document.querySelector(".search-result-item")); + } + } else { + focus(selected[match(dir, { + down: () => "nextElementSibling" as const, + up: () => "previousElementSibling" as const, + })]); + } + }; + document.addEventListener("keydown", handler); + return () => document.removeEventListener("keydown", handler); + }); -type SearchEventProps = { - id: string; - title: string; - description: string | null; - thumbnail: string | null; - duration: number; - creators: readonly string[]; - seriesTitle: string | null; - seriesId: string | null; - isLive: boolean; - audioOnly: boolean; - created: string; - startTime: string | null; - endTime: string | null; - hostRealms: readonly { readonly path: string }[]; + return ( +
    + {items.map(item => { + if (item.__typename === "SearchEvent") { + return ; + } else if (item.__typename === "SearchSeries") { + return ; + } else if (item.__typename === "SearchRealm") { + return ; + } else { + // eslint-disable-next-line no-console + console.warn("Unknown search item type: ", item.__typename); + return null; + } + })} +
+ ); }; -const SearchEvent: React.FC = ({ +const SearchEvent: React.FC = ({ id, title, description, @@ -474,6 +513,8 @@ const SearchEvent: React.FC = ({ startTime, endTime, hostRealms, + textMatches, + matches, }) => { // TODO: decide what to do in the case of more than two host realms. Direct // link should be avoided. @@ -481,24 +522,64 @@ const SearchEvent: React.FC = ({ ? DirectVideoRoute.url({ videoId: id }) : VideoRoute.url({ realmPath: hostRealms[0].path, videoID: id }); + const highlightedCreators = creators.map((c, i) => { + const relevantMatches = matches.creators.filter(m => m.index === i).map(m => m.span); + return <>{highlightText(c, relevantMatches)}; + }); + return ( - - + {{ + image: + + , + info:
+ {hostRealms.length === 1 && ( + + )} +

{highlightText(title, matches.title)}

-

{title}

- + + +
+ = ({ li: { display: "inline", }, + mark: highlightCss(COLORS.neutral90), }} /> - {description && } - {seriesTitle && seriesId && }
-
- -
+ +
+ + {description && } + +
+ + {seriesTitle && seriesId && } + {/* Show timeline with matches if there are any */} + {textMatches.length > 0 && ( + + )} +
, + }} ); }; -const thumbnailCss = { - outline: `1px solid ${COLORS.neutral15}`, - minWidth: 270, - width: 270, - marginLeft: "auto", - [screenWidthAtMost(800)]: { - minWidth: 240, - width: 240, - }, - [screenWidthAtMost(BREAKPOINT_MEDIUM)]: { - maxWidth: 400, - margin: "0 auto", - }, +type TextMatchTimelineProps = Pick & { + link: string; +}; + +const slidePreviewQuery = graphql` + query SearchSlidePreviewQuery($id: ID!, $user: String, $password: String) { + eventById(id: $id) { + ...on AuthorizedEvent { + id + authorizedData(user: $user, password: $password) { + segments { startTime uri } + } + } + } + } +`; + +const TextMatchTimeline: React.FC = ({ + id, duration, textMatches, link, +}) => { + const sectionLink = (startMs: number) => `${link}?t=${secondsToTimeString(startMs / 1000)}`; + const [queryRef, loadQuery] + = useQueryLoader(slidePreviewQuery); + const ref = useRef(null); + + // We initially don't render the actual matches at all, since that costs + // quite a bit of time, especially when there are many many matches. So + // instead, we only render properly once the timeline is close to the + // viewport. This means that on the initial route render, only empty + // timelines are rendered. Then all matches inside the viewport are + // rendered, and only when scrolling down, further matches are rendered. + const [doRender, setDoRender] = useState(false); + useEffect(() => { + const handler: IntersectionObserverCallback = entries => { + // Just checking the first element is fine as we only observe one. + if (entries[0]?.isIntersecting) { + startTransition(() => setDoRender(true)); + } + }; + const observer = new IntersectionObserver(handler, { + root: null, + rootMargin: "200px 0px 200px 0px", + threshold: 0, + }); + observer.observe(ref.current!); + return () => observer.disconnect(); + }, [setDoRender]); + + + const loadSegmentImages = useCallback(() => { + // Just calling `loadQuery` unconditionally would not send the query + // again, but would cause a useless rerender. + if (queryRef == null) { + loadQuery({ id: eventId(keyOfId(id)) }); + } + }, [queryRef, loadQuery, id]); + + // We load the query once the user hovers over the parent container. This + // seems like it would send a query every time the mouse enters, but relay + // caches the results, so it is only sent once. + return ( +
+ {/* The timeline line */} +
+ + {doRender && textMatches.map((m, i) => ( + }> + {queryRef + ? + : } + } + css={{ + position: "absolute", + ...match(m.ty, { + CAPTION: () => ({ + height: "100%", + bottom: "0", + backgroundColor: COLORS.primary1, + }) as CSSObject, + SLIDE_TEXT: () => ({ + height: "70%", + bottom: "15%", + backgroundColor: COLORS.primary0, + }), + "%future added value": () => unreachable(), + }), + width: `calc(${m.duration / duration * 100}% - 1px)`, + minWidth: 6, // To make the sections not too small to click + left: `${m.start / duration * 100}%`, + borderRadius: 1, + "&:hover": { + backgroundColor: COLORS.primary2, + }, + }} + > + + + ))} +
+ ); +}; + +type TextMatchTooltipWithMaybeImageProps = { + queryRef: PreloadedQuery; + textMatch: EventItem["textMatches"][number]; +}; + +const TextMatchTooltipWithMaybeImage: React.FC = ({ + queryRef, + textMatch, +}) => { + const data = usePreloadedQuery(slidePreviewQuery, queryRef); + const segments = data.eventById?.authorizedData?.segments ?? []; + + // Find the segment with its start time closest to the `start` of the text + // match, while still being smaller. + let currBestDiff = Infinity; + let currBest = undefined; + for (const segment of segments) { + // Relax the comparison a bit to be able to deal with rounding errors + // somewhere in the pipeline. Note that we still use the closest and + // segments are usually fairly long, so this is unlikely to result in + // any negative effects. + if (segment.startTime <= textMatch.start + 500) { + const diff = textMatch.start - segment.startTime; + if (diff < currBestDiff) { + currBestDiff = diff; + currBest = segment; + } + } + } + + return ; +}; + +type TextMatchTooltipProps = { + previewImage?: string; + textMatch: EventItem["textMatches"][number]; +}; + +const TextMatchTooltip: React.FC = ({ previewImage, textMatch }) => { + const startDuration = formatDuration(textMatch.start); + const endDuration = formatDuration(textMatch.start + textMatch.duration); + + return <> + {/* Icon to show what kind of textMatch this is */} +
+ {match(textMatch.ty, { + CAPTION: () => , + SLIDE_TEXT: () => , + "%future added value": unreachable, + })} +
+ + {previewImage && ( + + )} +
+
+ …{highlightText(textMatch.text, textMatch.highlights)}… +
+
+
+ {`(${startDuration} – ${endDuration})`} +
+ ; }; @@ -556,75 +851,142 @@ type ThumbnailInfo = { readonly isLive: boolean; readonly thumbnail: string | null | undefined; } -type SearchSeriesProps = { - id: string; - title: string; - description: string | null; - thumbnails: readonly ThumbnailInfo[] | undefined; -} -const SearchSeries: React.FC = ({ id, title, description, thumbnails }) => - - -
-

{title}

- {description && } -
-
- -
-; - -type ThumbnailStackProps = Pick - -const ThumbnailStack: React.FC = ({ thumbnails, title }) => ( -
div": { - outline: `1px solid ${COLORS.neutral10}`, - borderRadius: 8, - }, - "> div:not(:last-child)": { - boxShadow: "3px -2px 6px rgba(0, 0, 0, 40%)", - }, - "> div:nth-child(1)": { - zIndex: 3, - gridColumn: "1 / span 10", - gridRow: "3 / span 10", - }, - "> div:nth-child(2)": { - zIndex: 2, - gridColumn: "2 / span 10", - gridRow: "2 / span 10", - }, - "> div:nth-child(3)": { - zIndex: 1, - gridColumn: "3 / span 10", - gridRow: "1 / span 10", +const SearchSeries: React.FC = ({ + id, title, description, thumbnails, matches, hostRealms, +}) => { + // TODO: decide what to do in the case of more than two host realms. Direct + // link should be avoided. + const link = hostRealms.length !== 1 + ? DirectSeriesRoute.url({ seriesId: id }) + : SeriesRoute.url({ realmPath: hostRealms[0].path, seriesId: id }); + + return {{ + image: + + , + info:
+ {hostRealms.length === 1 && ( + + )} +

{highlightText(title, matches.title)}

+ {description && } +
, + }}
; +}; + +type ThumbnailStackProps = Pick + +const ThumbnailStack: React.FC = ({ thumbnails, title }) => { + const isDarkScheme = useColorScheme().scheme === "dark"; + + return ( +
div": { + position: "relative", + borderRadius: 8, + // The outline needs to be in a pseudo element as otherwise, it is + // hidden behind the img for some reason. + "::after": { + content: "''", + position: "absolute", + inset: 0, + borderRadius: 8, + outline: `2px solid ${COLORS.neutral70}`, + outlineOffset: -2, + }, + }, + "> div:not(:last-child)": { + boxShadow: "3px -2px 6px rgba(0, 0, 0, 40%)", + }, + "> div:nth-child(1)": { + zIndex: 3, + gridColumn: "1 / span 10", + gridRow: "3 / span 10", + }, + "> div:nth-child(2)": { + zIndex: 2, + gridColumn: "2 / span 10", + gridRow: "2 / span 10", + }, + "> div:nth-child(3)": { + zIndex: 1, + gridColumn: "3 / span 10", + gridRow: "1 / span 10", + }, + }}> + {thumbnails.slice(0, 3).map((info, idx) =>
+ +
)} + {/* Add fake thumbnails to always have 3. The visual image of 3 things behind each other + is more important than actually showing the correct number of thumbnails. */} + {[...Array(Math.max(0, 3 - thumbnails.length))].map((_, idx) => ( +
+ +
+ ))} +
+ ); +}; + +const DummySeriesStackThumbnail: React.FC<{ isDark: boolean }> = ({ isDark }) => ( + - {thumbnails?.map((info, idx) =>
- -
)} -
+ + "--_g": "0 120deg,#0000 0", + background: ` + conic-gradient( at calc(250%/3) calc(100%/3), + var(--c3) var(--_g)), + conic-gradient(from -120deg at calc( 50%/3) calc(100%/3), + var(--c2) var(--_g)), + conic-gradient(from 120deg at calc(100%/3) calc(250%/3), + var(--c1) var(--_g)), + conic-gradient(from 120deg at calc(200%/3) calc(250%/3), + var(--c1) var(--_g)), + conic-gradient(from -180deg at calc(100%/3) 50%, + var(--c2) 60deg,var(--c1) var(--_g)), + conic-gradient(from 60deg at calc(200%/3) 50%, + var(--c1) 60deg,var(--c3) var(--_g)), + conic-gradient(from -60deg at 50% calc(100%/3), + var(--c1) 120deg,var(--c2) 0 240deg,var(--c3) 0) + `, + backgroundSize: "calc(var(--s)*sqrt(3)) var(--s)", + }} /> ); type SeriesThumbnailProps = { @@ -658,81 +1020,227 @@ const SeriesThumbnail: React.FC = ({ info, title }) => { ; }; -type SearchRealmProps = { - id: string; - name: string | null; +type SearchBreadcrumbsProps = { ancestorNames: readonly (string | null | undefined)[]; - fullPath: string; }; -const SearchRealm: React.FC = ({ id, name, ancestorNames, fullPath }) => ( - - -
- - {ancestorNames.map((name, i) =>
  • - {name ?? } - -
  • )} -
    -

    {name ?? }

    +const SearchBreadcrumbs: React.FC = ({ ancestorNames }) => ( + + {ancestorNames.map((name, i) =>
  • + {name ?? } + +
  • )} +
    +); + +const SearchRealm: React.FC = ({ + id, name, ancestorNames, path, matches, +}) => ( + {{ + image:
    , + info: ( +
    + +

    + {name ? highlightText(name, matches.name) : } +

    - -
    + ), + }} ); type ItemProps = { link: string; - children: ReactNode; + breakpoint?: number; + children: { + image: ReactNode; + info: ReactNode; + }; }; -const Item: React.FC = ({ link, children }) => ( -
  • = ({ link, breakpoint = 0, children }) => ( +
  • *:last-child": { - width: "100%", - }, }, }}> - - {children} +
    {children.image}
    +
    + {children.info} +
•
);

-// If a user initiated the search in Tobira (i.e. neither coming from an
-// external link nor using the browser bar to manually visit the /~search route),
-// we can redirect to the previous page. Otherwise we redirect to Tobira's homepage.
-export const handleNavigation = ((router: RouterControl, ref?: RefObject) => {
-    if (ref?.current) {
-        // Why is this necessary? When a user reloads the search page and then navigates
-        // away within Tobira, the search input isn't cleared like it would be usually.
-        // So it needs to be done manually.
-        ref.current.value = "";
+/**
+ * Slices a string with byte indices. Never cuts into UTF-8 chars, but
+ * arbitrarily decides into which output to place them.
+ */
+const byteSlice = (s: string, start: number, len: number): readonly [string, string, string] => {
+    const isCharBoundary = (b: Uint8Array, idx: number): boolean => {
+        if (idx === 0 || idx === b.byteLength) {
+            return true;
+        }
+        const v = b.at(idx);
+        if (v === undefined) {
+            return false;
+        }
+
+        // UTF-8 chars have either the first bit 0 or the first two bits 1.
+        return v < 0x80 || v >= 0xC0;
+    };
+
+    const bytes = new TextEncoder().encode(s);
+    const decoder = new TextDecoder("utf-8");
+
+    // Round indices to avoid cutting into UTF-8 chars. The loop only needs to
+    // execute 3 times as every 4 bytes there is always a char boundary.
+    let end = start + len;
+    for (let i = 0; i < 3; i += 1) {
+        if (!isCharBoundary(bytes, start)) {
+            start += 1;
+        }
+        if (!isCharBoundary(bytes, end)) {
+            end += 1;
+        }
     }
-    if (router.internalOrigin) {
-        window.history.back();
-    } else {
-        router.goto("/");
+
+    return [
+        decoder.decode(bytes.slice(0, start)),
+        decoder.decode(bytes.slice(start, end)),
+        decoder.decode(bytes.slice(end)),
+    ] as const;
+};
+
+/**
+ * Inserts `<mark>` elements inside `s` to highlight parts of text, as specified
+ * by `spans`. If `maxUnmarkedSectionLen` is specified, this function makes
+ * sure that all sections without any highlight (except the last one) are at
+ * most that many characters¹ long. If a section is longer, its middle is
+ * replaced by " … " to stay within the limit.
+ *
+ * ¹ Well, technically UTF-16 code points, and this is important, but in our
+ * case it's a loosey-goosey business anyway, since the number only
+ * approximates the available space for rendering.
+ */
+const highlightText = (
+    s: string,
+    spans: readonly string[],
+    maxUnmarkedSectionLen = Infinity,
+) => {
+    const textParts = [];
+    let remainingText = s;
+    let offset = 0;
+    for (const encodedSpan of spans) {
+        const [start, len] = encodedSpan.split("-").map(v => parseInt(v, 16));
+        const span = { start, len };
+
+        const highlightStart = span.start - offset;
+        const [prefix_, middle, rest]
+            = byteSlice(remainingText, highlightStart, span.len);
+        let prefix = prefix_;
+
+        // If the first part (without a match) is too long, we truncate its
+        // middle.
+        if (prefix.length > maxUnmarkedSectionLen) {
+            const halfLen = maxUnmarkedSectionLen / 2 - 2;
+            const start = prefix.substring(0, halfLen);
+            const end = prefix.substring(prefix.length - halfLen);
+            prefix = `${start} … ${end}`;
+        }
+
+        if (prefix) {
+            textParts.push({prefix});
+        }
+        textParts.push({middle});
+        remainingText = rest;
+        offset = span.start + span.len;
     }
+    textParts.push(remainingText);
+
+    return textParts;
+};
+
+const highlightCss = (color: string) => ({
+    color,
+    backgroundColor: "var(--highlight-color)",
+    borderRadius: 2,
+});
+
+// This is for profiling search performance. We might remove this again later.
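The span encoding consumed by `highlightText` above is compact but easy to misread: each entry is `"<start>-<len>"` with both numbers in hex, and both counting bytes of the UTF-8 encoding, which is exactly why `byteSlice` exists. A small worked example against those two functions (the input string is made up):

```ts
// "Müller" is 6 characters but 7 bytes in UTF-8 ("ü" encodes as 0xC3 0xBC),
// so a match covering it arrives as the span "0-7": start 0, length 7,
// both hex, both in bytes.
byteSlice("Müller lecture", 0, 7);
// => ["", "Müller", " lecture"]

// Slicing with the same numbers as UTF-16 code units would grab the
// trailing space too:
"Müller lecture".slice(0, 7);
// => "Müller "

// `highlightText` walks the spans and, per its doc comment, wraps each
// matched slice in a <mark> element:
highlightText("Müller lecture", ["0-7"]);
// => [<mark>Müller</mark>, " lecture"]
```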
+export const SEARCH_TIMINGS: Record = {}; +let LAST_PRINTED_TIMINGS_QUERY: string | null = null; diff --git a/frontend/src/routes/Series.tsx b/frontend/src/routes/Series.tsx index 9c1a9011c..143797277 100644 --- a/frontend/src/routes/Series.tsx +++ b/frontend/src/routes/Series.tsx @@ -4,20 +4,159 @@ import { useTranslation } from "react-i18next"; import { loadQuery } from "../relay"; import { makeRoute } from "../rauta"; import { SeriesBlockFromSeries } from "../ui/Blocks/Series"; -import { RootLoader } from "../layout/Root"; +import { InitialLoading, RootLoader } from "../layout/Root"; import { Nav } from "../layout/Navigation"; import { PageTitle } from "../layout/header/ui"; import { WaitingPage } from "../ui/Waiting"; import { isSynced, keyOfId, seriesId } from "../util"; import { NotFound } from "./NotFound"; -import { SeriesByOpencastIdQuery } from "./__generated__/SeriesByOpencastIdQuery.graphql"; import { b64regex } from "./util"; import { SeriesByIdQuery } from "./__generated__/SeriesByIdQuery.graphql"; import { SeriesRouteData$key } from "./__generated__/SeriesRouteData.graphql"; import { Breadcrumbs } from "../ui/Breadcrumbs"; +import { isValidRealmPath } from "./Realm"; +import { useRouter } from "../router"; +import { useEffect } from "react"; +import { SeriesPageRealmData$key } from "./__generated__/SeriesPageRealmData.graphql"; +import { realmBreadcrumbs } from "../util/realm"; +import { + SeriesDirectByOpencastIdQuery, +} from "./__generated__/SeriesDirectByOpencastIdQuery.graphql"; +import { SeriesDirectByIdQuery } from "./__generated__/SeriesDirectByIdQuery.graphql"; +import { SeriesByOcIdQuery } from "./__generated__/SeriesByOcIdQuery.graphql"; +export const SeriesRoute = makeRoute({ + url: ({ realmPath, seriesId }: { realmPath: string; seriesId: string }) => + `${realmPath === "/" ? "" : realmPath}/s/${keyOfId(seriesId)}`, + match: url => { + const params = checkSeriesRealmPath(url, b64regex); + if (params == null) { + return null; + } + const query = graphql` + query SeriesByIdQuery($id: ID!, $realmPath: String!) { + ... UserData + series: seriesById(id: $id) { + ...SeriesRouteData + isReferencedByRealm(path: $realmPath) + } + realm: realmByPath(path: $realmPath) { + ... NavigationData + ... SeriesPageRealmData + } + } + `; + const queryRef = loadQuery(query, { + id: seriesId(params.seriesId), + realmPath: params.realmPath, + }); + + + return { + render: () => data.realm ?