This Rust crate provides a client for the Wikibase REST API. It works on any MediaWiki installation with the Wikibase extension and an enabled Wikibase REST API.
See also the examples.
// NOTE(review): documentation example fragment — `?` and `.await` assume an
// enclosing async fn returning a Result; this snippet does not compile standalone.
// Create an API (use the Wikidata API shortcut)
let api = RestApi::wikidata()?;
// Use Q42 (Douglas Adams) as an example item
let id = EntityId::new("Q42")?;
// Get the label and sitelink of Q42
let q42_label_en = Label::get(&id, "en", &api).await?.value().to_owned();
let q42_sitelink = Sitelink::get(&id, "enwiki", &api).await?.title().to_owned();
println!("Q42 '{q42_label_en}' => [[enwiki:{q42_sitelink}]]");
// Create a new item
let mut item = Item::default();
item.labels_mut()
.insert(LanguageString::new("en", "My label"));
item.statements_mut()
.insert(Statement::new_string("P31", "Q42"));
// POST the new item to the wiki; returns the created item (with its new ID assigned)
let item: Item = item.post(&api).await.unwrap();
println!("Created new item {}", item.id());
// Load multiple entities concurrently
// Fallible collect: short-circuits on the first invalid entity ID
let entity_ids = [
"Q42", "Q1", "Q2", "Q3", "Q4", "Q5", "Q6", "Q7", "Q8", "Q9", "P214",
]
.iter()
.map(|id| EntityId::new(*id))
.collect::<Result<Vec<_>, RestApiError>>()?;
// A container will manage the concurrent loading of entities.
// The API is wrapped in an Arc so it can be shared across concurrent loads.
let api = Arc::new(api);
let entity_container = EntityContainer::builder()
.api(api)
.max_concurrent(50)
.build()?;
entity_container.load(&entity_ids).await?;
// items() is behind an async read lock; clone the entry out so the lock is released
let q42 = entity_container
.items()
.read()
.await
.get("Q42")
.unwrap()
.to_owned();
let q42_label_en = q42.labels().get_lang("en").unwrap();
println!("Q42 label[en]: {q42_label_en}");
-
post
-
get
-
patch
-
post
-
get
-
patch
-
get item_id
-
patch
-
get itemid/sitelink_id
-
put itemid/sitelink_id
-
delete itemid/sitelink_id
-
get item_id
-
patch item_id
-
get property_id
-
patch property_id
-
get item_id/language_code
-
put item_id/language_code
-
delete item_id/language_code
-
get item_id/language_code
with fallback language
-
get property_id/language_code
-
put property_id/language_code
-
delete property_id/language_code
-
get property_id/language_code
with fallback language
-
get item_id
-
patch item_id
-
get property_id
-
patch property_id
-
get item_id/language_code
-
put item_id/language_code
-
delete item_id/language_code
-
get item_id/language_code
with fallback language
-
get property_id/language_code
-
put property_id/language_code
-
delete property_id/language_code
-
get property_id/language_code
with fallback language
-
get item_id
-
patch item_id
-
get property_id
-
patch property_id
-
get item_id/language_code
-
post item_id/language_code
-
get property_id/language_code
-
post property_id/language_code
-
get item_id
-
post item_id
-
get item_id/statement_id
as get statement_id
-
put item_id/statement_id
as put statement_id
-
patch item_id/statement_id
as patch statement_id
-
delete item_id/statement_id
as delete statement_id
-
get property_id
-
post property_id
-
get property_id/statement_id
as get statement_id
-
put property_id/statement_id
as put statement_id
-
patch property_id/statement_id
as patch statement_id
-
delete property_id/statement_id
as delete statement_id
-
get statement_id
-
put statement_id
-
patch statement_id
-
delete statement_id
-
/openapi.json
-
/property-data-types
-
search items
(not implemented in wikibase yet/v0?)
- Maxlag/rate limits?
Code analysis is run via analysis.sh.
cargo install cargo-tarpaulin # Once
cargo tarpaulin -o html
Lizard is a simple code analyzer, giving cyclomatic complexity etc. https://github.com/terryyin/lizard
lizard src -C 7 -V -L 40
Run rust-code-analysis.py
(requires rust-code-analysis-cli
to be installed) to generate analysis.tab.
This contains many metrics on code complexity and quality.
./rust-code-analysis.py
cargo tarpaulin -o html
Installation and usage: https://github.com/rust-lang/miri
cargo +nightly miri test