From 657c708f298a70b9627dc1ffbed38a5aba6ee59c Mon Sep 17 00:00:00 2001
From: Jared Roesch
Date: Mon, 19 Oct 2020 19:52:20 -0700
Subject: [PATCH] Fix some CR

---
 rust/tvm/src/ir/diagnostics/codespan.rs | 6 ++++--
 rust/tvm/src/lib.rs                     | 4 ++--
 2 files changed, 6 insertions(+), 4 deletions(-)

diff --git a/rust/tvm/src/ir/diagnostics/codespan.rs b/rust/tvm/src/ir/diagnostics/codespan.rs
index 9a31691728b9..54fd33617b66 100644
--- a/rust/tvm/src/ir/diagnostics/codespan.rs
+++ b/rust/tvm/src/ir/diagnostics/codespan.rs
@@ -17,8 +17,10 @@
  * under the License.
  */
 
-/// A TVM diagnostics renderer which uses the Rust `codespan`
-/// library to produce error messages.
+/// A TVM diagnostics renderer which uses the Rust `codespan` library
+/// to produce error messages.
+///
+///
 use std::collections::HashMap;
 use std::sync::{Arc, Mutex};
 
diff --git a/rust/tvm/src/lib.rs b/rust/tvm/src/lib.rs
index ec80ece1e37a..7e0682b86b33 100644
--- a/rust/tvm/src/lib.rs
+++ b/rust/tvm/src/lib.rs
@@ -24,7 +24,7 @@
 //! One particular use case is that given optimized deep learning model artifacts,
 //! (compiled with TVM) which include a shared library
 //! `lib.so`, `graph.json` and a byte-array `param.params`, one can load them
-//! in Rust idomatically to create a TVM Graph Runtime and
+//! in Rust idiomatically to create a TVM Graph Runtime and
 //! run the model for some inputs and get the
 //! desired predictions *all in Rust*.
 //!
@@ -53,7 +53,7 @@ macro_rules! export {
     ($($fn_name:expr),*) => {
         pub fn tvm_export(ns: &str) -> Result<(), tvm::Error> {
             $(
-                let name = String::fromwe(ns) + ::std::stringify!($fn_name);
+                let name = String::from(ns) + ::std::stringify!($fn_name);
                 tvm::runtime::function::register_override($fn_name, name, true)?;
             )*
             Ok(())
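
For context on the `export!` macro touched in the second lib.rs hunk, here is a minimal usage sketch. It is illustrative only and not part of the patch: it assumes the macro is available at the crate root as `tvm::export!`, that `register_override` accepts a plain typed Rust fn such as `fn(i64, i64) -> i64` through the crate's packed-function conversions, and the function name `add` and the namespace string "rust_demo." are invented for the example.

// Illustrative sketch; `add` and the "rust_demo." namespace are made up.
fn add(a: i64, b: i64) -> i64 {
    a + b
}

// Expands to a `tvm_export(ns)` function that registers every listed
// function under `ns` concatenated with its stringified name
// (note that the macro inserts no separator between the two).
tvm::export!(add);

fn main() -> Result<(), tvm::Error> {
    // Registers the Rust `add` fn in TVM's global registry as "rust_demo.add".
    tvm_export("rust_demo.")?;
    Ok(())
}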