D3424.id8841.diff

diff --git a/Cargo.toml b/Cargo.toml
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -1,10 +1,12 @@
[workspace]
members = [
+ "ds-http-client",
"fantoir-datasource",
"language-subtag-registry-datasource",
"rfc-datasource",
"opendatasoft-explore-api",
+ "sparql-client",
]
resolver = "2"
diff --git a/ds-http-client/Cargo.toml b/ds-http-client/Cargo.toml
new file mode 100644
--- /dev/null
+++ b/ds-http-client/Cargo.toml
@@ -0,0 +1,15 @@
+[package]
+name = "ds-http-client"
+version = "0.1.0"
+edition = "2021"
+
+[dependencies]
+lazy_static = "~1.5.0"
+
+[dependencies.reqwest]
+version = "~0.12.7"
+features = ["gzip", "deflate"]
+
+[dependencies.tokio]
+version = "~1.39.3"
+features = ["full"]
diff --git a/fantoir-datasource/src/services/http_client.rs b/ds-http-client/src/lib.rs
copy from fantoir-datasource/src/services/http_client.rs
copy to ds-http-client/src/lib.rs
--- a/fantoir-datasource/src/services/http_client.rs
+++ b/ds-http-client/src/lib.rs
@@ -1,13 +1,20 @@
+//! # HTTP client
+//!
+//! High-level interface to the Hyper/reqwest HTTP client.
+//!
+//! This library is optimized to work with Nasqueron Datasources components.
+
+use std::collections::HashMap;
use std::io::Error as IOError;
use std::path::Path;
use lazy_static::lazy_static;
-use reqwest::Client as ReqwestClient;
+use reqwest::{Client as ReqwestClient, RequestBuilder};
use reqwest::ClientBuilder;
use reqwest::Error as ReqwestError;
use reqwest::IntoUrl;
use reqwest::Response;
-use reqwest::header::HeaderMap;
+use reqwest::header::{HeaderMap, HeaderValue};
use tokio::fs::File;
use tokio::io::AsyncWriteExt;
@@ -19,11 +26,12 @@
lazy_static! {
pub static ref USER_AGENT: String = format!(
- "{}/{} (https://databases.nasqueron.org/)",
+ "{}/{}",
env!("CARGO_PKG_NAME"), env!("CARGO_PKG_VERSION")
);
}
+/// Gets the default user agent
pub fn get_user_agent () -> &'static str {
&USER_AGENT
}
@@ -32,18 +40,15 @@
HTTP client
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
+/// HTTP client
pub struct Client {
client: ReqwestClient,
}
impl Client {
pub fn new(headers: Option<HeaderMap>) -> Self {
- let headers = headers
- .unwrap_or(HeaderMap::new());
-
let client = ClientBuilder::new()
- .user_agent(get_user_agent())
- .default_headers(headers)
+ .default_headers(build_default_headers(headers))
.gzip(true)
.deflate(true)
.build()
@@ -55,16 +60,31 @@
}
pub async fn get<T>(&self, url: T) -> Result<Response, Error>
- where T: IntoUrl {
- self.client
+ where T: IntoUrl {
+ let request = self.client.get(url);
+ self.run(request).await
+ }
+
+ pub async fn get_with_headers<T>(&self, url: T, headers: HashMap<String, String>) -> Result<Response, Error>
+ where T: IntoUrl {
+ let headers = parse_headers(headers);
+
+ let request = self.client
.get(url)
+ .headers(headers);
+
+ self.run(request).await
+ }
+
+ pub async fn run(&self, request: RequestBuilder) -> Result<Response, Error> {
+ request
.send()
.await
.map_err(|error| Error::Reqwest(error))
}
pub async fn download<P, T>(&self, url: T, target_path: P) -> Result<usize, Error>
- where T: IntoUrl, P: AsRef<Path> {
+ where T: IntoUrl, P: AsRef<Path> {
let mut file = File::create(target_path)
.await
.map_err(|error| Error::IO(error))?;
@@ -76,15 +96,41 @@
.await
.map_err(|error| Error::Reqwest(error))?
{
- bytes_read += file.write(chunk.as_ref())
- .await
- .map_err(|error| Error::IO(error))?;
+ bytes_read += file.write(chunk.as_ref())
+ .await
+ .map_err(|error| Error::IO(error))?;
}
Ok(bytes_read)
}
}
+/* -------------------------------------------------------------
+ HTTP client utilities
+ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
+
+pub fn parse_headers(headers: HashMap<String, String>) -> HeaderMap {
+ headers
+ .iter()
+ .map(|(name, value)| (
+ name.parse().expect("Can't parse header name"),
+ value.parse().expect("Can't parse header value")
+ ))
+ .collect()
+}
+
+fn build_default_headers(headers: Option<HeaderMap>) -> HeaderMap {
+ let mut headers = headers
+ .unwrap_or(HeaderMap::new());
+
+ // RFC 7231 states the User-Agent header SHOULD be sent.
+ if !headers.contains_key("User-Agent") {
+ headers.append("User-Agent", HeaderValue::from_static(get_user_agent()));
+ }
+
+ headers
+}
+
/* -------------------------------------------------------------
HTTP client error
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
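
For reference, a minimal consumer-side sketch of the extracted ds-http-client crate shown above. The URLs, the tokio runtime setup, and the error handling below are illustrative assumptions, not part of this revision.

// Hypothetical consumer of ds-http-client; URLs and header values are examples only.
use std::collections::HashMap;

use ds_http_client::Client;

#[tokio::main]
async fn main() {
    // No default headers passed: a User-Agent is appended automatically when missing.
    let client = Client::new(None);

    // Plain GET request.
    let response = client.get("https://example.org/status")
        .await
        .expect("Can't query endpoint");
    println!("HTTP {}", response.status());

    // GET with per-request headers supplied as plain strings.
    let mut headers = HashMap::new();
    headers.insert("Accept".to_string(), "application/json".to_string());
    let body = client.get_with_headers("https://example.org/api", headers)
        .await
        .expect("Can't query endpoint")
        .text()
        .await
        .expect("Endpoint didn't return a body");
    println!("{}", body);
}

The same Client also exposes download(url, target_path), which is what the fetch command uses further down in this diff.
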
diff --git a/fantoir-datasource/Cargo.toml b/fantoir-datasource/Cargo.toml
--- a/fantoir-datasource/Cargo.toml
+++ b/fantoir-datasource/Cargo.toml
@@ -1,6 +1,6 @@
[package]
name = "fantoir-datasource"
-version = "0.1.1"
+version = "0.2.0"
edition = "2021"
description = "Generates a Postgres table from FANTOIR raw file"
authors = [
@@ -10,11 +10,12 @@
[dependencies]
chrono = "~0.4.38"
+ds-http-client = { version = "0.1.0", path = "../ds-http-client" }
lazy_static = "~1.5.0"
opendatasoft-explore-api = { version = "0.1.1", path = "../opendatasoft-explore-api" }
oxrdf = "~0.1.7"
regex = "~1.10.6"
-sparesults = "~0.1.8"
+sparql-client = { version = "0.1.0", path = "../sparql-client" }
[dependencies.async-scoped]
version = "~0.9.0"
@@ -24,10 +25,6 @@
version = "~4.5.16"
features = ["derive"]
-[dependencies.reqwest]
-version = "~0.11.18"
-features = ["gzip", "deflate"]
-
[dependencies.sqlx]
version = "~0.8.1"
features = ["runtime-tokio-native-tls", "postgres", "chrono"]
diff --git a/fantoir-datasource/src/commands/fetch/mod.rs b/fantoir-datasource/src/commands/fetch/mod.rs
--- a/fantoir-datasource/src/commands/fetch/mod.rs
+++ b/fantoir-datasource/src/commands/fetch/mod.rs
@@ -13,7 +13,7 @@
use crate::commands::fetch::fantoir_file::FantoirFile;
use crate::commands::fetch::os::is_command_available;
-use crate::services::http_client::Client as HttpClient;
+use crate::services::http_client::build_http_client;
mod fantoir_file;
mod os;
@@ -36,7 +36,7 @@
}
let target_path = get_fantoir_zip_path();
- if let Err(error) = HttpClient::new(None).download(&fantoir_file.url, &target_path).await {
+ if let Err(error) = build_http_client().download(&fantoir_file.url, &target_path).await {
eprintln!("Can't download FANTOIR file: {:?}", error);
exit(16);
}
diff --git a/fantoir-datasource/src/commands/wikidata/mod.rs b/fantoir-datasource/src/commands/wikidata/mod.rs
--- a/fantoir-datasource/src/commands/wikidata/mod.rs
+++ b/fantoir-datasource/src/commands/wikidata/mod.rs
@@ -7,6 +7,8 @@
use std::process::exit;
use oxrdf::Term;
+use sparql_client::{is_term_empty, parse_literal, parse_term_uri};
+use sparql_client::Client as SparqlClient;
use sqlx::PgPool;
use crate::commands::wikidata::qualification::determine_p31_winner;
@@ -15,7 +17,7 @@
use crate::WikidataArgs;
use crate::fantoir::{fix_fantoir_code, FixedFantoirCode};
use crate::services::query::search_fantoir_code;
-use crate::services::sparql::*;
+use crate::services::http_client::build_http_client;
pub static WIKIDATA_TABLE: &'static str = "fantoir_wikidata";
pub static WIKIDATA_SPARQL_ENDPOINT: &'static str = "https://query.wikidata.org/sparql";
@@ -38,7 +40,10 @@
}
// Query Wikidata and get (Wikidata/FANTOIR code, list of P31 (instance of) values) hashmap
- let client = Client::new(WIKIDATA_SPARQL_ENDPOINT);
+ let client = SparqlClient::from_http_client(
+ WIKIDATA_SPARQL_ENDPOINT,
+ build_http_client()
+ );
let mut what_map = HashMap::new();
client.query(include_str!("../../queries/wikidata.sparql"))
diff --git a/fantoir-datasource/src/services/http_client.rs b/fantoir-datasource/src/services/http_client.rs
--- a/fantoir-datasource/src/services/http_client.rs
+++ b/fantoir-datasource/src/services/http_client.rs
@@ -1,20 +1,29 @@
-use std::io::Error as IOError;
-use std::path::Path;
+//! Build an HTTP client with a proper user agent.
+use std::collections::HashMap;
+
+use ds_http_client::{parse_headers, Client};
use lazy_static::lazy_static;
-use reqwest::Client as ReqwestClient;
-use reqwest::ClientBuilder;
-use reqwest::Error as ReqwestError;
-use reqwest::IntoUrl;
-use reqwest::Response;
-use reqwest::header::HeaderMap;
-use tokio::fs::File;
-use tokio::io::AsyncWriteExt;
+
+/* -------------------------------------------------------------
+ HTTP client
+ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
+
+pub fn build_http_client () -> Client {
+ let mut headers = HashMap::new();
+ headers.insert(
+ "User-Agent".to_string(),
+ get_user_agent().to_string(),
+ );
+ let headers = parse_headers(headers);
+
+ Client::new(Some(headers))
+}
/* -------------------------------------------------------------
User agent
- The USER_AGENT variable is computed at build time.
+ The user agent to use in HTTP requests is computed at build time.
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
lazy_static! {
@@ -24,77 +33,6 @@
);
}
-pub fn get_user_agent () -> &'static str {
+fn get_user_agent () -> &'static str {
&USER_AGENT
}
-
-/* -------------------------------------------------------------
- HTTP client
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
-
-pub struct Client {
- client: ReqwestClient,
-}
-
-impl Client {
- pub fn new(headers: Option<HeaderMap>) -> Self {
- let headers = headers
- .unwrap_or(HeaderMap::new());
-
- let client = ClientBuilder::new()
- .user_agent(get_user_agent())
- .default_headers(headers)
- .gzip(true)
- .deflate(true)
- .build()
- .expect("Can't build HTTP client");
-
- Self {
- client,
- }
- }
-
- pub async fn get<T>(&self, url: T) -> Result<Response, Error>
- where T: IntoUrl {
- self.client
- .get(url)
- .send()
- .await
- .map_err(|error| Error::Reqwest(error))
- }
-
- pub async fn download<P, T>(&self, url: T, target_path: P) -> Result<usize, Error>
- where T: IntoUrl, P: AsRef<Path> {
- let mut file = File::create(target_path)
- .await
- .map_err(|error| Error::IO(error))?;
-
- let mut target_content = self.get(url).await?;
- let mut bytes_read = 0;
- while let Some(chunk) = target_content
- .chunk()
- .await
- .map_err(|error| Error::Reqwest(error))?
- {
- bytes_read += file.write(chunk.as_ref())
- .await
- .map_err(|error| Error::IO(error))?;
- }
-
- Ok(bytes_read)
- }
-}
-
-/* -------------------------------------------------------------
- HTTP client error
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
-
-/// HTTP client error
-#[derive(Debug)]
-pub enum Error {
- /// Represents an underlying error from Reqwest HTTP client when processing a request.
- Reqwest(ReqwestError),
-
- /// Represents an IO error when doing file operations.
- IO(IOError),
-}
diff --git a/fantoir-datasource/src/services/mod.rs b/fantoir-datasource/src/services/mod.rs
--- a/fantoir-datasource/src/services/mod.rs
+++ b/fantoir-datasource/src/services/mod.rs
@@ -1,3 +1,2 @@
pub mod query;
pub mod http_client;
-pub mod sparql;
diff --git a/fantoir-datasource/src/services/sparql.rs b/fantoir-datasource/src/services/sparql.rs
deleted file mode 100644
--- a/fantoir-datasource/src/services/sparql.rs
+++ /dev/null
@@ -1,136 +0,0 @@
-//! # SPARQL client
-
-use std::collections::HashMap;
-use std::io::BufRead;
-
-use oxrdf::Term;
-use reqwest::header::{HeaderMap, HeaderValue};
-use reqwest::Url;
-use sparesults::*;
-
-use crate::services::http_client::Client as HttpClient;
-
-type SparqlSolution = HashMap<String, Term>;
-
-/// SPARQL client
-pub struct Client {
- pub endpoint: String,
- http_client: HttpClient,
-}
-
-/// Represent results for a SPARQL query
-/// A query can return a collection of solutions or a boolean.
-pub enum SparqlResults {
- /// Results for SELECT queries
- Solutions(Vec<SparqlSolution>),
-
- /// Results for INSERT DATA, UPDATE DATA, etc. queries
- Boolean(bool),
-}
-
-impl Client {
- pub fn new (endpoint: &str) -> Self {
- let mut headers = HeaderMap::new();
- headers.insert("Accept", HeaderValue::from_static("Accept: application/sparql-results+xml"));
-
- Self {
- endpoint: String::from(endpoint),
- http_client: HttpClient::new(Some(headers)),
- }
- }
-
- pub async fn query (&self, query: &str) -> SparqlResults {
- let url = Url::parse_with_params(&self.endpoint, &[("query", query)])
- .expect("Can't parse endpoint as absolute URL.");
-
- let query_results = self.http_client.get(url).await
- .expect("Can't query endpoint")
- .text().await
- .expect("End-point didn't return a reply.");
-
- parse_sparql_results(&query_results)
- }
-}
-
-pub fn parse_sparql_results (query_results: &str) -> SparqlResults {
- let results_reader = get_query_results_xml_reader(query_results.as_bytes());
-
- SparqlResults::read(results_reader)
-}
-
-impl SparqlResults {
- pub fn read<T>(reader: QueryResultsReader<T>) -> Self
- where T: BufRead
- {
- match reader {
- QueryResultsReader::Solutions(solutions) => {
- Self::Solutions(parse_sparql_solutions(solutions))
- },
- QueryResultsReader::Boolean(bool) => Self::Boolean(bool),
- }
- }
-
- pub fn into_solutions (self) -> Option<Vec<SparqlSolution>> {
- match self {
- SparqlResults::Solutions(solutions) => Some(solutions),
- SparqlResults::Boolean(_) => None,
- }
- }
-}
-
-fn get_query_results_xml_reader<T>(reader: T) -> QueryResultsReader<T>
- where T: BufRead
-{
- QueryResultsParser::from_format(QueryResultsFormat::Xml)
- .read_results(reader)
- .expect("Can't read SPARQL results")
-}
-
-fn parse_sparql_solutions<T> (solutions: SolutionsReader<T>) -> Vec<SparqlSolution>
- where T: BufRead
-{
- solutions
- .map(|solution| {
- parse_sparql_result(
- solution.expect("Can't read solution")
- )
- })
- .collect()
-}
-
-pub fn parse_sparql_result (solution: QuerySolution) -> SparqlSolution {
- solution
- .iter()
- .map(|(variable, term)| (
- variable.as_str().to_string(),
- term.clone(),
- ))
- .collect()
-}
-
-pub fn parse_term_uri (term: &Term) -> Option<String> {
- if let Term::NamedNode(node) = term {
- Some(node.as_str().to_string())
- } else {
- None
- }
-}
-
-pub fn parse_literal (term: &Term) -> Option<String> {
- if let Term::Literal(literal) = term {
- Some(literal.value().to_string())
- } else {
- None
- }
-}
-
-pub fn is_term_empty(term: &Term) -> bool {
- match term {
- Term::NamedNode(node) => {
- // Special values IRI are considered as empty values.
- node.as_str().contains("/.well-known/genid/")
- }
- Term::BlankNode(_) => true,
- Term::Literal(_) => false,
- }
-}
diff --git a/sparql-client/Cargo.toml b/sparql-client/Cargo.toml
new file mode 100644
--- /dev/null
+++ b/sparql-client/Cargo.toml
@@ -0,0 +1,18 @@
+[package]
+name = "sparql-client"
+version = "0.1.0"
+edition = "2021"
+
+[dependencies]
+ds-http-client = { version = "0.1.0", path = "../ds-http-client" }
+oxrdf = "~0.1.7"
+lazy_static = "~1.5.0"
+tokio = "~1.39.3"
+
+[dependencies.reqwest]
+version = "~0.12.7"
+features = ["gzip", "deflate"]
+
+[dependencies.sparesults]
+version = "~0.1.8"
+features = ["rdf-star"]
diff --git a/sparql-client/src/lib.rs b/sparql-client/src/lib.rs
new file mode 100644
--- /dev/null
+++ b/sparql-client/src/lib.rs
@@ -0,0 +1,252 @@
+//! # SPARQL client
+
+use std::collections::HashMap;
+use std::io::BufRead;
+
+use ds_http_client::Client as HttpClient;
+use lazy_static::lazy_static;
+use oxrdf::Term;
+use reqwest::header::{HeaderMap, HeaderValue};
+use reqwest::Url;
+use sparesults::*;
+
+type SparqlSolution = HashMap<String, Term>;
+
+/* -------------------------------------------------------------
+ SPARQL client
+ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
+
+/// SPARQL client
+pub struct Client {
+ pub endpoint: String,
+ http_client: HttpClient,
+}
+
+impl Client {
+ pub fn new (endpoint: &str) -> Self {
+ let mut headers = HeaderMap::new();
+ headers.insert("User-Agent", HeaderValue::from_static(get_user_agent()));
+ let http_client = HttpClient::new(Some(headers));
+
+ Self::from_http_client(endpoint, http_client)
+ }
+
+ pub fn from_http_client(endpoint: &str, http_client: HttpClient) -> Self {
+ Self {
+ endpoint: String::from(endpoint),
+ http_client,
+ }
+ }
+
+ pub async fn query (&self, query: &str) -> SparqlResults {
+ let url = Url::parse_with_params(&self.endpoint, &[("query", query)])
+ .expect("Can't parse endpoint as absolute URL.");
+ let headers = self.get_query_headers();
+
+ let query_results = self.http_client
+ .get_with_headers(url, headers).await
+ .expect("Can't query endpoint")
+ .text().await
+ .expect("End-point didn't return a reply.");
+
+ parse_sparql_results(&query_results)
+ }
+
+ fn get_query_headers(&self) -> HashMap<String, String> {
+ let mut headers = HashMap::new();
+ headers.insert("Accept".to_string(), "Accept: application/sparql-results+xml".to_string());
+
+ headers
+ }
+}
+
+/* -------------------------------------------------------------
+ SPARQL query results
+ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
+
+/// Represents results for a SPARQL query.
+/// A query can return a collection of solutions or a boolean.
+pub enum SparqlResults {
+ /// Results for SELECT queries
+ Solutions(Vec<SparqlSolution>),
+
+ /// Results for INSERT DATA, UPDATE DATA, etc. queries
+ Boolean(bool),
+}
+
+impl SparqlResults {
+ pub fn read<T>(reader: QueryResultsReader<T>) -> Self
+ where
+ T: BufRead
+ {
+ match reader {
+ QueryResultsReader::Solutions(solutions) => {
+ Self::Solutions(parse_sparql_solutions(solutions))
+ },
+ QueryResultsReader::Boolean(bool) => Self::Boolean(bool),
+ }
+ }
+
+ pub fn into_solutions(self) -> Option<Vec<SparqlSolution>> {
+ match self {
+ SparqlResults::Solutions(solutions) => Some(solutions),
+ SparqlResults::Boolean(_) => None,
+ }
+ }
+
+ pub fn into_bool(self) -> Option<bool> {
+ match self {
+ SparqlResults::Solutions(_) => None,
+ SparqlResults::Boolean(bool) => Some(bool),
+ }
+ }
+}
+
+pub fn parse_sparql_results (query_results: &str) -> SparqlResults {
+ let results_reader = get_query_results_xml_reader(query_results.as_bytes());
+
+ SparqlResults::read(results_reader)
+}
+
+fn get_query_results_xml_reader<T>(reader: T) -> QueryResultsReader<T>
+where T: BufRead
+{
+ QueryResultsParser::from_format(QueryResultsFormat::Xml)
+ .read_results(reader)
+ .expect("Can't read SPARQL results")
+}
+
+fn parse_sparql_solutions<T> (solutions: SolutionsReader<T>) -> Vec<SparqlSolution>
+where T: BufRead
+{
+ solutions
+ .map(|solution| {
+ parse_sparql_result(
+ solution.expect("Can't read solution")
+ )
+ })
+ .collect()
+}
+
+pub fn parse_sparql_result (solution: QuerySolution) -> SparqlSolution {
+ solution
+ .iter()
+ .map(|(variable, term)| (
+ variable.as_str().to_string(),
+ term.clone(),
+ ))
+ .collect()
+}
+
+pub fn parse_term_uri (term: &Term) -> Option<String> {
+ if let Term::NamedNode(node) = term {
+ Some(node.as_str().to_string())
+ } else {
+ None
+ }
+}
+
+pub fn parse_literal (term: &Term) -> Option<String> {
+ if let Term::Literal(literal) = term {
+ Some(literal.value().to_string())
+ } else {
+ None
+ }
+}
+
+/* -------------------------------------------------------------
+ Helper methods
+ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
+
+pub fn is_term_empty(term: &Term) -> bool {
+ match term {
+ Term::NamedNode(node) => {
+ // Special-value IRIs are considered empty values.
+ node.as_str().contains("/.well-known/genid/")
+ }
+ Term::BlankNode(_) => true,
+ Term::Literal(_) => false,
+ Term::Triple(triple) => is_term_empty(&triple.object),
+ }
+}
+
+/* -------------------------------------------------------------
+ User agent
+
+ The USER_AGENT variable is computed at build time.
+ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
+
+lazy_static! {
+ pub static ref USER_AGENT: String = format!(
+ "{}/{}",
+ env!("CARGO_PKG_NAME"), env!("CARGO_PKG_VERSION")
+ );
+}
+
+/// Gets the default user agent
+pub fn get_user_agent () -> &'static str {
+ &USER_AGENT
+}
+
+/* -------------------------------------------------------------
+ Tests
+ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ #[test]
+ pub fn test_parse_solution_results() {
+ let solutions_result = r#"
+<?xml version="1.0"?>
+<sparql xmlns="http://www.w3.org/2005/sparql-results#">
+ <head>
+ <variable name="foo"/>
+ </head>
+ <results>
+ <result>
+ <binding name="foo">
+ <literal xml:lang="en">bar</literal>
+ </binding>
+ </result>
+ </results>
+</sparql>
+ "#;
+
+ let results = parse_sparql_results(solutions_result);
+ let actual = results.into_solutions();
+
+ assert!(actual.is_some());
+
+ let solutions = actual.unwrap();
+ assert_eq!(1, solutions.iter().count());
+
+ let solution = solutions.first().unwrap();
+ assert_eq!(1, solution.iter().count());
+
+ // Asserts solution can be parsed as foo=bar
+ assert!(solution.contains_key("foo"));
+ let term = &solution["foo"];
+ assert!(term.is_literal());
+ let actual = parse_literal(term).unwrap();
+ assert!(actual.eq("bar"));
+ }
+
+ #[test]
+ pub fn test_parse_boolean_results () {
+ let boolean_results = r#"
+<?xml version="1.0"?>
+<sparql xmlns="http://www.w3.org/2005/sparql-results#">
+ <head />
+ <boolean>true</boolean>
+</sparql>
+ "#;
+
+ let results = parse_sparql_results(boolean_results);
+ let actual = results.into_bool();
+
+ assert!(actual.is_some());
+ assert!(actual.unwrap());
+ }
+}
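
For reference, a consumer-side sketch of the new sparql-client crate. The endpoint, query, and runtime setup are illustrative assumptions; only the API calls themselves come from this revision.

// Hypothetical consumer of sparql-client; the query and endpoint are examples only.
use sparql_client::{parse_literal, Client};

#[tokio::main]
async fn main() {
    // Client::new builds its own HTTP client; from_http_client(endpoint, client)
    // lets callers such as fantoir-datasource inject a preconfigured one instead.
    let client = Client::new("https://query.wikidata.org/sparql");

    let results = client
        .query("SELECT ?o WHERE { ?s ?p ?o } LIMIT 1")
        .await;

    // SELECT queries yield solutions; ASK-style queries yield a boolean.
    if let Some(solutions) = results.into_solutions() {
        for solution in solutions {
            if let Some(term) = solution.get("o") {
                println!("{:?}", parse_literal(term));
            }
        }
    }
}
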
