Skip to content

Commit

Permalink
improve demo page
Browse files Browse the repository at this point in the history
cleanup code, remove comments
  • Loading branch information
vemonet committed Nov 26, 2023
1 parent 588b6e5 commit 65af28b
Show file tree
Hide file tree
Showing 7 changed files with 38 additions and 147 deletions.
3 changes: 1 addition & 2 deletions js/index.html
Original file line number Diff line number Diff line change
Expand Up @@ -57,8 +57,7 @@ <h1 class="text-xl font-semibold">Nanopublication dev</h1>
</div>

<script type="module">
// import init, { get_np_server, Nanopub, NpProfile } from "https://unpkg.com/@nanopub/sign";
import init, { Nanopub, NpProfile, get_np_server, KeyPair } from "./pkg/web.js";
import init, { Nanopub, NpProfile, getNpServer, KeyPair } from "./pkg/web.js";

const privKey=`MIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQCjY1gsFxmak6SOCouJPuEzHNForkqFhgfHE3aAIAx+Y5q6UDEDM9Q0EksheNffJB4iPqsAfiFpY0ARQY92K5r8P4+a78eu9reYrb2WxZb1qPJmvR7XZ6sN1oHD7dd/EyQoJmQsmOKdrqaLRbzR7tZrf52yvKkwNWXcIVhW8uxe7iUgxiojZpW9srKoK/qFRpaUZSKn7Z/zgtDH9FJkYbBsGPDMqp78Kzt+sJb+U2W+wCSSy34jIUxx6QRbzvn6uexc/emFw/1DU5y7zBudhgC7mVk8vX1gUNKyjZBzlOmRcretrANgffqs5fx/TMHN1xtkA/H1u1IKBfKoyk/xThMLAgMBAAECggEAECuG0GZA3HF8OaqFgMG+W+agOvH04h4Pqv4cHjYNxnxpFcNV9nEssTKWSOvCwYy7hrwZBGV3PQzbjFmmrxVFs20+8yCD7KbyKKQZPVC0zf84bj6NTNgvr6DpGtDxINxuGaMjCt7enqhoRyRRuZ0fj2gD3Wqae/Ds8cpDCefkyMg0TvauHSUj244vGq5nt93txUv1Sa+/8tWZ77Dm0s5a3wUYB2IeAMl5WrO2GMvgzwH+zT+4kvNWg5S0Ze4KE+dG3lSIYZjo99h14LcQS9eALC/VBcAJ6pRXaCTT/TULtcLNeOpoc9Fu25f0yTsDt6Ga5ApliYkb7rDhV+OFrw1sYQKBgQDCE9so+dPg7qbp0cV+lbb7rrV43m5s9Klq0riS7u8m71oTwhmvm6gSLfjzqb8GLrmflCK4lKPDSTdwyvd+2SSmOXySw94zr1Pvc7sHdmMRyA7mH3m+zSOOgyCTTKyhDRCNcRIkysoL+DecDhNo4Fumf71tsqDYogfxpAQhn0re8wKBgQDXhMmmT2oXiMnYHhi2k7CJe3HUqkZgmW4W44SWqKHp0V6sjcHm0N0RT5Hz1BFFUd5Y0ZB3JLcah19myD1kKYCj7xz6oVLb8O7LeAZNlb0FsrtD7NU+Hciywo8qESiA7UYDkU6+hsmxaI01DsttMIdG4lSBbEjA7t4IQC5lyr7xiQKBgQCN87YGJ40Y5ZXCSgOZDepz9hqX2KGOIfnUv2HvXsIfiUwqTXs6HbD18xg3KL4myIBOvywSM+4ABYp+foY+Cpcq2btLIeZhiWjsKIrw71+Q/vIe0YDb1PGf6DsoYhmWBpdHzR9HN+hGjvwlsYny2L9Qbfhgxxmsuf7zeFLpQLijjwKBgH7TD28k8IOk5VKec2CNjKd600OYaA3UfCpP/OhDl/RmVtYoHWDcrBrRvkvEEd2/DZ8qw165Zl7gJs3vK+FTYvYVcfIzGPWA1KU7nkntwewmf3i7V8lT8ZTwVRsmObWU60ySJ8qKuwoBQodki2VX12NpMN1wgWe3qUUlr6gLJU4xAoGAet6nD3QKwk6TTmcGVfSWOzvpaDEzGkXjCLaxLKh9GreM/OE+h5aN2gUoFeQapG5rUwI/7Qq0xiLbRXw+OmfAoV2XKv7iI8DjdIh0F06mlEAwQ/B0CpbqkuuxphIbchtdcz/5ra233r3BMNIqBl3VDDVoJlgHPg9msOTRy13lFqc=`;

Expand Down
7 changes: 6 additions & 1 deletion lib/docs/introduction.md
Original file line number Diff line number Diff line change
Expand Up @@ -22,10 +22,15 @@ On all platforms:
- 🪟 Windows
- 🦊 Web browsers

The library automatically handles most RDF serializations supporting graphs for the nanopub:
The library automatically handles most RDF serializations supporting quads for the nanopub:

- TriG
- N-Quads
- JSON-LD

When signing a nanopub, some metadata in the pubinfo graph is created automatically if it is not already set in the provided RDF:

- Date and time of the Nanopublication creation using `dct:created`.
- ORCID of the creator using `dct:creator`, if an ORCID was provided in the profile used to sign the Nanopublication (we also check whether the ORCID has already been set with `prov:wasAttributedTo` or `pav:createdBy`).

> 💡 If you are facing any problem, or have ideas to help improve this project, please [create an issue](https://github.com/vemonet/nanopub-rs/issues) on GitHub.
102 changes: 4 additions & 98 deletions lib/src/extract.rs
Original file line number Diff line number Diff line change
Expand Up @@ -46,11 +46,6 @@ impl fmt::Display for NpInfo {
}
}

// TODO: separate function just to extract uri and ns (to pass to serialize_rdf())
// pub fn extract_np_uri(dataset: &LightDataset) -> Result<(String, String), NpError> {
// let mut np_url: String = "".to_string();
// }

/// Extract graphs URLs from a nanopub: nanopub URL, head, assertion, prov, pubinfo
pub fn extract_np_info(dataset: &LightDataset, check_pubinfo: bool) -> Result<NpInfo, NpError> {
let mut np_url: String = "".to_string();
Expand Down Expand Up @@ -129,6 +124,7 @@ pub fn extract_np_info(dataset: &LightDataset, check_pubinfo: bool) -> Result<Np

// Getting potential ns from head graph (removing the last frag from head)
let original_ns = &head_iri[..np_iri.len()];
let np_ns = Namespace::new_unchecked(original_ns.to_string());

// Remove last char if it is # or / to get the URI
let np_iri: Iri<String> =
Expand Down Expand Up @@ -157,7 +153,6 @@ pub fn extract_np_info(dataset: &LightDataset, check_pubinfo: bool) -> Result<Np

// Get the base URI and separators from the namespace
let re_trusty_ns = Regex::new(r"^(.*?)(/|#|\.)?(RA[a-zA-Z0-9-_]*)?([#/\.])?$")?;
// let re = Regex::new(r"^(.*?)(RA.*)?$")?;
if let Some(caps) = re_trusty_ns.captures(original_ns) {
// The first group captures everything up to a '/' or '#', non-greedy.
base_uri = caps.get(1).map_or("", |m| m.as_str()).to_string();
Expand All @@ -167,7 +162,6 @@ pub fn extract_np_info(dataset: &LightDataset, check_pubinfo: bool) -> Result<Np
.map_or(separator_before_trusty, |m| m.as_str().to_string())
.to_string();
// The last group captures everything after 'RA', if present.
// trusty_hash = caps.get(3).map_or("", |m| m.as_str()).to_string();
separator_after_trusty = caps
.get(4)
.map_or(separator_after_trusty, |m| m.as_str().to_string())
Expand All @@ -176,14 +170,7 @@ pub fn extract_np_info(dataset: &LightDataset, check_pubinfo: bool) -> Result<Np
if trusty_hash.is_empty() && separator_after_trusty.is_empty() {
separator_after_trusty = "#".to_string()
};

let np_ns = Namespace::new_unchecked(original_ns.to_string());
// println!(
// "DEBUG: Extracted URI and namespace: {} {} {}",
// np_iri,
// np_ns.get("")?,
// trusty_hash
// );
// println!("DEBUG: Extracted URIs: {} {} {}", np_iri, np_ns.get("")?, trusty_hash);

// Generate normalized namespace without trusty
let norm_ns = if !trusty_hash.is_empty() {
Expand Down Expand Up @@ -319,7 +306,7 @@ pub fn extract_np_info(dataset: &LightDataset, check_pubinfo: bool) -> Result<Np
));
}

let np_info = NpInfo {
Ok(NpInfo {
uri: np_iri,
ns: np_ns,
normalized_ns: norm_ns,
Expand All @@ -336,86 +323,5 @@ pub fn extract_np_info(dataset: &LightDataset, check_pubinfo: bool) -> Result<Np
public_key: pubkey.unwrap_or("".to_string()),
algo: algo.unwrap_or("".to_string()),
orcid: orcid.unwrap_or("".to_string()),
};
// let _ = check_np_info(dataset, &np_info, check_pubinfo);
Ok(np_info)
}

pub fn check_np_info(
dataset: &LightDataset,
np_info: &NpInfo,
check_pubinfo: bool,
) -> Result<(), NpError> {
// Check minimal required triples in assertion, prov, pubinfo graphs
if dataset
.quads_matching(Any, Any, Any, [Some(np_info.assertion.clone())])
.next()
.is_none()
{
return Err(NpError(
"Invalid Nanopub: no triples in the assertion graph.".to_string(),
));
}
if dataset
.quads_matching(Any, Any, Any, [Some(np_info.prov.clone())])
.next()
.is_none()
{
return Err(NpError(
"Invalid Nanopub: no triples in the provenance graph.".to_string(),
));
}
if dataset
.quads_matching(
[np_info.assertion.clone()],
Any,
Any,
[Some(np_info.prov.clone())],
)
.next()
.is_none()
{
return Err(NpError("Invalid Nanopub: no triples with the assertion graph as subject in the provenance graph.".to_string()));
}
if check_pubinfo {
if dataset
.quads_matching(Any, Any, Any, [Some(np_info.pubinfo.clone())])
.next()
.is_none()
{
return Err(NpError(
"Invalid Nanopub: no triples in the pubinfo graph.".to_string(),
));
}
if dataset
.quads_matching(
[
np_info.uri.clone(),
Iri::new_unchecked(np_info.ns.get("")?.to_string()),
],
Any,
Any,
[Some(np_info.pubinfo.clone())],
)
.next()
.is_none()
{
return Err(NpError(
"Invalid Nanopub: no triples with the nanopub URI as subject in the pubinfo graph."
.to_string(),
));
};
}
let mut graph_names = HashSet::new();
for g in dataset.graph_names() {
if let Some(graph_name) = g?.iri() {
graph_names.insert(graph_name.to_string());
}
}
if graph_names.len() != 4 {
return Err(NpError(
format!("Invalid Nanopub: it should have 4 graphs (head, assertion, provenance, pubinfo), but the given nanopub has {} graphs.", graph_names.len())
));
}
Ok(())
})
}
28 changes: 10 additions & 18 deletions lib/src/nanopub.rs
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
use crate::constants::{BOLD, END, NP_PREF_NS, NP_TEMP_URI, TEST_SERVER};
use crate::constants::{BOLD, END, NP_TEMP_URI, TEST_SERVER};
use crate::error::NpError;
use crate::extract::extract_np_info;
use crate::profile::{get_keys, get_pubkey_str, NpProfile};
use crate::profile::NpProfile;
use crate::publish::{fetch_np, publish_np};
use crate::sign::{make_trusty, normalize_dataset, replace_bnodes, replace_ns_in_quads};
use crate::utils::{ns, parse_rdf, serialize_rdf};
Expand All @@ -15,8 +15,7 @@ use serde::Serialize;
use sophia::api::dataset::{Dataset, MutableDataset};
use sophia::api::ns::{rdf, xsd, Namespace};
use sophia::api::term::matcher::Any;
use sophia::api::term::{SimpleTerm, Term};
// use sophia::api::;
use sophia::api::term::SimpleTerm;
use sophia::inmem::dataset::LightDataset;
use sophia::iri::{AsIriRef, Iri};
use std::{fmt, str};
Expand Down Expand Up @@ -217,9 +216,6 @@ impl Nanopub {
openssl_probe::init_ssl_cert_env_vars();
let mut dataset = rdf.get_dataset()?;

let (priv_key, pubkey) = get_keys(&profile.private_key)?;
let pubkey_str = get_pubkey_str(&pubkey)?;

// Extract graph URLs from the nanopub (fails if np not valid)
let np_info = extract_np_info(&dataset, false)?;

Expand All @@ -231,7 +227,7 @@ impl Nanopub {
dataset.insert(
np_info.ns.get("sig")?,
ns("npx").get("hasPublicKey")?,
&*pubkey_str,
&*profile.public_key,
Some(&np_info.pubinfo),
)?;
dataset.insert(
Expand All @@ -247,9 +243,7 @@ impl Nanopub {
Some(&np_info.pubinfo),
)?;

// TODO: if not already set, automatically add the current date to pubinfo created
// But there is an error when trying to cast the string to xsd::dateTime
// np_uri dct:created "2023-11-17T14:13:52.560Z"^^xsd:dateTime ;
// If not already set, automatically add the current date to pubinfo created
if dataset
.quads_matching(
[
Expand All @@ -265,15 +259,13 @@ impl Nanopub {
{
let now = Utc::now();
let datetime_str = now.format("%Y-%m-%dT%H:%M:%S%.3fZ").to_string();
// TODO: error when trying to convert to datetime
// TODO: there is an error when trying to cast the string to xsd::dateTime
// let lit_date = "2019" * xsd::dateTime;
// let lit_date = datetime_str.as_str() * xsd::dateTime;
let lit_date = SimpleTerm::LiteralDatatype(datetime_str.into(), xsd::dateTime.iriref());
dataset.insert(
np_info.ns.as_iri_ref(),
ns("dct").get("created")?,
lit_date,
// &*datetime_str * xsd::dateTime.iriref(),
SimpleTerm::LiteralDatatype(datetime_str.into(), xsd::dateTime.iriref()),
// datetime_str.as_str() * xsd::dateTime,
Some(&np_info.pubinfo),
)?;
}
Expand Down Expand Up @@ -315,7 +307,7 @@ impl Nanopub {
// println!("NORMED QUADS sign before add signature\n{}", norm_quads);

// Generate signature using the private key and normalized RDF
let signature_vec = priv_key.sign(
let signature_vec = profile.get_private_key()?.sign(
Pkcs1v15Sign::new::<Sha256>(),
&Sha256::digest(norm_quads.as_bytes()),
)?;
Expand Down Expand Up @@ -348,7 +340,7 @@ impl Nanopub {
rdf: rdf_str,
trusty_hash,
signature_hash,
public_key: pubkey_str,
public_key: profile.public_key.to_string(),
orcid: profile.orcid_id.to_string(),
published: false,
})
Expand Down
28 changes: 17 additions & 11 deletions lib/src/profile.rs
Original file line number Diff line number Diff line change
Expand Up @@ -21,13 +21,16 @@ pub struct NpProfile {
}

impl NpProfile {
/// Create a new Nanopub profile
pub fn new(
orcid_id: &str,
name: &str,
private_key: &str,
introduction_nanopub_uri: Option<&str>,
) -> Result<Self, NpError> {
let (_priv_key, pubkey) = get_keys(private_key)?;
let privkey =
RsaPrivateKey::from_pkcs8_der(&engine::general_purpose::STANDARD.decode(private_key)?)?;
let pubkey = RsaPublicKey::from(&privkey);
Ok(Self {
orcid_id: orcid_id.to_string(),
name: name.to_string(),
Expand All @@ -37,7 +40,7 @@ impl NpProfile {
})
}

/// Extract profile from YAML file
/// Create a Nanopub profile from a YAML file
pub fn from_file(filepath: &str) -> Result<Self, NpError> {
let filepath = if filepath.is_empty() {
// Default to home dir if nothing provided
Expand All @@ -59,6 +62,17 @@ impl NpProfile {
profile.public_key = normalize_key(&fs::read_to_string(&profile.public_key)?)?;
Ok(profile)
}

/// Get the private key as `RsaPrivateKey` struct
///
/// Decodes the profile's base64-encoded PKCS#8 DER private key into an
/// `RsaPrivateKey` on every call.
///
/// # Errors
/// Returns an `NpError` if the stored key is not valid base64, or is not
/// valid PKCS#8 DER for an RSA private key.
pub fn get_private_key(&self) -> Result<RsaPrivateKey, NpError> {
    Ok(RsaPrivateKey::from_pkcs8_der(
        // `self.private_key` is assumed to hold the bare base64 body of the key
        // (PEM header/footer and newlines stripped, e.g. by `normalize_key`) —
        // decoded with the standard (non-URL-safe) base64 alphabet.
        &engine::general_purpose::STANDARD.decode(&self.private_key)?,
    )?)
}
/// Get the public key as `RsaPublicKey` struct
///
/// Derives the public key from the profile's private key. Note this
/// re-decodes the private key on each call via `get_private_key`.
///
/// # Errors
/// Returns an `NpError` if the private key cannot be decoded
/// (invalid base64 or invalid PKCS#8 DER).
pub fn get_public_key(&self) -> Result<RsaPublicKey, NpError> {
    Ok(RsaPublicKey::from(&self.get_private_key()?))
}
}

impl fmt::Display for NpProfile {
Expand All @@ -81,15 +95,7 @@ impl fmt::Display for NpProfile {
}
}

/// Get `RsaPrivateKey` and `RsaPublicKey` given a private key string
pub fn get_keys(private_key: &str) -> Result<(RsaPrivateKey, RsaPublicKey), NpError> {
let priv_key_bytes = engine::general_purpose::STANDARD.decode(private_key)?;
let priv_key = RsaPrivateKey::from_pkcs8_der(&priv_key_bytes)?;
let public_key = RsaPublicKey::from(&priv_key);
Ok((priv_key, public_key))
}

/// Normalize private/public keys (no prefix, no suffix, no newline)
/// Normalize a private or public key string (remove prefix, suffix, newlines)
pub fn normalize_key(key: &str) -> Result<String, NpError> {
let mut normed_key = key.trim().to_string();
let start_patterns = [
Expand Down
2 changes: 0 additions & 2 deletions lib/src/publish.rs
Original file line number Diff line number Diff line change
Expand Up @@ -11,8 +11,6 @@ pub async fn publish_np(server: &str, np: &str) -> Result<bool, NpError> {
.header(reqwest::header::CONTENT_TYPE, "application/trig")
.send()
.await?;
// println!("DEBUG: publish resp: {:#?}", res);
// Ok(res.status() == 201)
match res.status() {
reqwest::StatusCode::CREATED => Ok(true),
_ => {
Expand Down
15 changes: 0 additions & 15 deletions lib/src/sign.rs
Original file line number Diff line number Diff line change
Expand Up @@ -155,10 +155,6 @@ pub fn replace_ns_in_quads(
new_ns: &str,
new_uri: &str,
) -> Result<LightDataset, NpError> {
// println!(
// "DEBUG: REPLACE_NS_IN_QUADS: Old ns: {} old_uri: {} new ns: {} new_uri: {}",
// old_ns, old_uri, new_ns, new_uri
// );
let mut new = LightDataset::new();
for quad in dataset.quads() {
let quad = quad?;
Expand Down Expand Up @@ -227,21 +223,10 @@ struct NormQuad {
pub fn fix_normed_uri(uri: &str, separator: &str) -> String {
if let Some(space_index) = uri.rfind(' ') {
let last_frag = &uri[space_index + 1..];
// println!(
// "DEBUG: last frag: '{}' URI: '{}' SEP: '{}'",
// last_frag, uri, separator
// );
if uri.ends_with(&format!(" {separator}")) || last_frag.is_empty() {
uri.strip_suffix(separator).unwrap_or(uri).to_string()
} else if last_frag.starts_with(separator) {
uri.to_string()
// TODO: remove those checks, there are not useful anymore?
// } else if last_frag.starts_with('/') || last_frag.starts_with('.') {
// format!(
// "{} {separator}{}",
// &uri[..space_index],
// &uri[space_index + 2..]
// )
} else {
format!("{} {separator}{}", &uri[..space_index], last_frag)
}
Expand Down

0 comments on commit 65af28b

Please sign in to comment.