diff --git a/CHANGELOG.md b/CHANGELOG.md index 33204fa7..091b18fd 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -6,6 +6,13 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ## [Unreleased] +## [[0.8.6]](https://github.com/thoth-pub/thoth/releases/tag/v0.8.6) - 2022-07-01 +### Added + - [#390](https://github.com/thoth-pub/thoth/pull/390) - Implement OverDrive ONIX 3.0 specification + +### Fixed + - [#392](https://github.com/thoth-pub/thoth/issues/392) - Fix encoding of print ISBN in JSTOR ONIX output + ## [[0.8.5]](https://github.com/thoth-pub/thoth/releases/tag/v0.8.5) - 2022-05-30 ### Added - [#287](https://github.com/thoth-pub/thoth/issues/287) - Allow editing contributions (and affiliations) @@ -17,7 +24,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ## [[0.8.4]](https://github.com/thoth-pub/thoth/releases/tag/v0.8.4) - 2022-05-11 ### Added - [#29](https://github.com/thoth-pub/thoth/issues/29) - Implement CrossRef DOI Deposit specification - - [#72](https://github.com/thoth-pub/thoth/issues/72) - Implement CrossRef Google Books ONIX 3.0 specification + - [#72](https://github.com/thoth-pub/thoth/issues/72) - Implement Google Books ONIX 3.0 specification ### Changed - [#356](https://github.com/thoth-pub/thoth/issues/356) - Upgrade actix to v4 diff --git a/Cargo.lock b/Cargo.lock index 125dca88..2c52be8f 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -3788,7 +3788,7 @@ dependencies = [ [[package]] name = "thoth" -version = "0.8.5" +version = "0.8.6" dependencies = [ "cargo-husky", "clap", @@ -3803,7 +3803,7 @@ dependencies = [ [[package]] name = "thoth-api" -version = "0.8.5" +version = "0.8.6" dependencies = [ "actix-web", "argon2rs", @@ -3832,7 +3832,7 @@ dependencies = [ [[package]] name = "thoth-api-server" -version = "0.8.5" +version = "0.8.6" dependencies = [ "actix-cors", "actix-identity", @@ -3847,7 +3847,7 @@ dependencies = [ [[package]] name = "thoth-app" -version = "0.8.5" +version = "0.8.6" dependencies = [ "anyhow", "chrono", @@ -3870,7 +3870,7 @@ dependencies = [ [[package]] name = "thoth-app-server" -version = "0.8.5" +version = "0.8.6" dependencies = [ "actix-cors", "actix-web", @@ -3879,7 +3879,7 @@ dependencies = [ [[package]] name = "thoth-client" -version = "0.8.5" +version = "0.8.6" dependencies = [ "chrono", "graphql_client", @@ -3893,7 +3893,7 @@ dependencies = [ [[package]] name = "thoth-errors" -version = "0.8.5" +version = "0.8.6" dependencies = [ "actix-web", "csv", @@ -3908,7 +3908,7 @@ dependencies = [ [[package]] name = "thoth-export-server" -version = "0.8.5" +version = "0.8.6" dependencies = [ "actix-cors", "actix-web", diff --git a/Cargo.toml b/Cargo.toml index 9b754ae9..032fd747 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "thoth" -version = "0.8.5" +version = "0.8.6" authors = ["Javier Arias ", "Ross Higman "] edition = "2018" license = "Apache-2.0" @@ -16,11 +16,11 @@ maintenance = { status = "actively-developed" } members = ["thoth-api", "thoth-api-server", "thoth-app", "thoth-app-server", "thoth-client", "thoth-errors", "thoth-export-server"] [dependencies] -thoth-api = { version = "0.8.5", path = "thoth-api", features = ["backend"] } -thoth-api-server = { version = "0.8.5", path = "thoth-api-server" } -thoth-app-server = { version = "0.8.5", path = "thoth-app-server" } -thoth-errors = { version = "0.8.5", path = "thoth-errors" } -thoth-export-server = { version = "0.8.5", path = "thoth-export-server" } +thoth-api = { version = "0.8.6", path = 
"thoth-api", features = ["backend"] } +thoth-api-server = { version = "0.8.6", path = "thoth-api-server" } +thoth-app-server = { version = "0.8.6", path = "thoth-app-server" } +thoth-errors = { version = "0.8.6", path = "thoth-errors" } +thoth-export-server = { version = "0.8.6", path = "thoth-export-server" } clap = "2.33.3" dialoguer = "0.7.1" dotenv = "0.9.0" diff --git a/README.md b/README.md index aae72b5d..fe0c9cb7 100644 --- a/README.md +++ b/README.md @@ -1,5 +1,5 @@
-[...] +[...]

Thoth

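The two changelog entries above land later in this patch: #390 adds the `Onix3Overdrive` output in thoth-export-server plus an app-side download link, and #392 changes how the JSTOR ONIX 3.0 output encodes the print ISBN. As a quick orientation for #392, the sketch below paraphrases the new selection logic with simplified, hypothetical types; the real change in `thoth-export-server/src/xml/onix3_jstor.rs` works on `WorkPublications`/`PublicationType` from `thoth-client` and emits the chosen ISBN as a single `RelatedProduct` with relation code 13 ("epublication based on print product") instead of listing every ISBN with code 06.

```rust
// Hedged sketch of the print-ISBN selection introduced for the JSTOR output:
// prefer the hardback ISBN, fall back to the paperback, strip hyphens for ONIX.
// Types here are simplified stand-ins, not the thoth-client models.
#[derive(PartialEq)]
enum PublicationType {
    Paperback,
    Hardback,
}

struct Publication {
    publication_type: PublicationType,
    isbn: Option<String>,
}

fn print_isbn(publications: &[Publication]) -> Option<String> {
    let isbn_for = |wanted: PublicationType| {
        publications
            .iter()
            .find(|p| p.publication_type == wanted)
            .and_then(|p| p.isbn.clone())
    };
    isbn_for(PublicationType::Hardback)
        .or_else(|| isbn_for(PublicationType::Paperback))
        .map(|isbn| isbn.replace('-', ""))
}

fn main() {
    // Mirrors the hyphen-stripped output asserted in the JSTOR tests below.
    let pubs = vec![Publication {
        publication_type: PublicationType::Paperback,
        isbn: Some("978-1-4028-9462-6".into()),
    }];
    assert_eq!(print_isbn(&pubs).as_deref(), Some("9781402894626"));
}
```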
diff --git a/thoth-api-server/Cargo.toml b/thoth-api-server/Cargo.toml index d2797b77..d889104e 100644 --- a/thoth-api-server/Cargo.toml +++ b/thoth-api-server/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "thoth-api-server" -version = "0.8.5" +version = "0.8.6" authors = ["Javier Arias ", "Ross Higman "] edition = "2018" license = "Apache-2.0" @@ -9,8 +9,8 @@ repository = "https://github.com/thoth-pub/thoth" readme = "README.md" [dependencies] -thoth-api = { version = "0.8.5", path = "../thoth-api", features = ["backend"] } -thoth-errors = { version = "0.8.5", path = "../thoth-errors" } +thoth-api = { version = "0.8.6", path = "../thoth-api", features = ["backend"] } +thoth-errors = { version = "0.8.6", path = "../thoth-errors" } actix-web = "4.0.1" actix-cors = "0.6.0" actix-identity = "0.4.0" diff --git a/thoth-api-server/README.md b/thoth-api-server/README.md index 1da4ba12..82a4075b 100644 --- a/thoth-api-server/README.md +++ b/thoth-api-server/README.md @@ -1,5 +1,5 @@
-[...] +[...]

Thoth Client

diff --git a/thoth-api/Cargo.toml b/thoth-api/Cargo.toml index 7cb02775..08becb1c 100644 --- a/thoth-api/Cargo.toml +++ b/thoth-api/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "thoth-api" -version = "0.8.5" +version = "0.8.6" authors = ["Javier Arias ", "Ross Higman "] edition = "2018" license = "Apache-2.0" @@ -16,7 +16,7 @@ maintenance = { status = "actively-developed" } backend = ["diesel", "diesel-derive-enum", "diesel_migrations", "futures", "actix-web"] [dependencies] -thoth-errors = { version = "0.8.5", path = "../thoth-errors" } +thoth-errors = { version = "0.8.6", path = "../thoth-errors" } actix-web = { version = "4.0.1", optional = true } argon2rs = "0.2.5" isbn2 = "0.4.0" diff --git a/thoth-api/README.md b/thoth-api/README.md index f3b3be2f..83999092 100644 --- a/thoth-api/README.md +++ b/thoth-api/README.md @@ -1,5 +1,5 @@
-[...] +[...]

Thoth API

diff --git a/thoth-api/src/model/mod.rs b/thoth-api/src/model/mod.rs index 318b7216..2c22b2a4 100644 --- a/thoth-api/src/model/mod.rs +++ b/thoth-api/src/model/mod.rs @@ -384,7 +384,7 @@ where #[macro_export] macro_rules! crud_methods { ($table_dsl:expr, $entity_dsl:expr) => { - fn from_id(db: &crate::db::PgPool, entity_id: &Uuid) -> ThothResult { + fn from_id(db: &$crate::db::PgPool, entity_id: &Uuid) -> ThothResult { use diesel::{QueryDsl, RunQueryDsl}; let connection = db.get().unwrap(); @@ -394,7 +394,7 @@ macro_rules! crud_methods { } } - fn create(db: &crate::db::PgPool, data: &Self::NewEntity) -> ThothResult { + fn create(db: &$crate::db::PgPool, data: &Self::NewEntity) -> ThothResult { use diesel::RunQueryDsl; let connection = db.get().unwrap(); @@ -411,7 +411,7 @@ macro_rules! crud_methods { /// history entity record. fn update( &self, - db: &crate::db::PgPool, + db: &$crate::db::PgPool, data: &Self::PatchEntity, account_id: &Uuid, ) -> ThothResult { @@ -432,7 +432,7 @@ macro_rules! crud_methods { }) } - fn delete(self, db: &crate::db::PgPool) -> ThothResult { + fn delete(self, db: &$crate::db::PgPool) -> ThothResult { use diesel::{QueryDsl, RunQueryDsl}; let connection = db.get().unwrap(); diff --git a/thoth-app-server/Cargo.toml b/thoth-app-server/Cargo.toml index 36a95a8e..1eede4b9 100644 --- a/thoth-app-server/Cargo.toml +++ b/thoth-app-server/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "thoth-app-server" -version = "0.8.5" +version = "0.8.6" authors = ["Javier Arias ", "Ross Higman "] edition = "2018" license = "Apache-2.0" diff --git a/thoth-app-server/README.md b/thoth-app-server/README.md index afaa1e99..da436caf 100644 --- a/thoth-app-server/README.md +++ b/thoth-app-server/README.md @@ -1,5 +1,5 @@
-[...] +[...]

Thoth Client

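The `crate::` → `$crate::` substitutions in the `crud_methods!` macro above (and in the `pagination_helpers!`, `pagination_component!` and `graphql_query_builder!` macros further down) are a macro-hygiene fix: inside a `#[macro_export]` macro, a bare `crate::` path resolves in whichever crate expands the macro, so it breaks as soon as the macro is invoked from another workspace member. `$crate::` always points back to the crate that defines the macro. A minimal, self-contained sketch with hypothetical names:

```rust
// `$crate` expands to the defining crate's root path, so this macro keeps working
// even when another crate invokes it as `defining_crate::greeting!()`.
#[macro_export]
macro_rules! greeting {
    () => {
        // With plain `crate::helper()` this would look for `helper` in the *calling* crate.
        $crate::helper()
    };
}

pub fn helper() -> &'static str {
    "resolved in the defining crate"
}

fn main() {
    println!("{}", greeting!());
}
```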
diff --git a/thoth-app/Cargo.toml b/thoth-app/Cargo.toml index 7cb4b8c6..d81594d8 100644 --- a/thoth-app/Cargo.toml +++ b/thoth-app/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "thoth-app" -version = "0.8.5" +version = "0.8.6" authors = ["Javier Arias ", "Ross Higman "] edition = "2018" license = "Apache-2.0" @@ -33,5 +33,5 @@ serde = { version = "1.0.115", features = ["derive"] } serde_json = "1.0" url = "2.1.1" uuid = { version = "0.7", features = ["serde", "v4"] } -thoth-api = { version = "0.8.5", path = "../thoth-api" } -thoth-errors = { version = "0.8.5", path = "../thoth-errors" } +thoth-api = { version = "0.8.6", path = "../thoth-api" } +thoth-errors = { version = "0.8.6", path = "../thoth-errors" } diff --git a/thoth-app/README.md b/thoth-app/README.md index 94986101..36a28b9a 100644 --- a/thoth-app/README.md +++ b/thoth-app/README.md @@ -1,5 +1,5 @@
-[...] +[...]

Thoth APP

diff --git a/thoth-app/manifest.json b/thoth-app/manifest.json index 4ca8c26c..bd0ff0ef 100644 --- a/thoth-app/manifest.json +++ b/thoth-app/manifest.json @@ -9,7 +9,7 @@ "start_url": "/?homescreen=1", "background_color": "#ffffff", "theme_color": "#ffdd57", - "version": "0.8.5", + "version": "0.8.6", "icons": [ { "src": "\/android-icon-36x36.png", diff --git a/thoth-app/src/component/mod.rs b/thoth-app/src/component/mod.rs index 3a52509a..4d6ca5ef 100644 --- a/thoth-app/src/component/mod.rs +++ b/thoth-app/src/component/mod.rs @@ -1,8 +1,8 @@ #[macro_export] macro_rules! pagination_helpers { ($component:ident, $pagination_text:ident, $search_text:ident) => { - use crate::string::$pagination_text; - use crate::string::$search_text; + use $crate::string::$pagination_text; + use $crate::string::$search_text; impl $component { fn search_text(&self) -> String { @@ -36,11 +36,11 @@ macro_rules! pagination_helpers { { crate::string::PREVIOUS_PAGE_BUTTON } + >{ $crate::string::PREVIOUS_PAGE_BUTTON } { crate::string::NEXT_PAGE_BUTTON } + >{ $crate::string::NEXT_PAGE_BUTTON }

@@ -102,10 +102,10 @@ macro_rules! pagination_component { use yewtil::future::LinkFuture; use yewtil::NeqAssign; - use crate::component::utils::Loader; - use crate::component::utils::Reloader; - use crate::models::{EditRoute, CreateRoute, MetadataTable}; - use crate::route::AppRoute; + use $crate::component::utils::Loader; + use $crate::component::utils::Reloader; + use $crate::models::{EditRoute, CreateRoute, MetadataTable}; + use $crate::route::AppRoute; pub struct $component { limit: i32, diff --git a/thoth-app/src/models/mod.rs b/thoth-app/src/models/mod.rs index 26c61be3..52085272 100644 --- a/thoth-app/src/models/mod.rs +++ b/thoth-app/src/models/mod.rs @@ -16,7 +16,7 @@ macro_rules! graphql_query_builder { use yewtil::fetch::Json; use yewtil::fetch::MethodBody; - use crate::THOTH_GRAPHQL_API; + use $crate::THOTH_GRAPHQL_API; pub type $fetch = Fetch<$request, $response_body>; pub type $fetch_action = FetchAction<$response_body>; @@ -51,7 +51,7 @@ macro_rules! graphql_query_builder { } fn headers(&self) -> Vec<(String, String)> { - use crate::service::account::AccountService; + use $crate::service::account::AccountService; let account_service = AccountService::new(); let json = ("Content-Type".into(), "application/json".into()); diff --git a/thoth-app/src/models/work/mod.rs b/thoth-app/src/models/work/mod.rs index 4131bc96..fcee4b0c 100644 --- a/thoth-app/src/models/work/mod.rs +++ b/thoth-app/src/models/work/mod.rs @@ -122,6 +122,7 @@ pub trait DisplayWork { fn onix_oapen_endpoint(&self) -> String; fn onix_jstor_endpoint(&self) -> String; fn onix_google_books_endpoint(&self) -> String; + fn onix_overdrive_endpoint(&self) -> String; fn onix_ebsco_host_endpoint(&self) -> String; fn csv_endpoint(&self) -> String; fn kbart_endpoint(&self) -> String; @@ -162,6 +163,13 @@ impl DisplayWork for WorkWithRelations { ) } + fn onix_overdrive_endpoint(&self) -> String { + format!( + "{}/specifications/onix_3.0::overdrive/work/{}", + THOTH_EXPORT_API, &self.work_id + ) + } + fn onix_ebsco_host_endpoint(&self) -> String { format!( "{}/specifications/onix_2.1::ebsco_host/work/{}", @@ -410,6 +418,12 @@ impl DisplayWork for WorkWithRelations { > {"ONIX 3.0 (Google Books)"} + + {"ONIX 3.0 (OverDrive)"} + ", "Ross Higman "] edition = "2018" license = "Apache-2.0" @@ -9,8 +9,8 @@ repository = "https://github.com/thoth-pub/thoth" readme = "README.md" [dependencies] -thoth-api = {version = "0.8.5", path = "../thoth-api" } -thoth-errors = {version = "0.8.5", path = "../thoth-errors" } +thoth-api = {version = "0.8.6", path = "../thoth-api" } +thoth-errors = {version = "0.8.6", path = "../thoth-errors" } graphql_client = "0.9.0" chrono = { version = "0.4", features = ["serde"] } reqwest = { version = "0.11", features = ["json"] } diff --git a/thoth-client/README.md b/thoth-client/README.md index f530d217..9844a7bf 100644 --- a/thoth-client/README.md +++ b/thoth-client/README.md @@ -1,5 +1,5 @@

-[...] +[...]

Thoth Client

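The `onix_overdrive_endpoint` helper added to `thoth-app/src/models/work/mod.rs` above builds the app's download link for the new specification, mirroring the route the export server registers later in this patch (`/specifications/onix_3.0::overdrive/work/{work_id}`). Purely as an illustration of that route shape, a hedged client-side sketch; the base URL and the use of `reqwest` (already a `thoth-client` dependency) are assumptions for the example, not part of the patch:

```rust
// Fetch the OverDrive ONIX 3.0 record for one work from a Thoth export API instance.
// The path mirrors `onix_overdrive_endpoint`; the base URL is an assumed example value.
async fn fetch_overdrive_onix(work_id: &str) -> Result<String, reqwest::Error> {
    let export_api = "https://export.thoth.pub"; // assumption: export API base URL
    let url = format!(
        "{}/specifications/onix_3.0::overdrive/work/{}",
        export_api, work_id
    );
    reqwest::get(url).await?.text().await
}
```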
diff --git a/thoth-errors/Cargo.toml b/thoth-errors/Cargo.toml index 4737dac2..1170f142 100644 --- a/thoth-errors/Cargo.toml +++ b/thoth-errors/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "thoth-errors" -version = "0.8.5" +version = "0.8.6" authors = ["Javier Arias ", "Ross Higman "] edition = "2018" license = "Apache-2.0" diff --git a/thoth-errors/README.md b/thoth-errors/README.md index 94f45b6e..0d4b8a50 100644 --- a/thoth-errors/README.md +++ b/thoth-errors/README.md @@ -1,5 +1,5 @@
-[...] +[...]

Thoth Errors

diff --git a/thoth-export-server/Cargo.toml b/thoth-export-server/Cargo.toml index dea1c61a..c17c7771 100644 --- a/thoth-export-server/Cargo.toml +++ b/thoth-export-server/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "thoth-export-server" -version = "0.8.5" +version = "0.8.6" authors = ["Javier Arias ", "Ross Higman "] edition = "2018" license = "Apache-2.0" @@ -9,9 +9,9 @@ repository = "https://github.com/thoth-pub/thoth" readme = "README.md" [dependencies] -thoth-api = { version = "0.8.5", path = "../thoth-api" } -thoth-errors = { version = "0.8.5", path = "../thoth-errors" } -thoth-client = { version = "0.8.5", path = "../thoth-client" } +thoth-api = { version = "0.8.6", path = "../thoth-api" } +thoth-errors = { version = "0.8.6", path = "../thoth-errors" } +thoth-client = { version = "0.8.6", path = "../thoth-client" } actix-web = "4.0.1" actix-cors = "0.6.0" chrono = { version = "0.4", features = ["serde"] } diff --git a/thoth-export-server/README.md b/thoth-export-server/README.md index 33b0f498..12b9c0bc 100644 --- a/thoth-export-server/README.md +++ b/thoth-export-server/README.md @@ -1,5 +1,5 @@
-[...] +[...]

Thoth Export API

diff --git a/thoth-export-server/src/data.rs index 8fab35bf..5869f399 100644 --- a/thoth-export-server/src/data.rs +++ b/thoth-export-server/src/data.rs @@ -37,6 +37,12 @@ lazy_static! { format: concat!(env!("THOTH_EXPORT_API"), "/formats/onix_3.0"), accepted_by: vec![concat!(env!("THOTH_EXPORT_API"), "/platforms/google_books"),], }, + Specification { + id: "onix_3.0::overdrive", + name: "OverDrive ONIX 3.0", + format: concat!(env!("THOTH_EXPORT_API"), "/formats/onix_3.0"), + accepted_by: vec![concat!(env!("THOTH_EXPORT_API"), "/platforms/overdrive"),], + }, Specification { id: "onix_2.1::ebsco_host", name: "EBSCO Host ONIX 2.1", @@ -126,6 +132,14 @@ lazy_static! { "/specifications/onix_3.0::google_books" ),], }, + Platform { + id: "overdrive", + name: "OverDrive", + accepts: vec![concat!( + env!("THOTH_EXPORT_API"), + "/specifications/onix_3.0::overdrive" + ),], + }, Platform { id: "ebsco_host", name: "EBSCO Host", @@ -207,6 +221,10 @@ lazy_static! { env!("THOTH_EXPORT_API"), "/specifications/onix_3.0::google_books" ), + concat!( + env!("THOTH_EXPORT_API"), + "/specifications/onix_3.0::overdrive" + ), ], }, Format {
diff --git a/thoth-export-server/src/record.rs index 85891cbe..51627191 100644 --- a/thoth-export-server/src/record.rs +++ b/thoth-export-server/src/record.rs @@ -11,7 +11,7 @@ use thoth_errors::{ThothError, ThothResult}; use crate::bibtex::{BibtexSpecification, BibtexThoth}; use crate::csv::{CsvSpecification, CsvThoth, KbartOclc}; use crate::xml::{ - DoiDepositCrossref, Onix21EbscoHost, Onix3GoogleBooks, Onix3Jstor, Onix3Oapen, + DoiDepositCrossref, Onix21EbscoHost, Onix3GoogleBooks, Onix3Jstor, Onix3Oapen, Onix3Overdrive, Onix3ProjectMuse, XmlSpecification, }; @@ -28,6 +28,7 @@ pub(crate) enum MetadataSpecification { Onix3Oapen(Onix3Oapen), Onix3Jstor(Onix3Jstor), Onix3GoogleBooks(Onix3GoogleBooks), + Onix3Overdrive(Onix3Overdrive), Onix21EbscoHost(Onix21EbscoHost), CsvThoth(CsvThoth), KbartOclc(KbartOclc), @@ -68,6 +69,7 @@ where MetadataSpecification::Onix3Oapen(_) => Self::XML_MIME_TYPE, MetadataSpecification::Onix3Jstor(_) => Self::XML_MIME_TYPE, MetadataSpecification::Onix3GoogleBooks(_) => Self::XML_MIME_TYPE, + MetadataSpecification::Onix3Overdrive(_) => Self::XML_MIME_TYPE, MetadataSpecification::Onix21EbscoHost(_) => Self::XML_MIME_TYPE, MetadataSpecification::CsvThoth(_) => Self::CSV_MIME_TYPE, MetadataSpecification::KbartOclc(_) => Self::TXT_MIME_TYPE, @@ -82,6 +84,7 @@ where MetadataSpecification::Onix3Oapen(_) => self.xml_file_name(), MetadataSpecification::Onix3Jstor(_) => self.xml_file_name(), MetadataSpecification::Onix3GoogleBooks(_) => self.xml_file_name(), + MetadataSpecification::Onix3Overdrive(_) => self.xml_file_name(), MetadataSpecification::Onix21EbscoHost(_) => self.xml_file_name(), MetadataSpecification::CsvThoth(_) => self.csv_file_name(), MetadataSpecification::KbartOclc(_) => self.txt_file_name(), @@ -135,6 +138,9 @@ impl MetadataRecord<Vec<Work>> { MetadataSpecification::Onix3GoogleBooks(onix3_google_books) => { onix3_google_books.generate(&self.data, None) } + MetadataSpecification::Onix3Overdrive(onix3_overdrive) => { + onix3_overdrive.generate(&self.data, None) + } MetadataSpecification::Onix21EbscoHost(onix21_ebsco_host) => { onix21_ebsco_host.generate(&self.data, Some(DOCTYPE_ONIX21_REF)) } @@ -198,6 +204,7 @@ impl FromStr for MetadataSpecification { "onix_3.0::google_books" => { Ok(MetadataSpecification::Onix3GoogleBooks(Onix3GoogleBooks {})) } + "onix_3.0::overdrive" =>
Ok(MetadataSpecification::Onix3Overdrive(Onix3Overdrive {})), "onix_2.1::ebsco_host" => { Ok(MetadataSpecification::Onix21EbscoHost(Onix21EbscoHost {})) } @@ -219,6 +226,7 @@ impl ToString for MetadataSpecification { MetadataSpecification::Onix3Oapen(_) => "onix_3.0::oapen".to_string(), MetadataSpecification::Onix3Jstor(_) => "onix_3.0::jstor".to_string(), MetadataSpecification::Onix3GoogleBooks(_) => "onix_3.0::google_books".to_string(), + MetadataSpecification::Onix3Overdrive(_) => "onix_3.0::overdrive".to_string(), MetadataSpecification::Onix21EbscoHost(_) => "onix_2.1::ebsco_host".to_string(), MetadataSpecification::CsvThoth(_) => "csv::thoth".to_string(), MetadataSpecification::KbartOclc(_) => "kbart::oclc".to_string(), @@ -288,6 +296,15 @@ mod tests { to_test.file_name(), "onix_3.0__google_books__some_id.xml".to_string() ); + let to_test = MetadataRecord::new( + "some_id".to_string(), + MetadataSpecification::Onix3Overdrive(Onix3Overdrive {}), + vec![], + ); + assert_eq!( + to_test.file_name(), + "onix_3.0__overdrive__some_id.xml".to_string() + ); let to_test = MetadataRecord::new( "some_id".to_string(), MetadataSpecification::Onix21EbscoHost(Onix21EbscoHost {}), diff --git a/thoth-export-server/src/xml/mod.rs b/thoth-export-server/src/xml/mod.rs index 5a1066c4..ce046d87 100644 --- a/thoth-export-server/src/xml/mod.rs +++ b/thoth-export-server/src/xml/mod.rs @@ -87,6 +87,8 @@ mod onix3_jstor; pub(crate) use onix3_jstor::Onix3Jstor; mod onix3_google_books; pub(crate) use onix3_google_books::Onix3GoogleBooks; +mod onix3_overdrive; +pub(crate) use onix3_overdrive::Onix3Overdrive; mod onix21_ebsco_host; pub(crate) use onix21_ebsco_host::Onix21EbscoHost; mod doideposit_crossref; diff --git a/thoth-export-server/src/xml/onix21_ebsco_host.rs b/thoth-export-server/src/xml/onix21_ebsco_host.rs index 094e3b0d..08702a87 100644 --- a/thoth-export-server/src/xml/onix21_ebsco_host.rs +++ b/thoth-export-server/src/xml/onix21_ebsco_host.rs @@ -739,6 +739,7 @@ mod tests { // Test standard output let output = generate_test_output(true, &test_language); + assert!(output.contains(r#""#)); assert!(output.contains(r#" 02"#)); assert!(output.contains(r#" spa"#)); @@ -750,6 +751,7 @@ mod tests { ] { test_language.language_relation = language_relation; let output = generate_test_output(true, &test_language); + assert!(output.contains(r#""#)); assert!(output.contains(r#" 01"#)); assert!(output.contains(r#" wel"#)); } diff --git a/thoth-export-server/src/xml/onix3_google_books.rs b/thoth-export-server/src/xml/onix3_google_books.rs index 69d0e2fd..07179175 100644 --- a/thoth-export-server/src/xml/onix3_google_books.rs +++ b/thoth-export-server/src/xml/onix3_google_books.rs @@ -397,13 +397,14 @@ impl XmlElementBlock for Work { }) .map(|pr| pr.unit_price) { + let formatted_price = format!("{:.2}", price); write_element_block("Price", w, |w| { // 02 RRP including tax write_element_block("PriceType", w, |w| { w.write(XmlEvent::Characters("02")).map_err(|e| e.into()) })?; write_element_block("PriceAmount", w, |w| { - w.write(XmlEvent::Characters(&price.to_string())) + w.write(XmlEvent::Characters(&formatted_price)) .map_err(|e| e.into()) })?; write_element_block("CurrencyCode", w, |w| { @@ -732,6 +733,7 @@ mod tests { // Test standard output let output = generate_test_output(true, &test_language); + assert!(output.contains(r#""#)); assert!(output.contains(r#" 02"#)); assert!(output.contains(r#" spa"#)); @@ -743,6 +745,7 @@ mod tests { ] { test_language.language_relation = language_relation; let output = 
generate_test_output(true, &test_language); + assert!(output.contains(r#""#)); assert!(output.contains(r#" 01"#)); assert!(output.contains(r#" wel"#)); } @@ -887,7 +890,7 @@ mod tests { }, WorkPublicationsPrices { currency_code: CurrencyCode::GBP, - unit_price: 4.95, + unit_price: 5.0, }, ], locations: vec![WorkPublicationsLocations { @@ -1021,7 +1024,7 @@ mod tests { assert!(output.contains(r#" 20"#)); assert!(output.contains(r#" "#)); assert!(output.contains(r#" 02"#)); - assert!(output.contains(r#" 4.95"#)); + assert!(output.contains(r#" 5.00"#)); assert!(output.contains(r#" GBP"#)); assert!(output.contains(r#" "#)); assert!(output.contains(r#" WORLD"#)); @@ -1054,7 +1057,7 @@ mod tests { // No GBP price supplied assert!(!output.contains(r#" "#)); assert!(!output.contains(r#" 02"#)); - assert!(!output.contains(r#" 4.95"#)); + assert!(!output.contains(r#" 5.00"#)); assert!(!output.contains(r#" GBP"#)); assert!(!output.contains(r#" "#)); assert!(!output.contains(r#" WORLD"#)); diff --git a/thoth-export-server/src/xml/onix3_jstor.rs b/thoth-export-server/src/xml/onix3_jstor.rs index 72170970..9c2dc667 100644 --- a/thoth-export-server/src/xml/onix3_jstor.rs +++ b/thoth-export-server/src/xml/onix3_jstor.rs @@ -71,7 +71,7 @@ impl XmlElementBlock for Work { .and_then(|l| l.full_text_url.as_ref()) { let work_id = format!("urn:uuid:{}", self.work_id); - let (main_isbn, isbns) = get_publications_data(&self.publications); + let (main_isbn, print_isbn) = get_publications_data(&self.publications); write_element_block("Product", w, |w| { write_element_block("RecordReference", w, |w| { w.write(XmlEvent::Characters(&work_id)) @@ -295,26 +295,25 @@ impl XmlElementBlock for Work { } Ok(()) })?; - if !isbns.is_empty() { + if !print_isbn.is_empty() { write_element_block("RelatedMaterial", w, |w| { - for isbn in &isbns { - write_element_block("RelatedProduct", w, |w| { - // 06 Alternative format - write_element_block("ProductRelationCode", w, |w| { - w.write(XmlEvent::Characters("06")).map_err(|e| e.into()) + // The only RelatedProduct supported by JSTOR is the print ISBN (if any). 
+ write_element_block("RelatedProduct", w, |w| { + // 13 Epublication based on (print product) + write_element_block("ProductRelationCode", w, |w| { + w.write(XmlEvent::Characters("13")).map_err(|e| e.into()) + })?; + write_element_block("ProductIdentifier", w, |w| { + // 15 ISBN-13 + write_element_block("ProductIDType", w, |w| { + w.write(XmlEvent::Characters("15")).map_err(|e| e.into()) })?; - write_element_block("ProductIdentifier", w, |w| { - // 15 ISBN-13 - write_element_block("ProductIDType", w, |w| { - w.write(XmlEvent::Characters("15")).map_err(|e| e.into()) - })?; - write_element_block("IDValue", w, |w| { - w.write(XmlEvent::Characters(isbn)).map_err(|e| e.into()) - }) + write_element_block("IDValue", w, |w| { + w.write(XmlEvent::Characters(&print_isbn)) + .map_err(|e| e.into()) }) - })?; - } - Ok(()) + }) + }) })?; } write_element_block("ProductSupply", w, |w| { @@ -385,26 +384,32 @@ impl XmlElementBlock for Work { } } -fn get_publications_data(publications: &[WorkPublications]) -> (String, Vec) { - let mut main_isbn = "".to_string(); - let mut isbns: Vec = Vec::new(); +fn get_publications_data(publications: &[WorkPublications]) -> (String, String) { + let pdf_isbn = publications + .iter() + .find(|p| p.publication_type.eq(&PublicationType::PDF)) + .and_then(|p| p.isbn.as_ref()); + let paperback_isbn = publications + .iter() + .find(|p| p.publication_type.eq(&PublicationType::PAPERBACK)) + .and_then(|p| p.isbn.as_ref()); + let hardback_isbn = publications + .iter() + .find(|p| p.publication_type.eq(&PublicationType::HARDBACK)) + .and_then(|p| p.isbn.as_ref()); - for publication in publications { - if let Some(isbn) = &publication.isbn.as_ref().map(|i| i.to_string()) { - isbns.push(isbn.replace('-', "")); - // The default product ISBN is the PDF's - if publication.publication_type.eq(&PublicationType::PDF) { - main_isbn = isbn.replace('-', ""); - } - // Books that don't have a PDF ISBN will use the paperback's - if publication.publication_type.eq(&PublicationType::PAPERBACK) && main_isbn.is_empty() - { - main_isbn = isbn.replace('-', ""); - } - } - } + // The default product ISBN is the PDF's + let main_isbn = pdf_isbn + // Books that don't have a PDF ISBN will use the paperback's + .or(paperback_isbn) + .map_or_else(|| "".to_string(), |i| i.to_string()) + .replace('-', ""); + let print_isbn = hardback_isbn + .or(paperback_isbn) + .map_or_else(|| "".to_string(), |i| i.to_string()) + .replace('-', ""); - (main_isbn, isbns) + (main_isbn, print_isbn) } impl XmlElement for WorkStatus { @@ -657,6 +662,7 @@ mod tests { // Test standard output let output = generate_test_output(true, &test_language); + assert!(output.contains(r#""#)); assert!(output.contains(r#" 02"#)); assert!(output.contains(r#" spa"#)); @@ -668,6 +674,7 @@ mod tests { ] { test_language.language_relation = language_relation; let output = generate_test_output(true, &test_language); + assert!(output.contains(r#""#)); assert!(output.contains(r#" 01"#)); assert!(output.contains(r#" wel"#)); } @@ -727,29 +734,49 @@ mod tests { }], contributions: vec![], languages: vec![], - publications: vec![WorkPublications { - publication_id: Uuid::from_str("00000000-0000-0000-DDDD-000000000004").unwrap(), - publication_type: PublicationType::PDF, - isbn: Some(Isbn::from_str("978-3-16-148410-0").unwrap()), - width_mm: None, - width_cm: None, - width_in: None, - height_mm: None, - height_cm: None, - height_in: None, - depth_mm: None, - depth_cm: None, - depth_in: None, - weight_g: None, - weight_oz: None, - prices: vec![], - locations: 
vec![WorkPublicationsLocations { - landing_page: Some("https://www.book.com/pdf_landing".to_string()), - full_text_url: Some("https://www.book.com/pdf_fulltext".to_string()), - location_platform: LocationPlatform::OTHER, - canonical: true, - }], - }], + publications: vec![ + WorkPublications { + publication_id: Uuid::from_str("00000000-0000-0000-CCCC-000000000003").unwrap(), + publication_type: PublicationType::HARDBACK, + isbn: Some(Isbn::from_str("978-1-4028-9462-6").unwrap()), + width_mm: None, + width_cm: None, + width_in: None, + height_mm: None, + height_cm: None, + height_in: None, + depth_mm: None, + depth_cm: None, + depth_in: None, + weight_g: None, + weight_oz: None, + prices: vec![], + locations: vec![], + }, + WorkPublications { + publication_id: Uuid::from_str("00000000-0000-0000-DDDD-000000000004").unwrap(), + publication_type: PublicationType::PDF, + isbn: Some(Isbn::from_str("978-3-16-148410-0").unwrap()), + width_mm: None, + width_cm: None, + width_in: None, + height_mm: None, + height_cm: None, + height_in: None, + depth_mm: None, + depth_cm: None, + depth_in: None, + weight_g: None, + weight_oz: None, + prices: vec![], + locations: vec![WorkPublicationsLocations { + landing_page: Some("https://www.book.com/pdf_landing".to_string()), + full_text_url: Some("https://www.book.com/pdf_fulltext".to_string()), + location_platform: LocationPlatform::OTHER, + canonical: true, + }], + }, + ], subjects: vec![ WorkSubjects { subject_code: "AAB".to_string(), @@ -874,10 +901,10 @@ mod tests { assert!(output.contains(r#" 01"#)); assert!(output.contains(r#" 19991231"#)); assert!(output.contains(r#" "#)); - assert!(output.contains(r#" 06"#)); + assert!(output.contains(r#" 13"#)); assert!(output.contains(r#" "#)); assert!(output.contains(r#" 15"#)); - assert!(output.contains(r#" 9783161484100"#)); + assert!(output.contains(r#" 9781402894626"#)); assert!(output.contains(r#" "#)); assert!(output.contains(r#" "#)); assert!(output.contains(r#" "#)); @@ -930,6 +957,7 @@ mod tests { test_work.publication_date = None; test_work.landing_page = None; test_work.subjects.clear(); + test_work.publications[0].publication_type = PublicationType::XML; let output = generate_test_output(true, &test_work); // No DOI supplied assert!(!output.contains(r#" 06"#)); @@ -985,10 +1013,16 @@ mod tests { assert!(!output.contains(r#" keyword1"#)); assert!(!output.contains(r#" B2"#)); assert!(!output.contains(r#" custom1"#)); + // No print ISBN supplied + assert!(!output.contains(r#" "#)); + assert!(!output.contains(r#" 13"#)); + assert!(!output.contains(r#" "#)); + assert!(!output.contains(r#" 15"#)); + assert!(!output.contains(r#" 9781402894626"#)); - // Remove the only publication, which is the PDF + // Remove the last publication, which is the PDF // Result: error (can't generate OAPEN ONIX without PDF URL) - test_work.publications.clear(); + test_work.publications.pop(); let output = generate_test_output(false, &test_work); assert_eq!( output, diff --git a/thoth-export-server/src/xml/onix3_oapen.rs b/thoth-export-server/src/xml/onix3_oapen.rs index 065633a5..4d93fff3 100644 --- a/thoth-export-server/src/xml/onix3_oapen.rs +++ b/thoth-export-server/src/xml/onix3_oapen.rs @@ -756,6 +756,7 @@ mod tests { // Test standard output let output = generate_test_output(true, &test_language); + assert!(output.contains(r#""#)); assert!(output.contains(r#" 02"#)); assert!(output.contains(r#" spa"#)); @@ -767,6 +768,7 @@ mod tests { ] { test_language.language_relation = language_relation; let output = 
generate_test_output(true, &test_language); + assert!(output.contains(r#""#)); assert!(output.contains(r#" 01"#)); assert!(output.contains(r#" wel"#)); } diff --git a/thoth-export-server/src/xml/onix3_overdrive.rs b/thoth-export-server/src/xml/onix3_overdrive.rs new file mode 100644 index 00000000..ed005bc7 --- /dev/null +++ b/thoth-export-server/src/xml/onix3_overdrive.rs @@ -0,0 +1,1417 @@ +use chrono::Utc; +use std::collections::HashMap; +use std::io::Write; +use thoth_client::{ + ContributionType, CurrencyCode, LanguageRelation, PublicationType, SubjectType, Work, + WorkContributions, WorkFundings, WorkIssues, WorkLanguages, WorkPublications, WorkStatus, + WorkType, +}; +use xml::writer::{EventWriter, XmlEvent}; + +use super::{write_element_block, XmlElement, XmlSpecification}; +use crate::xml::{write_full_element_block, XmlElementBlock}; +use thoth_errors::{ThothError, ThothResult}; + +pub struct Onix3Overdrive {} + +impl XmlSpecification for Onix3Overdrive { + fn handle_event(w: &mut EventWriter, works: &[Work]) -> ThothResult<()> { + let mut attr_map: HashMap<&str, &str> = HashMap::new(); + + attr_map.insert("release", "3.0"); + attr_map.insert("xmlns", "http://ns.editeur.org/onix/3.0/reference"); + + write_full_element_block("ONIXMessage", None, Some(attr_map), w, |w| { + write_element_block("Header", w, |w| { + write_element_block("Sender", w, |w| { + write_element_block("SenderName", w, |w| { + w.write(XmlEvent::Characters("Thoth")).map_err(|e| e.into()) + })?; + write_element_block("EmailAddress", w, |w| { + w.write(XmlEvent::Characters("info@thoth.pub")) + .map_err(|e| e.into()) + }) + })?; + write_element_block("SentDateTime", w, |w| { + w.write(XmlEvent::Characters( + &Utc::now().format("%Y%m%dT%H%M%S").to_string(), + )) + .map_err(|e| e.into()) + }) + })?; + + match works.len() { + 0 => Err(ThothError::IncompleteMetadataRecord( + "onix_3.0::overdrive".to_string(), + "Not enough data".to_string(), + )), + 1 => XmlElementBlock::::xml_element(works.first().unwrap(), w), + _ => { + for work in works.iter() { + // Do not include Chapters in full publisher metadata record + // (assumes that a publisher will always have more than one work) + if work.work_type != WorkType::BOOK_CHAPTER { + XmlElementBlock::::xml_element(work, w).ok(); + } + } + Ok(()) + } + } + }) + } +} + +impl XmlElementBlock for Work { + fn xml_element(&self, w: &mut EventWriter) -> ThothResult<()> { + // Don't output works with no publication date (mandatory in OverDrive) + if self.publication_date.is_none() { + Err(ThothError::IncompleteMetadataRecord( + "onix_3.0::overdrive".to_string(), + "Missing Publication Date".to_string(), + )) + // Don't output works with no long abstract (Description element mandatory in OverDrive) + } else if self.long_abstract.is_none() { + Err(ThothError::IncompleteMetadataRecord( + "onix_3.0::overdrive".to_string(), + "Missing Long Abstract".to_string(), + )) + // Don't output works with no language codes (mandatory in OverDrive) + } else if self.languages.is_empty() { + Err(ThothError::IncompleteMetadataRecord( + "onix_3.0::overdrive".to_string(), + "Missing Language Code(s)".to_string(), + )) + // We can only generate the document if there's an EPUB or PDF + // with a non-zero price (OverDrive only accepts priced items) + } else if let Some(main_publication) = self + .publications + .iter() + // For preference, distribute the EPUB only + .find(|p| { + p.publication_type.eq(&PublicationType::EPUB) + && p.locations + .iter() + .any(|l| l.canonical && l.full_text_url.is_some()) + // 
Thoth database only accepts non-zero prices + && !p.prices.is_empty() + }) + // If no EPUB is found, distribute the PDF only + .or_else(|| { + self.publications.iter().find(|p| { + p.publication_type.eq(&PublicationType::PDF) + && p.locations + .iter() + .any(|l| l.canonical && l.full_text_url.is_some()) + // Thoth database only accepts non-zero prices + && !p.prices.is_empty() + }) + }) + { + let work_id = format!("urn:uuid:{}", self.work_id); + let (main_isbn, isbns) = get_publications_data(&self.publications, main_publication); + write_element_block("Product", w, |w| { + write_element_block("RecordReference", w, |w| { + w.write(XmlEvent::Characters(&work_id)) + .map_err(|e| e.into()) + })?; + // 03 Notification confirmed on publication + write_element_block("NotificationType", w, |w| { + w.write(XmlEvent::Characters("03")).map_err(|e| e.into()) + })?; + // 01 Publisher + write_element_block("RecordSourceType", w, |w| { + w.write(XmlEvent::Characters("01")).map_err(|e| e.into()) + })?; + write_element_block("ProductIdentifier", w, |w| { + // 01 Proprietary + write_element_block("ProductIDType", w, |w| { + w.write(XmlEvent::Characters("01")).map_err(|e| e.into()) + })?; + write_element_block("IDValue", w, |w| { + w.write(XmlEvent::Characters(&work_id)) + .map_err(|e| e.into()) + }) + })?; + write_element_block("ProductIdentifier", w, |w| { + // 15 ISBN-13 + write_element_block("ProductIDType", w, |w| { + w.write(XmlEvent::Characters("15")).map_err(|e| e.into()) + })?; + write_element_block("IDValue", w, |w| { + w.write(XmlEvent::Characters(&main_isbn)) + .map_err(|e| e.into()) + }) + })?; + if let Some(doi) = &self.doi { + write_element_block("ProductIdentifier", w, |w| { + write_element_block("ProductIDType", w, |w| { + w.write(XmlEvent::Characters("06")).map_err(|e| e.into()) + })?; + write_element_block("IDValue", w, |w| { + w.write(XmlEvent::Characters(&doi.to_string())) + .map_err(|e| e.into()) + }) + })?; + } + write_element_block("DescriptiveDetail", w, |w| { + // 00 Single-component retail product + write_element_block("ProductComposition", w, |w| { + w.write(XmlEvent::Characters("00")).map_err(|e| e.into()) + })?; + // EB Digital download and online + write_element_block("ProductForm", w, |w| { + w.write(XmlEvent::Characters("EB")).map_err(|e| e.into()) + })?; + let digital_type = match main_publication.publication_type { + PublicationType::EPUB => "E101", + PublicationType::PDF => "E107", + _ => unreachable!(), + }; + write_element_block("ProductFormDetail", w, |w| { + w.write(XmlEvent::Characters(digital_type)) + .map_err(|e| e.into()) + })?; + // 10 Text (eye-readable) + write_element_block("PrimaryContentType", w, |w| { + w.write(XmlEvent::Characters("10")).map_err(|e| e.into()) + })?; + if let Some(license) = &self.license { + write_element_block("EpubLicense", w, |w| { + write_element_block("EpubLicenseName", w, |w| { + w.write(XmlEvent::Characters("Creative Commons License")) + .map_err(|e| e.into()) + })?; + write_element_block("EpubLicenseExpression", w, |w| { + write_element_block("EpubLicenseExpressionType", w, |w| { + w.write(XmlEvent::Characters("02")).map_err(|e| e.into()) + })?; + write_element_block("EpubLicenseExpressionLink", w, |w| { + w.write(XmlEvent::Characters(license)).map_err(|e| e.into()) + }) + }) + })?; + } + for issue in &self.issues { + XmlElementBlock::::xml_element(issue, w).ok(); + } + write_element_block("TitleDetail", w, |w| { + // 01 Distinctive title (book) + write_element_block("TitleType", w, |w| { + 
w.write(XmlEvent::Characters("01")).map_err(|e| e.into()) + })?; + write_element_block("TitleElement", w, |w| { + // 01 Product + write_element_block("TitleElementLevel", w, |w| { + w.write(XmlEvent::Characters("01")).map_err(|e| e.into()) + })?; + write_element_block("TitleText", w, |w| { + w.write(XmlEvent::Characters(&self.title)) + .map_err(|e| e.into()) + })?; + if let Some(subtitle) = &self.subtitle { + write_element_block("Subtitle", w, |w| { + w.write(XmlEvent::Characters(subtitle)) + .map_err(|e| e.into()) + })?; + } + Ok(()) + }) + })?; + for contribution in &self.contributions { + XmlElementBlock::::xml_element(contribution, w).ok(); + } + for language in &self.languages { + XmlElementBlock::::xml_element(language, w).ok(); + } + if let Some(page_count) = self.page_count { + write_element_block("Extent", w, |w| { + // 00 Main content + write_element_block("ExtentType", w, |w| { + w.write(XmlEvent::Characters("00")).map_err(|e| e.into()) + })?; + write_element_block("ExtentValue", w, |w| { + w.write(XmlEvent::Characters(&page_count.to_string())) + .map_err(|e| e.into()) + })?; + // 03 Pages + write_element_block("ExtentUnit", w, |w| { + w.write(XmlEvent::Characters("03")).map_err(|e| e.into()) + }) + })?; + } + for subject in &self.subjects { + write_element_block("Subject", w, |w| { + XmlElement::::xml_element(&subject.subject_type, w)?; + write_element_block("SubjectCode", w, |w| { + w.write(XmlEvent::Characters(&subject.subject_code)) + .map_err(|e| e.into()) + }) + })?; + } + write_element_block("Audience", w, |w| { + // 01 ONIX audience codes + write_element_block("AudienceCodeType", w, |w| { + w.write(XmlEvent::Characters("01")).map_err(|e| e.into()) + })?; + // 06 Professional and scholarly + write_element_block("AudienceCodeValue", w, |w| { + w.write(XmlEvent::Characters("06")).map_err(|e| e.into()) + }) + }) + })?; + let mut lang_fmt: HashMap<&str, &str> = HashMap::new(); + lang_fmt.insert("language", "eng"); + write_element_block("CollateralDetail", w, |w| { + write_element_block("TextContent", w, |w| { + // 03 Description ("30 Abstract" not implemented in OverDrive) + write_element_block("TextType", w, |w| { + w.write(XmlEvent::Characters("03")).map_err(|e| e.into()) + })?; + // 00 Unrestricted + write_element_block("ContentAudience", w, |w| { + w.write(XmlEvent::Characters("00")).map_err(|e| e.into()) + })?; + write_full_element_block("Text", None, Some(lang_fmt.clone()), w, |w| { + w.write(XmlEvent::Characters(self.long_abstract.as_ref().unwrap())) + .map_err(|e| e.into()) + }) + })?; + if let Some(toc) = &self.toc { + write_element_block("TextContent", w, |w| { + // 04 Table of contents + write_element_block("TextType", w, |w| { + w.write(XmlEvent::Characters("04")).map_err(|e| e.into()) + })?; + // 00 Unrestricted + write_element_block("ContentAudience", w, |w| { + w.write(XmlEvent::Characters("00")).map_err(|e| e.into()) + })?; + write_full_element_block("Text", None, Some(lang_fmt.clone()), w, |w| { + w.write(XmlEvent::Characters(toc)).map_err(|e| e.into()) + }) + })?; + } + if let Some(cover_url) = &self.cover_url { + write_element_block("SupportingResource", w, |w| { + // 01 Front cover + write_element_block("ResourceContentType", w, |w| { + w.write(XmlEvent::Characters("01")).map_err(|e| e.into()) + })?; + // 00 Unrestricted + write_element_block("ContentAudience", w, |w| { + w.write(XmlEvent::Characters("00")).map_err(|e| e.into()) + })?; + // 03 Image + write_element_block("ResourceMode", w, |w| { + w.write(XmlEvent::Characters("03")).map_err(|e| e.into()) + 
})?; + write_element_block("ResourceVersion", w, |w| { + // 02 Downloadable file + write_element_block("ResourceForm", w, |w| { + w.write(XmlEvent::Characters("02")).map_err(|e| e.into()) + })?; + write_element_block("ResourceLink", w, |w| { + w.write(XmlEvent::Characters(cover_url)) + .map_err(|e| e.into()) + }) + }) + })?; + } + Ok(()) + })?; + write_element_block("PublishingDetail", w, |w| { + write_element_block("Imprint", w, |w| { + write_element_block("ImprintName", w, |w| { + w.write(XmlEvent::Characters(&self.imprint.imprint_name)) + .map_err(|e| e.into()) + }) + })?; + write_element_block("Publisher", w, |w| { + // 01 Publisher + write_element_block("PublishingRole", w, |w| { + w.write(XmlEvent::Characters("01")).map_err(|e| e.into()) + })?; + write_element_block("PublisherName", w, |w| { + w.write(XmlEvent::Characters(&self.imprint.publisher.publisher_name)) + .map_err(|e| e.into()) + }) + })?; + for funding in &self.fundings { + XmlElementBlock::::xml_element(funding, w).ok(); + } + if let Some(place) = &self.place { + write_element_block("CityOfPublication", w, |w| { + w.write(XmlEvent::Characters(place)).map_err(|e| e.into()) + })?; + } + XmlElement::::xml_element(&self.work_status, w)?; + write_element_block("PublishingDate", w, |w| { + let mut date_fmt: HashMap<&str, &str> = HashMap::new(); + date_fmt.insert("dateformat", "00"); // 00 YYYYMMDD + + write_element_block("PublishingDateRole", w, |w| { + // 01 Publication date + w.write(XmlEvent::Characters("01")).map_err(|e| e.into()) + })?; + // dateformat="00" YYYYMMDD + write_full_element_block("Date", None, Some(date_fmt), w, |w| { + w.write(XmlEvent::Characters( + &self.publication_date.unwrap().format("%Y%m%d").to_string(), + )) + .map_err(|e| e.into()) + }) + })?; + write_element_block("SalesRights", w, |w| { + // 02 For sale with non-exclusive rights in the specified countries or territories + write_element_block("SalesRightsType", w, |w| { + w.write(XmlEvent::Characters("02")).map_err(|e| e.into()) + })?; + write_element_block("Territory", w, |w| { + write_element_block("RegionsIncluded", w, |w| { + w.write(XmlEvent::Characters("WORLD")).map_err(|e| e.into()) + }) + }) + }) + })?; + if !isbns.is_empty() { + write_element_block("RelatedMaterial", w, |w| { + for isbn in &isbns { + write_element_block("RelatedProduct", w, |w| { + // 06 Alternative format + write_element_block("ProductRelationCode", w, |w| { + w.write(XmlEvent::Characters("06")).map_err(|e| e.into()) + })?; + write_element_block("ProductIdentifier", w, |w| { + // 15 ISBN-13 + write_element_block("ProductIDType", w, |w| { + w.write(XmlEvent::Characters("15")).map_err(|e| e.into()) + })?; + write_element_block("IDValue", w, |w| { + w.write(XmlEvent::Characters(isbn)).map_err(|e| e.into()) + }) + }) + })?; + } + Ok(()) + })?; + } + write_element_block("ProductSupply", w, |w| { + write_element_block("Market", w, |w| { + write_element_block("Territory", w, |w| { + write_element_block("RegionsIncluded", w, |w| { + w.write(XmlEvent::Characters("WORLD")).map_err(|e| e.into()) + }) + }) + })?; + let mut supplies: HashMap = HashMap::new(); + supplies.insert( + // Main publication's canonical location is guaranteed to have a full text URL + main_publication + .locations + .iter() + .find(|l| l.canonical) + .and_then(|l| l.full_text_url.clone()) + .unwrap(), + ( + "29".to_string(), + "Publisher's website: download the title".to_string(), + ), + ); + if let Some(landing_page) = &self.landing_page { + supplies.insert( + landing_page.to_string(), + ( + "01".to_string(), + 
"Publisher's website: web shop".to_string(), + ), + ); + } + for (url, description) in supplies.iter() { + write_element_block("SupplyDetail", w, |w| { + write_element_block("Supplier", w, |w| { + // 09 Publisher to end-customers + write_element_block("SupplierRole", w, |w| { + w.write(XmlEvent::Characters("09")).map_err(|e| e.into()) + })?; + write_element_block("SupplierName", w, |w| { + w.write(XmlEvent::Characters( + &self.imprint.publisher.publisher_name, + )) + .map_err(|e| e.into()) + })?; + write_element_block("Website", w, |w| { + // 01 Publisher’s corporate website + write_element_block("WebsiteRole", w, |w| { + w.write(XmlEvent::Characters(&description.0)) + .map_err(|e| e.into()) + })?; + write_element_block("WebsiteDescription", w, |w| { + w.write(XmlEvent::Characters(&description.1)) + .map_err(|e| e.into()) + })?; + write_element_block("WebsiteLink", w, |w| { + w.write(XmlEvent::Characters(url)).map_err(|e| e.into()) + }) + }) + })?; + // 20 Available from us (form of availability unspecified) + // (99 Contact supplier is not supported by OverDrive) + write_element_block("ProductAvailability", w, |w| { + w.write(XmlEvent::Characters("20")).map_err(|e| e.into()) + })?; + write_element_block("SupplyDate", w, |w| { + let mut date_fmt: HashMap<&str, &str> = HashMap::new(); + date_fmt.insert("dateformat", "00"); // 00 YYYYMMDD + + write_element_block("SupplyDateRole", w, |w| { + // 02 Embargo Date + w.write(XmlEvent::Characters("02")).map_err(|e| e.into()) + })?; + // dateformat="00" YYYYMMDD + write_full_element_block("Date", None, Some(date_fmt), w, |w| { + w.write(XmlEvent::Characters( + &self + .publication_date + .unwrap() + .format("%Y%m%d") + .to_string(), + )) + .map_err(|e| e.into()) + }) + })?; + // Price element is required for OverDrive. Assume the USD price is canonical. 
+ if let Some(price) = main_publication + .prices + .iter() + .find(|pr| { + // Thoth database only accepts non-zero prices + pr.currency_code.eq(&CurrencyCode::USD) + }) + .map(|pr| pr.unit_price) + { + let formatted_price = format!("{:.2}", price); + write_element_block("Price", w, |w| { + // 02 RRP including tax + write_element_block("PriceType", w, |w| { + w.write(XmlEvent::Characters("02")).map_err(|e| e.into()) + })?; + write_element_block("PriceAmount", w, |w| { + w.write(XmlEvent::Characters(&formatted_price)) + .map_err(|e| e.into()) + })?; + write_element_block("CurrencyCode", w, |w| { + w.write(XmlEvent::Characters("USD")).map_err(|e| e.into()) + })?; + write_element_block("Territory", w, |w| { + write_element_block("RegionsIncluded", w, |w| { + w.write(XmlEvent::Characters("WORLD")) + .map_err(|e| e.into()) + }) + }) + }) + } else { + Err(ThothError::IncompleteMetadataRecord( + "onix_3.0::overdrive".to_string(), + "No USD price found".to_string(), + )) + } + })?; + } + Ok(()) + }) + }) + } else { + Err(ThothError::IncompleteMetadataRecord( + "onix_3.0::overdrive".to_string(), + "No priced EPUB or PDF URL".to_string(), + )) + } + } +} + +fn get_publications_data( + publications: &[WorkPublications], + main_publication: &WorkPublications, +) -> (String, Vec) { + let mut main_isbn = "".to_string(); + let mut isbns: Vec = Vec::new(); + + for publication in publications { + if let Some(isbn) = &publication.isbn.as_ref().map(|i| i.to_string()) { + isbns.push(isbn.replace('-', "")); + // The default product ISBN is the main publication's (EPUB or PDF) + if publication + .publication_id + .eq(&main_publication.publication_id) + { + main_isbn = isbn.replace('-', ""); + } + // If the main publication has no ISBN, use either the PDF's or the paperback's + // (no guarantee as to which will be chosen) + if (publication.publication_type.eq(&PublicationType::PDF) + || publication.publication_type.eq(&PublicationType::PAPERBACK)) + && main_isbn.is_empty() + { + main_isbn = isbn.replace('-', ""); + } + } + } + + (main_isbn, isbns) +} + +impl XmlElement for WorkStatus { + const ELEMENT: &'static str = "PublishingStatus"; + + fn value(&self) -> &'static str { + match self { + WorkStatus::UNSPECIFIED => "00", + WorkStatus::CANCELLED => "01", + WorkStatus::FORTHCOMING => "02", + WorkStatus::POSTPONED_INDEFINITELY => "03", + WorkStatus::ACTIVE => "04", + WorkStatus::NO_LONGER_OUR_PRODUCT => "05", + WorkStatus::OUT_OF_STOCK_INDEFINITELY => "06", + WorkStatus::OUT_OF_PRINT => "07", + WorkStatus::INACTIVE => "08", + WorkStatus::UNKNOWN => "09", + WorkStatus::REMAINDERED => "10", + WorkStatus::WITHDRAWN_FROM_SALE => "11", + WorkStatus::RECALLED => "15", + WorkStatus::Other(_) => unreachable!(), + } + } +} + +impl XmlElement for SubjectType { + const ELEMENT: &'static str = "SubjectSchemeIdentifier"; + + fn value(&self) -> &'static str { + match self { + SubjectType::BIC => "12", + SubjectType::BISAC => "10", + SubjectType::KEYWORD => "20", + SubjectType::LCC => "04", + SubjectType::THEMA => "93", + SubjectType::CUSTOM => "B2", + SubjectType::Other(_) => unreachable!(), + } + } +} + +impl XmlElement for LanguageRelation { + const ELEMENT: &'static str = "LanguageRole"; + + fn value(&self) -> &'static str { + match self { + LanguageRelation::ORIGINAL => "01", + LanguageRelation::TRANSLATED_FROM => "02", + LanguageRelation::TRANSLATED_INTO => "01", + LanguageRelation::Other(_) => unreachable!(), + } + } +} + +impl XmlElement for ContributionType { + const ELEMENT: &'static str = "ContributorRole"; + + fn 
value(&self) -> &'static str { + match self { + ContributionType::AUTHOR => "A01", + ContributionType::EDITOR => "B01", + ContributionType::TRANSLATOR => "B06", + ContributionType::PHOTOGRAPHER => "A13", + ContributionType::ILUSTRATOR => "A12", + ContributionType::MUSIC_EDITOR => "B25", + ContributionType::FOREWORD_BY => "A23", + ContributionType::INTRODUCTION_BY => "A24", + ContributionType::AFTERWORD_BY => "A19", + ContributionType::PREFACE_BY => "A15", + ContributionType::Other(_) => unreachable!(), + } + } +} + +impl XmlElementBlock for WorkContributions { + fn xml_element(&self, w: &mut EventWriter) -> ThothResult<()> { + write_element_block("Contributor", w, |w| { + write_element_block("SequenceNumber", w, |w| { + w.write(XmlEvent::Characters(&self.contribution_ordinal.to_string())) + .map_err(|e| e.into()) + })?; + XmlElement::::xml_element(&self.contribution_type, w)?; + + if let Some(orcid) = &self.contributor.orcid { + write_element_block("NameIdentifier", w, |w| { + write_element_block("NameIDType", w, |w| { + w.write(XmlEvent::Characters("21")).map_err(|e| e.into()) + })?; + write_element_block("IDValue", w, |w| { + w.write(XmlEvent::Characters(&orcid.to_string())) + .map_err(|e| e.into()) + }) + })?; + } + if let Some(first_name) = &self.first_name { + write_element_block("NamesBeforeKey", w, |w| { + w.write(XmlEvent::Characters(first_name)) + .map_err(|e| e.into()) + })?; + write_element_block("KeyNames", w, |w| { + w.write(XmlEvent::Characters(&self.last_name)) + .map_err(|e| e.into()) + })?; + } else { + write_element_block("PersonName", w, |w| { + w.write(XmlEvent::Characters(&self.full_name)) + .map_err(|e| e.into()) + })?; + } + Ok(()) + }) + } +} + +impl XmlElementBlock for WorkLanguages { + fn xml_element(&self, w: &mut EventWriter) -> ThothResult<()> { + write_element_block("Language", w, |w| { + XmlElement::::xml_element(&self.language_relation, w).ok(); + // not worth implementing XmlElement for LanguageCode as all cases would + // need to be exhaustively matched and the codes are equivalent anyway + write_element_block("LanguageCode", w, |w| { + w.write(XmlEvent::Characters( + &self.language_code.to_string().to_lowercase(), + )) + .map_err(|e| e.into()) + }) + }) + } +} + +impl XmlElementBlock for WorkIssues { + fn xml_element(&self, w: &mut EventWriter) -> ThothResult<()> { + write_element_block("Collection", w, |w| { + // 10 Publisher collection (e.g. 
series) + write_element_block("CollectionType", w, |w| { + w.write(XmlEvent::Characters("10")).map_err(|e| e.into()) + })?; + write_element_block("CollectionIdentifier", w, |w| { + // 02 ISSN + write_element_block("CollectionIDType", w, |w| { + w.write(XmlEvent::Characters("02")).map_err(|e| e.into()) + })?; + write_element_block("IDValue", w, |w| { + w.write(XmlEvent::Characters( + &self.series.issn_digital.replace('-', ""), + )) + .map_err(|e| e.into()) + }) + })?; + write_element_block("TitleDetail", w, |w| { + // 01 Cover title (serial) + write_element_block("TitleType", w, |w| { + w.write(XmlEvent::Characters("01")).map_err(|e| e.into()) + })?; + write_element_block("TitleElement", w, |w| { + // 02 Collection level + write_element_block("TitleElementLevel", w, |w| { + w.write(XmlEvent::Characters("02")).map_err(|e| e.into()) + })?; + write_element_block("PartNumber", w, |w| { + w.write(XmlEvent::Characters(&self.issue_ordinal.to_string())) + .map_err(|e| e.into()) + })?; + write_element_block("TitleText", w, |w| { + w.write(XmlEvent::Characters(&self.series.series_name)) + .map_err(|e| e.into()) + }) + }) + }) + }) + } +} + +impl XmlElementBlock for WorkFundings { + fn xml_element(&self, w: &mut EventWriter) -> ThothResult<()> { + write_element_block("Publisher", w, |w| { + // 16 Funding body + write_element_block("PublishingRole", w, |w| { + w.write(XmlEvent::Characters("16")).map_err(|e| e.into()) + })?; + write_element_block("PublisherName", w, |w| { + w.write(XmlEvent::Characters(&self.institution.institution_name)) + .map_err(|e| e.into()) + })?; + let mut identifiers: HashMap = HashMap::new(); + if let Some(program) = &self.program { + identifiers.insert("programname".to_string(), program.to_string()); + } + if let Some(project_name) = &self.project_name { + identifiers.insert("projectname".to_string(), project_name.to_string()); + } + if let Some(grant_number) = &self.grant_number { + identifiers.insert("grantnumber".to_string(), grant_number.to_string()); + } + if !identifiers.is_empty() { + write_element_block("Funding", w, |w| { + for (typename, value) in &identifiers { + write_element_block("FundingIdentifier", w, |w| { + // 01 Proprietary + write_element_block("FundingIDType", w, |w| { + w.write(XmlEvent::Characters("01")).map_err(|e| e.into()) + })?; + write_element_block("IDTypeName", w, |w| { + w.write(XmlEvent::Characters(typename)) + .map_err(|e| e.into()) + })?; + write_element_block("IDValue", w, |w| { + w.write(XmlEvent::Characters(value)).map_err(|e| e.into()) + }) + })?; + } + Ok(()) + })?; + } + Ok(()) + }) + } +} + +#[cfg(test)] +mod tests { + // Testing note: XML nodes cannot be guaranteed to be output in the same order every time + // We therefore rely on `assert!(contains)` rather than `assert_eq!` + use super::*; + use std::str::FromStr; + use thoth_api::model::Doi; + use thoth_api::model::Isbn; + use thoth_api::model::Orcid; + use thoth_client::{ + ContributionType, LanguageCode, LanguageRelation, LocationPlatform, PublicationType, + WorkContributionsContributor, WorkImprint, WorkImprintPublisher, WorkIssuesSeries, + WorkPublicationsLocations, WorkPublicationsPrices, WorkStatus, WorkSubjects, WorkType, + }; + use uuid::Uuid; + + fn generate_test_output( + expect_ok: bool, + input: &impl XmlElementBlock, + ) -> String { + // Helper function based on `XmlSpecification::generate` + let mut buffer = Vec::new(); + let mut writer = xml::writer::EmitterConfig::new() + .perform_indent(true) + .create_writer(&mut buffer); + let wrapped_output = 
XmlElementBlock::::xml_element(input, &mut writer) + .map(|_| buffer) + .and_then(|xml| { + String::from_utf8(xml) + .map_err(|_| ThothError::InternalError("Could not parse XML".to_string())) + }); + if expect_ok { + assert!(wrapped_output.is_ok()); + wrapped_output.unwrap() + } else { + assert!(wrapped_output.is_err()); + wrapped_output.unwrap_err().to_string() + } + } + + #[test] + fn test_onix3_overdrive_contributions() { + let mut test_contribution = WorkContributions { + contribution_type: ContributionType::AUTHOR, + first_name: Some("Author".to_string()), + last_name: "1".to_string(), + full_name: "Author 1".to_string(), + main_contribution: true, + biography: None, + contribution_ordinal: 1, + contributor: WorkContributionsContributor { + orcid: Some(Orcid::from_str("https://orcid.org/0000-0002-0000-0001").unwrap()), + }, + affiliations: vec![], + }; + + // Test standard output + let output = generate_test_output(true, &test_contribution); + assert!(output.contains(r#" 1"#)); + assert!(output.contains(r#" A01"#)); + assert!(output.contains(r#" "#)); + assert!(output.contains(r#" 21"#)); + assert!(output.contains(r#" 0000-0002-0000-0001"#)); + assert!(output.contains(r#" "#)); + // Given name is output as NamesBeforeKey and family name as KeyNames + assert!(output.contains(r#" Author"#)); + assert!(output.contains(r#" 1"#)); + // PersonName is not output when given name is supplied + assert!(!output.contains(r#" Author 1"#)); + + // Change all possible values to test that output is updated + test_contribution.contribution_type = ContributionType::EDITOR; + test_contribution.contribution_ordinal = 2; + test_contribution.contributor.orcid = None; + test_contribution.first_name = None; + let output = generate_test_output(true, &test_contribution); + assert!(output.contains(r#" 2"#)); + assert!(output.contains(r#" B01"#)); + // No ORCID supplied + assert!(!output.contains(r#" "#)); + assert!(!output.contains(r#" 21"#)); + assert!(!output.contains(r#" 0000-0002-0000-0001"#)); + assert!(!output.contains(r#" "#)); + // No given name supplied, so PersonName is output instead of KeyNames and NamesBeforeKey + assert!(!output.contains(r#" Author"#)); + assert!(!output.contains(r#" 1"#)); + assert!(output.contains(r#" Author 1"#)); + + // Test all remaining contributor roles + test_contribution.contribution_type = ContributionType::TRANSLATOR; + let output = generate_test_output(true, &test_contribution); + assert!(output.contains(r#" B06"#)); + test_contribution.contribution_type = ContributionType::PHOTOGRAPHER; + let output = generate_test_output(true, &test_contribution); + assert!(output.contains(r#" A13"#)); + test_contribution.contribution_type = ContributionType::ILUSTRATOR; + let output = generate_test_output(true, &test_contribution); + assert!(output.contains(r#" A12"#)); + test_contribution.contribution_type = ContributionType::MUSIC_EDITOR; + let output = generate_test_output(true, &test_contribution); + assert!(output.contains(r#" B25"#)); + test_contribution.contribution_type = ContributionType::FOREWORD_BY; + let output = generate_test_output(true, &test_contribution); + assert!(output.contains(r#" A23"#)); + test_contribution.contribution_type = ContributionType::INTRODUCTION_BY; + let output = generate_test_output(true, &test_contribution); + assert!(output.contains(r#" A24"#)); + test_contribution.contribution_type = ContributionType::AFTERWORD_BY; + let output = generate_test_output(true, &test_contribution); + assert!(output.contains(r#" A19"#)); + 
+        test_contribution.contribution_type = ContributionType::PREFACE_BY;
+        let output = generate_test_output(true, &test_contribution);
+        assert!(output.contains(r#" A15"#));
+    }
+
+    #[test]
+    fn test_onix3_overdrive_languages() {
+        let mut test_language = WorkLanguages {
+            language_code: LanguageCode::SPA,
+            language_relation: LanguageRelation::TRANSLATED_FROM,
+            main_language: true,
+        };
+
+        // Test standard output
+        let output = generate_test_output(true, &test_language);
+        assert!(output.contains(r#""#));
+        assert!(output.contains(r#" 02"#));
+        assert!(output.contains(r#" spa"#));
+
+        // Change all possible values to test that output is updated
+        test_language.language_code = LanguageCode::WEL;
+        for language_relation in [
+            LanguageRelation::ORIGINAL,
+            LanguageRelation::TRANSLATED_INTO,
+        ] {
+            test_language.language_relation = language_relation;
+            let output = generate_test_output(true, &test_language);
+            assert!(output.contains(r#""#));
+            assert!(output.contains(r#" 01"#));
+            assert!(output.contains(r#" wel"#));
+        }
+    }
+
+    #[test]
+    fn test_onix3_overdrive_issues() {
+        let mut test_issue = WorkIssues {
+            issue_ordinal: 1,
+            series: WorkIssuesSeries {
+                series_type: thoth_client::SeriesType::JOURNAL,
+                series_name: "Name of series".to_string(),
+                issn_print: "1234-5678".to_string(),
+                issn_digital: "8765-4321".to_string(),
+                series_url: None,
+                series_description: None,
+                series_cfp_url: None,
+            },
+        };
+
+        // Test standard output
+        let output = generate_test_output(true, &test_issue);
+        assert!(output.contains(r#""#));
+        assert!(output.contains(r#" 10"#));
+        assert!(output.contains(r#" "#));
+        assert!(output.contains(r#" 02"#));
+        assert!(output.contains(r#" 87654321"#));
+        assert!(output.contains(r#" "#));
+        assert!(output.contains(r#" 01"#));
+        assert!(output.contains(r#" "#));
+        assert!(output.contains(r#" 02"#));
+        assert!(output.contains(r#" 1"#));
+        assert!(output.contains(r#" Name of series"#));
+
+        // Change all possible values to test that output is updated
+        test_issue.issue_ordinal = 2;
+        test_issue.series.series_name = "Different series".to_string();
+        test_issue.series.issn_digital = "1111-2222".to_string();
+        let output = generate_test_output(true, &test_issue);
+        assert!(output.contains(r#""#));
+        assert!(output.contains(r#" 10"#));
+        assert!(output.contains(r#" "#));
+        assert!(output.contains(r#" 02"#));
+        assert!(output.contains(r#" 11112222"#));
+        assert!(output.contains(r#" "#));
+        assert!(output.contains(r#" 01"#));
+        assert!(output.contains(r#" "#));
+        assert!(output.contains(r#" 02"#));
+        assert!(output.contains(r#" 2"#));
+        assert!(output.contains(r#" Different series"#));
+    }
+
+    #[test]
+    fn test_onix3_overdrive_fundings() {
+        let mut test_funding = WorkFundings {
+            program: Some("Name of program".to_string()),
+            project_name: Some("Name of project".to_string()),
+            project_shortname: None,
+            grant_number: Some("Number of grant".to_string()),
+            jurisdiction: None,
+            institution: thoth_client::WorkFundingsInstitution {
+                institution_name: "Name of institution".to_string(),
+                institution_doi: None,
+                ror: None,
+                country_code: None,
+            },
+        };
+
+        // Test standard output
+        let output = generate_test_output(true, &test_funding);
+        assert!(output.contains(r#""#));
+        assert!(output.contains(r#" 16"#));
+        assert!(output.contains(r#" Name of institution"#));
+        assert!(output.contains(r#" "#));
+        assert!(output.contains(r#" "#));
+        assert!(output.contains(r#" 01"#));
+        assert!(output.contains(r#" programname"#));
+        assert!(output.contains(r#" Name of program"#));
assert!(output.contains(r#" projectname"#)); + assert!(output.contains(r#" Name of project"#)); + assert!(output.contains(r#" grantnumber"#)); + assert!(output.contains(r#" Number of grant"#)); + + // Change all possible values to test that output is updated + + test_funding.institution.institution_name = "Different institution".to_string(); + test_funding.program = None; + let output = generate_test_output(true, &test_funding); + assert!(output.contains(r#""#)); + assert!(output.contains(r#" 16"#)); + assert!(output.contains(r#" Different institution"#)); + assert!(output.contains(r#" "#)); + assert!(output.contains(r#" "#)); + assert!(output.contains(r#" 01"#)); + // No program supplied + assert!(!output.contains(r#" programname"#)); + assert!(!output.contains(r#" Name of program"#)); + assert!(output.contains(r#" projectname"#)); + assert!(output.contains(r#" Name of project"#)); + assert!(output.contains(r#" grantnumber"#)); + assert!(output.contains(r#" Number of grant"#)); + + test_funding.project_name = None; + let output = generate_test_output(true, &test_funding); + assert!(output.contains(r#""#)); + assert!(output.contains(r#" 16"#)); + assert!(output.contains(r#" Different institution"#)); + assert!(output.contains(r#" "#)); + assert!(output.contains(r#" "#)); + assert!(output.contains(r#" 01"#)); + // No program supplied + assert!(!output.contains(r#" programname"#)); + assert!(!output.contains(r#" Name of program"#)); + // No project supplied + assert!(!output.contains(r#" projectname"#)); + assert!(!output.contains(r#" Name of project"#)); + assert!(output.contains(r#" grantnumber"#)); + assert!(output.contains(r#" Number of grant"#)); + + test_funding.grant_number = None; + let output = generate_test_output(true, &test_funding); + assert!(output.contains(r#""#)); + assert!(output.contains(r#" 16"#)); + assert!(output.contains(r#" Different institution"#)); + // No program, project or grant supplied, so Funding block is omitted completely + assert!(!output.contains(r#" "#)); + assert!(!output.contains(r#" "#)); + assert!(!output.contains(r#" 01"#)); + assert!(!output.contains(r#" programname"#)); + assert!(!output.contains(r#" Name of program"#)); + assert!(!output.contains(r#" projectname"#)); + assert!(!output.contains(r#" Name of project"#)); + assert!(!output.contains(r#" grantnumber"#)); + assert!(!output.contains(r#" Number of grant"#)); + } + + #[test] + fn test_onix3_overdrive_works() { + let mut test_work = Work { + work_id: Uuid::from_str("00000000-0000-0000-AAAA-000000000001").unwrap(), + work_status: WorkStatus::ACTIVE, + full_title: "Book Title: Book Subtitle".to_string(), + title: "Book Title".to_string(), + subtitle: Some("Book Subtitle".to_string()), + work_type: WorkType::MONOGRAPH, + edition: Some(1), + doi: Some(Doi::from_str("https://doi.org/10.00001/BOOK.0001").unwrap()), + publication_date: Some(chrono::NaiveDate::from_ymd(1999, 12, 31)), + license: Some("https://creativecommons.org/licenses/by/4.0/".to_string()), + copyright_holder: "Author 1; Author 2".to_string(), + short_abstract: None, + long_abstract: Some("Lorem ipsum dolor sit amet".to_string()), + general_note: None, + place: Some("León, Spain".to_string()), + page_count: Some(334), + page_breakdown: None, + first_page: None, + last_page: None, + page_interval: None, + image_count: None, + table_count: None, + audio_count: None, + video_count: None, + landing_page: Some("https://www.book.com".to_string()), + toc: Some("1. 
Chapter 1".to_string()), + lccn: None, + oclc: None, + cover_url: Some("https://www.book.com/cover".to_string()), + cover_caption: None, + imprint: WorkImprint { + imprint_name: "OA Editions Imprint".to_string(), + publisher: WorkImprintPublisher { + publisher_name: "OA Editions".to_string(), + publisher_url: None, + }, + }, + issues: vec![], + contributions: vec![], + languages: vec![WorkLanguages { + language_code: LanguageCode::SPA, + language_relation: LanguageRelation::TRANSLATED_FROM, + main_language: true, + }], + publications: vec![WorkPublications { + publication_id: Uuid::from_str("00000000-0000-0000-DDDD-000000000004").unwrap(), + publication_type: PublicationType::PDF, + isbn: Some(Isbn::from_str("978-3-16-148410-0").unwrap()), + width_mm: None, + width_cm: None, + width_in: None, + height_mm: None, + height_cm: None, + height_in: None, + depth_mm: None, + depth_cm: None, + depth_in: None, + weight_g: None, + weight_oz: None, + prices: vec![ + WorkPublicationsPrices { + currency_code: CurrencyCode::EUR, + unit_price: 5.95, + }, + WorkPublicationsPrices { + currency_code: CurrencyCode::GBP, + unit_price: 4.95, + }, + WorkPublicationsPrices { + currency_code: CurrencyCode::USD, + unit_price: 8.0, + }, + ], + locations: vec![WorkPublicationsLocations { + landing_page: Some("https://www.book.com/ebook_landing".to_string()), + full_text_url: Some("https://www.book.com/ebook_fulltext".to_string()), + location_platform: LocationPlatform::OTHER, + canonical: true, + }], + }], + subjects: vec![ + WorkSubjects { + subject_code: "AAB".to_string(), + subject_type: SubjectType::BIC, + subject_ordinal: 1, + }, + WorkSubjects { + subject_code: "AAA000000".to_string(), + subject_type: SubjectType::BISAC, + subject_ordinal: 2, + }, + WorkSubjects { + subject_code: "JA85".to_string(), + subject_type: SubjectType::LCC, + subject_ordinal: 3, + }, + WorkSubjects { + subject_code: "JWA".to_string(), + subject_type: SubjectType::THEMA, + subject_ordinal: 4, + }, + WorkSubjects { + subject_code: "keyword1".to_string(), + subject_type: SubjectType::KEYWORD, + subject_ordinal: 5, + }, + WorkSubjects { + subject_code: "custom1".to_string(), + subject_type: SubjectType::CUSTOM, + subject_ordinal: 6, + }, + ], + fundings: vec![], + relations: vec![], + }; + + // Test standard output + let output = generate_test_output(true, &test_work); + assert!(output.contains(r#""#)); + assert!(output.contains( + r#" urn:uuid:00000000-0000-0000-aaaa-000000000001"# + )); + assert!(output.contains(r#" 03"#)); + assert!(output.contains(r#" 01"#)); + assert!(output.contains(r#" "#)); + assert!(output.contains(r#" 01"#)); + assert!(output + .contains(r#" urn:uuid:00000000-0000-0000-aaaa-000000000001"#)); + assert!(output.contains(r#" 15"#)); + assert!(output.contains(r#" 9783161484100"#)); + assert!(output.contains(r#" 06"#)); + assert!(output.contains(r#" 10.00001/BOOK.0001"#)); + assert!(output.contains(r#" "#)); + assert!(output.contains(r#" 00"#)); + assert!(output.contains(r#" EB"#)); + assert!(output.contains(r#" E107"#)); + assert!(output.contains(r#" 10"#)); + assert!(output.contains(r#" "#)); + assert!( + output.contains(r#" Creative Commons License"#) + ); + assert!(output.contains(r#" "#)); + assert!( + output.contains(r#" 02"#) + ); + assert!(output.contains(r#" https://creativecommons.org/licenses/by/4.0/"#)); + assert!(output.contains(r#" "#)); + assert!(output.contains(r#" 01"#)); + assert!(output.contains(r#" "#)); + assert!(output.contains(r#" 01"#)); + assert!(output.contains(r#" Book Title"#)); + 
assert!(output.contains(r#" Book Subtitle"#)); + assert!(output.contains(r#" "#)); + assert!(output.contains(r#" 02"#)); + assert!(output.contains(r#" spa"#)); + assert!(output.contains(r#" "#)); + assert!(output.contains(r#" 00"#)); + assert!(output.contains(r#" 334"#)); + assert!(output.contains(r#" 03"#)); + assert!(output.contains(r#" "#)); + assert!(output.contains(r#" 12"#)); + assert!(output.contains(r#" AAB"#)); + assert!(output.contains(r#" 10"#)); + assert!(output.contains(r#" AAA000000"#)); + assert!(output.contains(r#" 04"#)); + assert!(output.contains(r#" JA85"#)); + assert!(output.contains(r#" 93"#)); + assert!(output.contains(r#" JWA"#)); + assert!(output.contains(r#" 20"#)); + assert!(output.contains(r#" keyword1"#)); + assert!(output.contains(r#" B2"#)); + assert!(output.contains(r#" custom1"#)); + assert!(output.contains(r#" "#)); + assert!(output.contains(r#" 01"#)); + assert!(output.contains(r#" 06"#)); + assert!(output.contains(r#" "#)); + assert!(output.contains(r#" "#)); + assert!(output.contains(r#" 03"#)); + assert!(output.contains(r#" 00"#)); + assert!(output.contains(r#" Lorem ipsum dolor sit amet"#)); + assert!(output.contains(r#" 04"#)); + assert!(output.contains(r#" 1. Chapter 1"#)); + assert!(output.contains(r#" "#)); + assert!(output.contains(r#" 01"#)); + assert!(output.contains(r#" 03"#)); + assert!(output.contains(r#" "#)); + assert!(output.contains(r#" 02"#)); + assert!( + output.contains(r#" https://www.book.com/cover"#) + ); + assert!(output.contains(r#" "#)); + assert!(output.contains(r#" "#)); + assert!(output.contains(r#" OA Editions Imprint"#)); + assert!(output.contains(r#" "#)); + assert!(output.contains(r#" 01"#)); + assert!(output.contains(r#" OA Editions"#)); + assert!(output.contains(r#" León, Spain"#)); + assert!(output.contains(r#" 04"#)); + assert!(output.contains(r#" "#)); + assert!(output.contains(r#" 01"#)); + assert!(output.contains(r#" 19991231"#)); + assert!(output.contains(r#" "#)); + assert!(output.contains(r#" 02"#)); + assert!(output.contains(r#" "#)); + assert!(output.contains(r#" WORLD"#)); + assert!(output.contains(r#" "#)); + assert!(output.contains(r#" 06"#)); + assert!(output.contains(r#" "#)); + assert!(output.contains(r#" 15"#)); + assert!(output.contains(r#" 9783161484100"#)); + assert!(output.contains(r#" "#)); + assert!(output.contains(r#" "#)); + assert!(output.contains(r#" "#)); + assert!(output.contains(r#" WORLD"#)); + assert!(output.contains(r#" "#)); + assert!(output.contains(r#" "#)); + assert!(output.contains(r#" 09"#)); + assert!(output.contains(r#" OA Editions"#)); + assert!(output.contains(r#" "#)); + assert!(output.contains(r#" 01"#)); + assert!(output.contains( + r#" Publisher's website: web shop"# + )); + assert!(output.contains(r#" https://www.book.com"#)); + assert!(output.contains(r#" 20"#)); + assert!(output.contains(r#" "#)); + assert!(output.contains(r#" 02"#)); + assert!(output.contains(r#" 19991231"#)); + assert!(output.contains(r#" "#)); + assert!(output.contains(r#" 02"#)); + assert!(output.contains(r#" 8.00"#)); + assert!(output.contains(r#" USD"#)); + assert!(output.contains(r#" "#)); + assert!(output.contains(r#" WORLD"#)); + assert!(output.contains(r#" 29"#)); + assert!(output.contains(r#" Publisher's website: download the title"#)); + assert!(output.contains( + r#" https://www.book.com/ebook_fulltext"# + )); + + // Remove/change some values to test (non-)output of optional blocks + test_work.doi = None; + test_work.license = None; + test_work.subtitle = None; + test_work.page_count = None; 
+        test_work.toc = None;
+        test_work.cover_url = None;
+        test_work.place = None;
+        test_work.landing_page = None;
+        test_work.publications[0].publication_type = PublicationType::EPUB;
+        test_work.subjects.clear();
+        let output = generate_test_output(true, &test_work);
+        // Ebook type changed
+        assert!(!output.contains(r#" E107"#));
+        assert!(output.contains(r#" E101"#));
+        // No DOI supplied
+        assert!(!output.contains(r#" 06"#));
+        assert!(!output.contains(r#" 10.00001/BOOK.0001"#));
+        // No licence supplied
+        assert!(!output.contains(r#" "#));
+        assert!(!output
+            .contains(r#" Creative Commons License"#));
+        assert!(!output.contains(r#" "#));
+        assert!(!output
+            .contains(r#" 02"#));
+        assert!(!output.contains(r#" https://creativecommons.org/licenses/by/4.0/"#));
+        // No subtitle supplied (within Thoth UI this would automatically update full_title)
+        assert!(!output.contains(r#" Book Subtitle"#));
+        // No page count supplied
+        assert!(!output.contains(r#" "#));
+        assert!(!output.contains(r#" 00"#));
+        assert!(!output.contains(r#" 334"#));
+        assert!(!output.contains(r#" 03"#));
+        // No table of contents or cover URL supplied: CollateralDetail block only contains long abstract
+        assert!(output.contains(r#" "#));
+        assert!(output.contains(r#" "#));
+        assert!(output.contains(r#" 03"#));
+        assert!(output.contains(r#" 00"#));
+        assert!(output.contains(r#" Lorem ipsum dolor sit amet"#));
+        assert!(!output.contains(r#" 04"#));
+        assert!(!output.contains(r#" 1. Chapter 1"#));
+        assert!(!output.contains(r#" "#));
+        assert!(!output.contains(r#" 01"#));
+        assert!(!output.contains(r#" 03"#));
+        assert!(!output.contains(r#" "#));
+        assert!(!output.contains(r#" 02"#));
+        assert!(!output
+            .contains(r#" "https://www.book.com/cover""#));
+        // No place supplied
+        assert!(!output.contains(r#" León, Spain"#));
+        // No landing page supplied: only one SupplyDetail block, linking to ebook download
+        assert!(!output.contains(r#" 01"#));
+        assert!(!output.contains(
+            r#" Publisher's website: web shop"#
+        ));
+        assert!(!output.contains(r#" https://www.book.com"#));
+        // No subjects supplied
+        assert!(!output.contains(r#" "#));
+        assert!(!output.contains(r#" 12"#));
+        assert!(!output.contains(r#" AAB"#));
+        assert!(!output.contains(r#" 10"#));
+        assert!(!output.contains(r#" AAA000000"#));
+        assert!(!output.contains(r#" 04"#));
+        assert!(!output.contains(r#" JA85"#));
+        assert!(!output.contains(r#" 93"#));
+        assert!(!output.contains(r#" JWA"#));
+        assert!(!output.contains(r#" 20"#));
+        assert!(!output.contains(r#" keyword1"#));
+        assert!(!output.contains(r#" B2"#));
+        assert!(!output.contains(r#" custom1"#));
+
+        // Remove the only language: result is error
+        test_work.languages.clear();
+        let output = generate_test_output(false, &test_work);
+        assert_eq!(
+            output,
+            "Could not generate onix_3.0::overdrive: Missing Language Code(s)".to_string()
+        );
+
+        // Replace language but remove long abstract: result is error
+        test_work.languages = vec![WorkLanguages {
+            language_code: LanguageCode::SPA,
+            language_relation: LanguageRelation::TRANSLATED_FROM,
+            main_language: true,
+        }];
+        test_work.long_abstract = None;
+        let output = generate_test_output(false, &test_work);
+        assert_eq!(
+            output,
+            "Could not generate onix_3.0::overdrive: Missing Long Abstract".to_string()
+        );
+
+        // Replace long abstract but remove publication date: result is error
+        test_work.long_abstract = Some("Lorem ipsum dolor sit amet".to_string());
+        test_work.publication_date = None;
+        let output = generate_test_output(false, &test_work);
+        assert_eq!(
+            output,
+            "Could not generate onix_3.0::overdrive: Missing Publication Date".to_string()
+        );
+
+        // Replace publication date but remove USD price: result is error
+        test_work.publication_date = Some(chrono::NaiveDate::from_ymd(1999, 12, 31));
+        test_work.publications[0].prices.pop();
+        let output = generate_test_output(false, &test_work);
+        assert_eq!(
+            output,
+            "Could not generate onix_3.0::overdrive: No USD price found".to_string()
+        );
+
+        // Replace USD price but remove the only (PDF) publication's only location
+        // Result: error (can't generate OverDrive ONIX without EPUB or PDF URL)
+        test_work.publications[0].prices[0].currency_code = CurrencyCode::USD;
+        test_work.publications[0].locations.clear();
+        let output = generate_test_output(false, &test_work);
+        assert_eq!(
+            output,
+            "Could not generate onix_3.0::overdrive: No priced EPUB or PDF URL".to_string()
+        );
+    }
+}
diff --git a/thoth-export-server/src/xml/onix3_project_muse.rs b/thoth-export-server/src/xml/onix3_project_muse.rs
index b23fc6bc..155c15ad 100644
--- a/thoth-export-server/src/xml/onix3_project_muse.rs
+++ b/thoth-export-server/src/xml/onix3_project_muse.rs
@@ -677,6 +677,7 @@ mod tests {
 
         // Test standard output
         let output = generate_test_output(true, &test_language);
+        assert!(output.contains(r#""#));
         assert!(output.contains(r#" 02"#));
         assert!(output.contains(r#" spa"#));
 
@@ -688,6 +689,7 @@ mod tests {
         ] {
             test_language.language_relation = language_relation;
             let output = generate_test_output(true, &test_language);
+            assert!(output.contains(r#""#));
             assert!(output.contains(r#" 01"#));
             assert!(output.contains(r#" wel"#));
         }