diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md new file mode 100644 index 00000000..d1ba2bc0 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/bug_report.md @@ -0,0 +1,32 @@ +--- +name: Bug report +about: Create a report to help us improve +title: '' +labels: bug +assignees: '' + +--- + +**Describe the bug** +A clear and concise description of what the bug is. + +**To Reproduce** +Steps to reproduce the behavior: +1. Go to '...' +2. Click on '....' +3. Scroll down to '....' +4. See error + +**Expected behavior** +A clear and concise description of what you expected to happen. + +**Screenshots** +If applicable, add screenshots to help explain your problem. + +**Desktop (please complete the following information):** + - OS: [e.g. iOS] + - Browser [e.g. chrome, safari] + - Version [e.g. 22] + +**Additional context** +Add any other context about the problem here. diff --git a/.github/ISSUE_TEMPLATE/feature_request.md b/.github/ISSUE_TEMPLATE/feature_request.md new file mode 100644 index 00000000..9d06944f --- /dev/null +++ b/.github/ISSUE_TEMPLATE/feature_request.md @@ -0,0 +1,20 @@ +--- +name: Feature request +about: Suggest an idea for Thoth +title: '' +labels: feature +assignees: '' + +--- + +**Is your feature request related to a problem? Please describe.** +A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] + +**Describe the solution you'd like** +A clear and concise description of what you want to happen. + +**Describe alternatives you've considered** +A clear and concise description of any alternative solutions or features you've considered. + +**Additional context** +Add any other context or screenshots about the feature request here. diff --git a/CHANGELOG.md b/CHANGELOG.md index 67e2dabd..57ec033d 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,24 @@ All notable changes to thoth will be documented in this file. 
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). +## [[0.5.0]](https://github.com/thoth-pub/thoth/releases/tag/v0.5.0) - 2021-11-28 +### Added + - [#297](https://github.com/thoth-pub/thoth/issues/297) - Implement publication location + +### Changed + - Requirement to Number fields preventing user from entering numbers below 0 for Counts/below 1 for Editions and Ordinals, and sets Contribution Ordinal default to 1 instead of 0 + - [#299](https://github.com/thoth-pub/thoth/pull/299) - Update Project MUSE ONIX subject output logic + - Updated if and else branches to comply with [`rustc 1.56.0`](https://github.com/rust-lang/rust/releases/tag/1.56.0) + +### Fixed + - [#292](https://github.com/thoth-pub/thoth/issues/292) - Cannot unset publication date: error when trying to clear a previously set publication date + - [#295](https://github.com/thoth-pub/thoth/issues/295) - various subforms failing to trim strings before saving (including on mandatory fields which are checked for emptiness) + - Duplicated logic for handling optional field values, simplifying the code and reducing the likelihood of further bugs such as + - Minor issue where some required fields were not marked as "required" (so empty values would be sent to the API and raise an error) + - Issue with subforms where clicking save button bypassed field requirements (so instead of displaying a warning message such as "Please enter a number", invalid values would be sent to the API and raise an error) + - [#310](https://github.com/thoth-pub/thoth/issues/310) - Add jstor specification to formats + + ## [[0.4.7]](https://github.com/thoth-pub/thoth/releases/tag/v0.4.7) - 2021-10-04 ### Added - [#43](https://github.com/thoth-pub/thoth/issues/43), [#49](https://github.com/thoth-pub/thoth/issues/49) - Implement EBSCO Host's ONIX 2.1 specification diff --git a/Cargo.lock b/Cargo.lock index 
96c13bca..9ddaeecb 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -3936,7 +3936,7 @@ dependencies = [ [[package]] name = "thoth" -version = "0.4.7" +version = "0.5.0" dependencies = [ "cargo-husky", "clap", @@ -3951,7 +3951,7 @@ dependencies = [ [[package]] name = "thoth-api" -version = "0.4.7" +version = "0.5.0" dependencies = [ "actix-web", "argon2rs", @@ -3980,7 +3980,7 @@ dependencies = [ [[package]] name = "thoth-api-server" -version = "0.4.7" +version = "0.5.0" dependencies = [ "actix-cors", "actix-identity", @@ -3995,7 +3995,7 @@ dependencies = [ [[package]] name = "thoth-app" -version = "0.4.7" +version = "0.5.0" dependencies = [ "anyhow", "chrono", @@ -4018,7 +4018,7 @@ dependencies = [ [[package]] name = "thoth-app-server" -version = "0.4.7" +version = "0.5.0" dependencies = [ "actix-cors", "actix-web", @@ -4027,7 +4027,7 @@ dependencies = [ [[package]] name = "thoth-client" -version = "0.4.7" +version = "0.5.0" dependencies = [ "chrono", "graphql_client", @@ -4041,7 +4041,7 @@ dependencies = [ [[package]] name = "thoth-errors" -version = "0.4.7" +version = "0.5.0" dependencies = [ "actix-web", "csv", @@ -4056,7 +4056,7 @@ dependencies = [ [[package]] name = "thoth-export-server" -version = "0.4.7" +version = "0.5.0" dependencies = [ "actix-cors", "actix-web", diff --git a/Cargo.toml b/Cargo.toml index cbe02a96..1ff6f735 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "thoth" -version = "0.4.7" +version = "0.5.0" authors = ["Javier Arias ", "Ross Higman "] edition = "2018" license = "Apache-2.0" @@ -16,11 +16,11 @@ maintenance = { status = "actively-developed" } members = ["thoth-api", "thoth-api-server", "thoth-app", "thoth-app-server", "thoth-client", "thoth-errors", "thoth-export-server"] [dependencies] -thoth-api = { version = "0.4.7", path = "thoth-api", features = ["backend"] } -thoth-api-server = { version = "0.4.7", path = "thoth-api-server" } -thoth-app-server = { version = "0.4.7", path = "thoth-app-server" } -thoth-errors 
= { version = "0.4.7", path = "thoth-errors" } -thoth-export-server = { version = "0.4.7", path = "thoth-export-server" } +thoth-api = { version = "0.5.0", path = "thoth-api", features = ["backend"] } +thoth-api-server = { version = "0.5.0", path = "thoth-api-server" } +thoth-app-server = { version = "0.5.0", path = "thoth-app-server" } +thoth-errors = { version = "0.5.0", path = "thoth-errors" } +thoth-export-server = { version = "0.5.0", path = "thoth-export-server" } clap = "2.33.3" dialoguer = "0.7.1" dotenv = "0.9.0" diff --git a/Dockerfile b/Dockerfile index 836434e4..45385f1f 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,4 +1,4 @@ -ARG RUST_IMAGE=rust:1.51.0 +ARG RUST_IMAGE=rust:1.56.0 ARG MUSL_IMAGE=ekidd/rust-musl-builder:1.51.0 FROM ${RUST_IMAGE} as wasm @@ -16,7 +16,7 @@ RUN npm install -g npm@6.14.8 RUN npm install -g n@6.7.0 RUN n 12.19.0 RUN npm install -g rollup@2.28.2 -RUN cargo install wasm-pack +RUN cargo install wasm-pack --version 0.9.1 # Get source COPY . . diff --git a/thoth-api-server/Cargo.toml b/thoth-api-server/Cargo.toml index 6b7506d3..2c1a70d3 100644 --- a/thoth-api-server/Cargo.toml +++ b/thoth-api-server/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "thoth-api-server" -version = "0.4.7" +version = "0.5.0" authors = ["Javier Arias ", "Ross Higman "] edition = "2018" license = "Apache-2.0" @@ -9,8 +9,8 @@ repository = "https://github.com/thoth-pub/thoth" readme = "README.md" [dependencies] -thoth-api = { version = "0.4.7", path = "../thoth-api", features = ["backend"] } -thoth-errors = { version = "0.4.7", path = "../thoth-errors" } +thoth-api = { version = "0.5.0", path = "../thoth-api", features = ["backend"] } +thoth-errors = { version = "0.5.0", path = "../thoth-errors" } actix-web = "3.3.2" actix-cors = "0.5.4" actix-identity = "0.3.1" diff --git a/thoth-api/Cargo.toml b/thoth-api/Cargo.toml index c3789716..01fd41ec 100644 --- a/thoth-api/Cargo.toml +++ b/thoth-api/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "thoth-api" -version = 
"0.4.7" +version = "0.5.0" authors = ["Javier Arias ", "Ross Higman "] edition = "2018" license = "Apache-2.0" @@ -16,7 +16,7 @@ maintenance = { status = "actively-developed" } backend = ["diesel", "diesel-derive-enum", "diesel_migrations", "futures", "actix-web"] [dependencies] -thoth-errors = { version = "0.4.7", path = "../thoth-errors" } +thoth-errors = { version = "0.5.0", path = "../thoth-errors" } actix-web = { version = "3.3.2", optional = true } argon2rs = "0.2.5" isbn2 = "0.4.0" diff --git a/thoth-api/migrations/0.5.0/down.sql b/thoth-api/migrations/0.5.0/down.sql new file mode 100644 index 00000000..8b6ab3bf --- /dev/null +++ b/thoth-api/migrations/0.5.0/down.sql @@ -0,0 +1,39 @@ +ALTER TABLE publication + DROP CONSTRAINT publication_publication_type_work_id_uniq, + ADD COLUMN publication_url TEXT CHECK (publication_url ~* '^[^:]*:\/\/(?:[^\/:]*:[^\/@]*@)?(?:[^\/:.]*\.)+([^:\/]+)'); + +-- Migrate location URLs back into publication table as far as possible before dropping location table: +-- set the landing_page or full_text_url of the canonical location as the main publication_url, +-- then create duplicate publications to store all other location URLs (landing page/full text). +-- Note this will create multiple identical publications if the same URL is re-used across location fields. 
+UPDATE publication + SET publication_url = location.landing_page + FROM location + WHERE publication.publication_id = location.publication_id + AND location.canonical + AND location.landing_page IS NOT NULL; +UPDATE publication + SET publication_url = location.full_text_url + FROM location + WHERE publication.publication_id = location.publication_id + AND location.canonical + AND location.full_text_url IS NOT NULL + AND location.landing_page IS NULL; +INSERT INTO publication(publication_type, work_id, publication_url) + SELECT publication.publication_type, publication.work_id, location.landing_page FROM publication, location + WHERE publication.publication_id = location.publication_id + AND location.landing_page IS NOT NULL + AND NOT location.canonical; +INSERT INTO publication(publication_type, work_id, publication_url) + SELECT publication.publication_type, publication.work_id, location.full_text_url FROM publication, location + WHERE publication.publication_id = location.publication_id + AND location.full_text_url IS NOT NULL + AND ( + NOT location.canonical + OR (location.canonical AND location.landing_page IS NOT NULL) + ); + +DROP TABLE location_history; +DROP TRIGGER set_updated_at ON location; +DROP TABLE location; +DROP TYPE IF EXISTS location_platform; diff --git a/thoth-api/migrations/0.5.0/up.sql b/thoth-api/migrations/0.5.0/up.sql new file mode 100644 index 00000000..8081eb81 --- /dev/null +++ b/thoth-api/migrations/0.5.0/up.sql @@ -0,0 +1,363 @@ +CREATE TYPE location_platform AS ENUM ( + 'Project MUSE', + 'OAPEN', + 'DOAB', + 'JSTOR', + 'EBSCO Host', + 'OCLC KB', + 'ProQuest KB', + 'ProQuest ExLibris', + 'EBSCO KB', + 'JISC KB', + 'Other' +); + +CREATE TABLE location ( + location_id UUID PRIMARY KEY DEFAULT uuid_generate_v4(), + publication_id UUID NOT NULL REFERENCES publication(publication_id) ON DELETE CASCADE, + landing_page TEXT CHECK (landing_page ~* '^[^:]*:\/\/(?:[^\/:]*:[^\/@]*@)?(?:[^\/:.]*\.)+([^:\/]+)'), + full_text_url TEXT CHECK 
(full_text_url ~* '^[^:]*:\/\/(?:[^\/:]*:[^\/@]*@)?(?:[^\/:.]*\.)+([^:\/]+)'), + location_platform location_platform NOT NULL DEFAULT 'Other', + canonical BOOLEAN NOT NULL DEFAULT False, + created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + updated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, + -- Location must contain at least one of landing_page or full_text_url + CONSTRAINT location_url_check CHECK (landing_page IS NOT NULL OR full_text_url IS NOT NULL) +); +SELECT diesel_manage_updated_at('location'); + +-- Only allow one canonical location per publication +CREATE UNIQUE INDEX location_uniq_canonical_true_idx ON location(publication_id) + WHERE canonical; + +-- Only allow one instance of each platform (except 'Other') per publication +CREATE UNIQUE INDEX location_uniq_platform_idx ON location(publication_id, location_platform) + WHERE NOT location_platform = 'Other'; + +CREATE TABLE location_history ( + location_history_id UUID PRIMARY KEY DEFAULT uuid_generate_v4(), + location_id UUID NOT NULL REFERENCES location(location_id) ON DELETE CASCADE, + account_id UUID NOT NULL REFERENCES account(account_id), + data JSONB NOT NULL, + timestamp TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP +); + +-------------------------------------------------------------------------------- +--- START - Data migration for live database. Delete this patch after migration +-------------------------------------------------------------------------------- + +-- Migrate punctum PDF publications (publisher ID 9c41b13c-cecc-4f6a-a151-be4682915ef5): + +-- Each work that has publications should have a canonical PDF publication under a URL beginning "https://cloud.punctumbooks.com/s/". Create a new canonical location for each of these, using the URL as the full_text_url and the work's landing page as the landing_page. 
+ +INSERT INTO location(publication_id, landing_page, full_text_url, canonical) + SELECT publication_id, landing_page, publication_url, True + FROM publication + INNER JOIN work ON publication.work_id = work.work_id + INNER JOIN imprint ON work.imprint_id = imprint.imprint_id + WHERE imprint.publisher_id = '9c41b13c-cecc-4f6a-a151-be4682915ef5' + AND publication.publication_type = 'PDF' + AND publication.publication_url ILIKE 'https://cloud.punctumbooks.com/s/%'; + +-- Some works may have additional PDF publications under Project MUSE or JSTOR URLs. Create a new non-canonical location for each of these, using the URL as the landing_page, omitting the full_text_url, and linking to the canonical (punctum cloud) publication. + +INSERT INTO location(publication_id, landing_page, canonical, location_platform) + SELECT a.publication_id, b.publication_url, False, 'Project MUSE' + FROM publication a, publication b + INNER JOIN work ON b.work_id = work.work_id + INNER JOIN imprint ON work.imprint_id = imprint.imprint_id + WHERE imprint.publisher_id = '9c41b13c-cecc-4f6a-a151-be4682915ef5' + AND a.publication_type = 'PDF' + AND a.work_id = b.work_id + AND a.publication_url ILIKE 'https://cloud.punctumbooks.com/s/%' + AND b.publication_url ILIKE 'https://muse.jhu.edu/book/%'; + +INSERT INTO location(publication_id, landing_page, canonical, location_platform) + SELECT a.publication_id, b.publication_url, False, 'JSTOR' + FROM publication a, publication b + INNER JOIN work ON b.work_id = work.work_id + INNER JOIN imprint ON work.imprint_id = imprint.imprint_id + WHERE imprint.publisher_id = '9c41b13c-cecc-4f6a-a151-be4682915ef5' + AND a.publication_type = 'PDF' + AND a.work_id = b.work_id + AND a.publication_url ILIKE 'https://cloud.punctumbooks.com/s/%' + AND b.publication_url ILIKE 'https://www.jstor.org/stable/%'; + +-- Some works may have additional PDF publications under OAPEN URLs. 
Usually, one publication stores the OAPEN publication landing page, and another stores the OAPEN full text link. Create a new non-canonical location for each pair of OAPEN publications, using each URL as either the landing_page or the full_text_url as appropriate, and linking to the canonical (punctum cloud) publication. + +INSERT INTO location(publication_id, landing_page, full_text_url, canonical, location_platform) + SELECT a.publication_id, b.publication_url, c.publication_url, False, 'OAPEN' + FROM publication a, publication b, publication c + INNER JOIN work ON c.work_id = work.work_id + INNER JOIN imprint ON work.imprint_id = imprint.imprint_id + WHERE imprint.publisher_id = '9c41b13c-cecc-4f6a-a151-be4682915ef5' + AND a.publication_type = 'PDF' + AND a.work_id = b.work_id + AND a.work_id = c.work_id + AND a.publication_url ILIKE 'https://cloud.punctumbooks.com/s/%' + AND (b.publication_url ILIKE 'https://library.oapen.org/handle/%' OR b.publication_url ILIKE 'http://library.oapen.org/handle/%') + AND (c.publication_url ILIKE 'https://library.oapen.org/bitstream/%' OR c.publication_url ILIKE 'http://library.oapen.org/bitstream/%'); + +-- All MUSE, JSTOR and OAPEN PDF publications should now have had their URL data migrated to location objects. They should not contain any additional (ISBN/price, non-duplicated) data so should be safe to delete. +-- In a small number of cases, the OAPEN publications have been misclassified as 'Paperback' rather than 'PDF', so don't restrict the type when deleting. 
+ +DELETE FROM publication USING work, imprint + WHERE publication.work_id = work.work_id + AND work.imprint_id = imprint.imprint_id + AND imprint.publisher_id = '9c41b13c-cecc-4f6a-a151-be4682915ef5' + AND (publication_url ILIKE 'https://muse.jhu.edu/book/%' OR publication_url ILIKE 'https://www.jstor.org/stable/%' OR publication_url ILIKE 'https://library.oapen.org/handle/%' OR publication_url ILIKE 'http://library.oapen.org/handle/%' OR publication_url ILIKE 'https://library.oapen.org/bitstream/%' OR publication_url ILIKE 'http://library.oapen.org/bitstream/%') + AND (isbn IS NULL OR EXISTS ( + SELECT * FROM publication b + WHERE publication.work_id = b.work_id + AND publication.isbn = b.isbn + AND b.publication_url ILIKE 'https://cloud.punctumbooks.com/s/%')) + AND NOT EXISTS (SELECT * FROM price WHERE publication.publication_id = price.publication_id); + +-- All canonical (punctum cloud) publications should now have had their URL data migrated to location objects. Their publication_url fields should therefore be safe to clear. + +UPDATE publication SET publication_url = NULL + FROM work, imprint + WHERE publication.work_id = work.work_id + AND work.imprint_id = imprint.imprint_id + AND imprint.publisher_id = '9c41b13c-cecc-4f6a-a151-be4682915ef5' + AND publication_type = 'PDF' + AND publication_url ILIKE 'https://cloud.punctumbooks.com/s/%' + AND EXISTS (SELECT * FROM location WHERE publication.publication_id = location.publication_id AND publication.publication_url = location.full_text_url); + +-- Migrate punctum paperback publications (publisher ID 9c41b13c-cecc-4f6a-a151-be4682915ef5): + +-- If a work only has one paperback publication, assume that it is the canonical one. Create a new canonical location for each of these, using the URL as the landing_page. 
+ +INSERT INTO location(publication_id, landing_page, canonical) + SELECT publication_id, publication_url, True + FROM publication + INNER JOIN work ON publication.work_id = work.work_id + INNER JOIN imprint ON work.imprint_id = imprint.imprint_id + WHERE imprint.publisher_id = '9c41b13c-cecc-4f6a-a151-be4682915ef5' + AND publication_type = 'Paperback' + AND publication_url IS NOT NULL + AND NOT EXISTS + (SELECT * FROM publication b + WHERE publication.work_id = b.work_id + AND NOT publication.publication_id = b.publication_id + AND b.publication_type = 'Paperback'); + +-- Some works have multiple paperback publications. Inspection of the data shows that there are never more than two, they never have more than one distinct ISBN between them, and they never have more than one distinct set of prices between them (although they may have more than one distinct URL). +-- Assume that the main publication in these cases is the only one with prices, or else the only one with a URL, or else the one where the URL is a punctumbooks.com landing page (or, if all else is equal, the first one found). Create a canonical location for this publication. 
+ +INSERT INTO location(publication_id, landing_page, canonical) + SELECT a.publication_id, a.publication_url, True + FROM publication a + LEFT JOIN price aprice ON a.publication_id = aprice.publication_id + INNER JOIN work ON a.work_id = work.work_id + INNER JOIN imprint ON work.imprint_id = imprint.imprint_id, + publication b + LEFT JOIN price bprice ON b.publication_id = bprice.publication_id + WHERE imprint.publisher_id = '9c41b13c-cecc-4f6a-a151-be4682915ef5' + AND a.publication_type = 'Paperback' + AND b.publication_type = 'Paperback' + AND a.work_id = b.work_id + AND NOT a.publication_id = b.publication_id + AND a.publication_url IS NOT NULL + AND ((aprice.publication_id IS NOT NULL AND bprice.publication_id IS NULL) + OR ((aprice.currency_code IS NOT DISTINCT FROM bprice.currency_code AND aprice.unit_price IS NOT DISTINCT FROM bprice.unit_price) + AND (b.publication_url IS NULL OR b.publication_url NOT ILIKE 'https://punctumbooks.com/titles/%'))); + +-- A single work (ID 98ce9caa-487e-4391-86c9-e5d8129be5b6) has one paperback publication with prices but no URL, and another with a URL but no prices, so it is not covered by the above. Make a canonical location for it manually, attached to the publication with prices, then remove the publication without prices. 
+ +INSERT INTO location(publication_id, landing_page, canonical) + SELECT a.publication_id, b.publication_url, True + FROM publication a, publication b + WHERE a.work_id = '98ce9caa-487e-4391-86c9-e5d8129be5b6' + AND b.work_id = '98ce9caa-487e-4391-86c9-e5d8129be5b6' + AND a.publication_type = 'Paperback' + AND b.publication_type = 'Paperback' + AND NOT a.publication_id = b.publication_id + AND a.publication_url IS NULL + AND b.publication_url IS NOT NULL + AND EXISTS (SELECT * FROM price WHERE price.publication_id = a.publication_id) + AND NOT EXISTS (SELECT * FROM price WHERE price.publication_id = b.publication_id); + +DELETE FROM publication + WHERE work_id = '98ce9caa-487e-4391-86c9-e5d8129be5b6' + AND publication_type = 'Paperback' + AND NOT EXISTS (SELECT * FROM price WHERE price.publication_id = publication_id); + +-- Create non-canonical locations under the main publication for all the other URLs associated with this work. + +INSERT INTO location(publication_id, landing_page, canonical) + SELECT a.publication_id, b.publication_url, False + FROM publication a + INNER JOIN work ON a.work_id = work.work_id + INNER JOIN imprint ON work.imprint_id = imprint.imprint_id, + publication b + WHERE imprint.publisher_id = '9c41b13c-cecc-4f6a-a151-be4682915ef5' + AND a.publication_type = 'Paperback' + AND b.publication_type = 'Paperback' + AND a.work_id = b.work_id + AND NOT a.publication_id = b.publication_id + AND EXISTS (SELECT * FROM location WHERE a.publication_id = location.publication_id) + AND b.publication_url IS NOT NULL + AND b.publication_url IS DISTINCT FROM a.publication_url; + +-- For any case where the main publication lacks an ISBN, carry over the ISBN (if any) from the other publication. 
+ +UPDATE publication + SET isbn = b.isbn + FROM publication b, work, imprint + WHERE publication.work_id = work.work_id + AND work.imprint_id = imprint.imprint_id + AND imprint.publisher_id = '9c41b13c-cecc-4f6a-a151-be4682915ef5' + AND publication.publication_type = 'Paperback' + AND b.publication_type = 'Paperback' + AND publication.work_id = b.work_id + AND NOT publication.publication_id = b.publication_id + AND EXISTS (SELECT * FROM location WHERE publication.publication_id = location.publication_id) + AND publication.isbn IS NULL; + +-- All price, ISBN and URL information in non-main publications should now either be duplicated on the main publication or stored in the location table. Delete these publications. + +DELETE FROM publication USING work, imprint, publication b + WHERE publication.work_id = work.work_id + AND work.imprint_id = imprint.imprint_id + AND imprint.publisher_id = '9c41b13c-cecc-4f6a-a151-be4682915ef5' + AND publication.publication_type = 'Paperback' + AND b.publication_type = 'Paperback' + AND publication.work_id = b.work_id + AND NOT publication.publication_id = b.publication_id + AND NOT EXISTS (SELECT * FROM location WHERE publication.publication_id = location.publication_id) + AND (publication.publication_url IS NULL OR EXISTS (SELECT * FROM location WHERE b.publication_id = location.publication_id AND publication.publication_url = location.landing_page)) + AND (publication.isbn IS NOT DISTINCT FROM b.isbn OR publication.isbn IS NULL) + AND NOT EXISTS (SELECT unit_price, currency_code FROM price WHERE price.publication_id = publication.publication_id EXCEPT SELECT unit_price, currency_code FROM price WHERE price.publication_id = b.publication_id); + +-- All remaining publication_urls should now be listed in the location table as the canonical URL for that publication. Remove them from the publications. 
+ +UPDATE publication SET publication_url = NULL + FROM work, imprint + WHERE publication.work_id = work.work_id + AND work.imprint_id = imprint.imprint_id + AND imprint.publisher_id = '9c41b13c-cecc-4f6a-a151-be4682915ef5' + AND publication_type = 'Paperback' + AND publication_url IS NOT NULL + AND EXISTS (SELECT * FROM location WHERE publication.publication_id = location.publication_id AND publication.publication_url = location.landing_page); + +-- Migrate remaining duplicate publications: + +-- A single meson press work (ID 38872158-58b9-4ddf-a90e-f6001ac6c62d) accounts for all remaining duplicate publications. Inspection of the data shows two PDFs with differing URLs, identical ISBNs and no prices, and three paperbacks with differing URLs, identical ISBNs and two different prices (each in a different currency) between them. Handle these individually. + +-- PDFs: one has a meson.press URL, the other an OAPEN URL. Assume that the former is the main one. Create a canonical location for it, create a secondary location for the other one, and then delete the other one and remove the main one's publication_url. 
+ +INSERT INTO location(publication_id, landing_page, full_text_url, canonical) + SELECT publication_id, landing_page, publication_url, True + FROM publication + INNER JOIN work ON publication.work_id = work.work_id + WHERE publication.work_id = '38872158-58b9-4ddf-a90e-f6001ac6c62d' + AND publication.publication_type = 'PDF' + AND publication.publication_url ILIKE 'https://meson.press/wp-content/uploads/%'; + +INSERT INTO location(publication_id, landing_page, canonical, location_platform) + SELECT a.publication_id, b.publication_url, False, 'OAPEN' + FROM publication a, publication b + WHERE a.work_id = '38872158-58b9-4ddf-a90e-f6001ac6c62d' + AND b.work_id = '38872158-58b9-4ddf-a90e-f6001ac6c62d' + AND a.publication_type = 'PDF' + AND b.publication_type = 'PDF' + AND a.publication_url ILIKE 'https://meson.press/wp-content/uploads/%' + AND b.publication_url ILIKE 'https://library.oapen.org/bitstream/%'; + +DELETE FROM publication + WHERE publication.work_id = '38872158-58b9-4ddf-a90e-f6001ac6c62d' + AND publication.publication_type = 'PDF' + AND publication.publication_url ILIKE 'https://library.oapen.org/bitstream/%' + AND (isbn IS NULL OR EXISTS ( + SELECT * FROM publication b + WHERE publication.work_id = b.work_id + AND publication.isbn = b.isbn + AND b.publication_url ILIKE 'https://meson.press/wp-content/uploads/%')) + AND NOT EXISTS (SELECT * FROM price WHERE publication.publication_id = price.publication_id); + +UPDATE publication SET publication_url = NULL + WHERE publication.work_id = '38872158-58b9-4ddf-a90e-f6001ac6c62d' + AND publication.publication_type = 'PDF' + AND publication.publication_url ILIKE 'https://meson.press/wp-content/uploads/%' + AND EXISTS (SELECT * FROM location WHERE publication.publication_id = location.publication_id AND publication.publication_url = location.full_text_url); + +-- Paperbacks: none of the URLs are meson.press, so assume that the first publication entered (which has ID 1382662a-ae40-47ae-98a0-34e03ae71366) is the 
main one. Create a canonical location for it. + +INSERT INTO location(publication_id, landing_page, canonical) + SELECT publication_id, publication_url, True + FROM publication + WHERE publication.publication_id = '1382662a-ae40-47ae-98a0-34e03ae71366'; + +-- Create non-canonical locations for the other publications, linked to the main one. + +INSERT INTO location(publication_id, landing_page, canonical) + SELECT '1382662a-ae40-47ae-98a0-34e03ae71366', publication_url, False + FROM publication + WHERE publication.work_id = '38872158-58b9-4ddf-a90e-f6001ac6c62d' + AND publication.publication_type = 'Paperback' + AND NOT publication.publication_id = '1382662a-ae40-47ae-98a0-34e03ae71366'; + +-- One of the prices linked to a non-main publication is not duplicated on the main publication. Move it to the main publication. + +UPDATE price SET publication_id = '1382662a-ae40-47ae-98a0-34e03ae71366' + WHERE publication_id = '49003581-5829-457a-b626-a5ab30df9a55'; + +-- The non-main paperback publications can now be deleted, and the main publication_url cleared. + +DELETE FROM publication + WHERE publication.work_id = '38872158-58b9-4ddf-a90e-f6001ac6c62d' + AND publication.publication_type = 'Paperback' + AND NOT publication.publication_id = '1382662a-ae40-47ae-98a0-34e03ae71366'; + +UPDATE publication SET publication_url = NULL WHERE publication_id = '1382662a-ae40-47ae-98a0-34e03ae71366'; + +-- Migrate all remaining publications: + +-- All remaining publications across all publishers should now be unique per work/publication type. Therefore, any URLs which they have can be converted to canonical locations. For hard copy types, convert the publication_url to the location landing_page. For soft copy types, convert the publication_url to the location full_text_url and use the work landing_page as the location landing_page. +-- Double-check that no location entry already exists for the publication, and no duplicate publication exists. 
+ +INSERT INTO location(publication_id, landing_page, canonical) + SELECT publication_id, publication_url, True + FROM publication + WHERE (publication.publication_type = 'Paperback' OR publication.publication_type = 'Hardback') + AND publication_url IS NOT NULL + AND NOT EXISTS (SELECT * FROM publication b + WHERE publication.work_id = b.work_id + AND NOT publication.publication_id = b.publication_id + AND publication.publication_type = b.publication_type) + AND NOT EXISTS (SELECT * FROM location WHERE publication.publication_id = location.publication_id AND publication.publication_url = location.landing_page); + +INSERT INTO location(publication_id, landing_page, full_text_url, canonical) + SELECT publication_id, landing_page, publication_url, True + FROM publication + INNER JOIN work ON publication.work_id = work.work_id + WHERE (publication.publication_type = 'PDF' OR publication.publication_type = 'Epub' OR publication.publication_type = 'XML' OR publication.publication_type = 'Mobi' OR publication.publication_type = 'HTML') + AND publication_url IS NOT NULL + AND NOT EXISTS (SELECT * FROM publication b + WHERE publication.work_id = b.work_id + AND NOT publication.publication_id = b.publication_id + AND publication.publication_type = b.publication_type) + AND NOT EXISTS (SELECT * FROM location WHERE publication.publication_id = location.publication_id AND publication.publication_url = location.landing_page); + +-- All these publications can now have their URLs cleared. 
+ +UPDATE publication SET publication_url = NULL + FROM work + WHERE publication_url IS NOT NULL + AND NOT EXISTS (SELECT * FROM publication b + WHERE publication.work_id = b.work_id + AND NOT publication.publication_id = b.publication_id + AND publication.publication_type = b.publication_type) + AND EXISTS (SELECT * FROM location WHERE publication.publication_id = location.publication_id AND (publication.publication_url = location.landing_page OR publication.publication_url = location.full_text_url)); +----------------------------------------------------------------------------- +--- END - Data migration for live database. Delete this patch after migration +----------------------------------------------------------------------------- + +ALTER TABLE publication + -- Only allow one publication of each type per work (existing data may breach this) + -- To check for records which breach this constraint: + -- `select * from publication a where (select count(*) from publication b where a.publication_type = b.publication_type and a.work_id = b.work_id) > 1 order by work_id, publication_type;` + ADD CONSTRAINT publication_publication_type_work_id_uniq UNIQUE (publication_type, work_id), + -- Remove publication_url column (all data should have been migrated to location table above) + DROP COLUMN publication_url; diff --git a/thoth-api/src/account/model.rs b/thoth-api/src/account/model.rs index bd0c3dc3..9c8cce41 100644 --- a/thoth-api/src/account/model.rs +++ b/thoth-api/src/account/model.rs @@ -137,12 +137,11 @@ impl DecodedToken { impl AccountAccess { pub fn can_edit(&self, publisher_id: Uuid) -> ThothResult<()> { - if self.is_superuser { - Ok(()) - } else if let Some(_found) = &self - .linked_publishers - .iter() - .position(|publisher| publisher.publisher_id == publisher_id) + if self.is_superuser + || self + .linked_publishers + .iter() + .any(|publisher| publisher.publisher_id == publisher_id) { Ok(()) } else { diff --git a/thoth-api/src/graphql/model.rs 
b/thoth-api/src/graphql/model.rs index 4a1d1083..5cfd35ce 100644 --- a/thoth-api/src/graphql/model.rs +++ b/thoth-api/src/graphql/model.rs @@ -14,6 +14,7 @@ use crate::model::funding::*; use crate::model::imprint::*; use crate::model::issue::*; use crate::model::language::*; +use crate::model::location::*; use crate::model::price::*; use crate::model::publication::*; use crate::model::publisher::*; @@ -71,6 +72,13 @@ pub struct LanguageOrderBy { pub direction: Direction, } +#[derive(juniper::GraphQLInputObject)] +#[graphql(description = "Field and order to use when sorting locations list")] +pub struct LocationOrderBy { + pub field: LocationField, + pub direction: Direction, +} + #[derive(juniper::GraphQLInputObject)] #[graphql(description = "Field and order to use when sorting prices list")] pub struct PriceOrderBy { @@ -197,7 +205,7 @@ impl QueryRoot { offset(default = 0, description = "The number of items to skip"), filter( default = "".to_string(), - description = "A query string to search. This argument is a test, do not rely on it. At present it simply searches for case insensitive literals on isbn and publication_url" + description = "A query string to search. This argument is a test, do not rely on it. At present it simply searches for case insensitive literals on isbn" ), order( default = PublicationOrderBy::default(), @@ -244,7 +252,7 @@ impl QueryRoot { arguments( filter( default = "".to_string(), - description = "A query string to search. This argument is a test, do not rely on it. At present it simply searches for case insensitive literals on isbn and publication_url", + description = "A query string to search. This argument is a test, do not rely on it. 
At present it simply searches for case insensitive literals on isbn", ), publishers( default = vec![], @@ -707,6 +715,63 @@ impl QueryRoot { .map_err(|e| e.into()) } + #[graphql( + description = "Query the full list of locations", + arguments( + limit(default = 100, description = "The number of items to return"), + offset(default = 0, description = "The number of items to skip"), + order( + default = { + LocationOrderBy { + field: LocationField::LocationPlatform, + direction: Direction::Asc, + } + }, + description = "The order in which to sort the results", + ), + publishers( + default = vec![], + description = "If set, only shows results connected to publishers with these IDs", + ), + location_platform(description = "A specific platform to filter by"), + ) + )] + fn locations( + context: &Context, + limit: i32, + offset: i32, + order: LocationOrderBy, + publishers: Vec, + location_platform: Option, + ) -> FieldResult> { + Location::all( + &context.db, + limit, + offset, + None, + order, + publishers, + None, + None, + location_platform, + None, + ) + .map_err(|e| e.into()) + } + + #[graphql(description = "Query a single location using its id")] + fn location(context: &Context, location_id: Uuid) -> FieldResult { + Location::from_id(&context.db, &location_id).map_err(|e| e.into()) + } + + #[graphql(description = "Get the total number of locations associated to works")] + fn location_count( + context: &Context, + location_platform: Option, + ) -> FieldResult { + Location::count(&context.db, None, vec![], location_platform, None).map_err(|e| e.into()) + } + #[graphql( description = "Query the full list of prices", arguments( @@ -991,6 +1056,10 @@ impl MutationRoot { .account_access .can_edit(publisher_id_from_work_id(&context.db, data.work_id)?)?; + if !data.isbn.is_none() { + data.can_have_isbn(&context.db)?; + } + Publication::create(&context.db, &data).map_err(|e| e.into()) } @@ -1037,6 +1106,24 @@ impl MutationRoot { Funding::create(&context.db, 
&data).map_err(|e| e.into()) } + fn create_location(context: &Context, data: NewLocation) -> FieldResult { + context.token.jwt.as_ref().ok_or(ThothError::Unauthorised)?; + context + .account_access + .can_edit(publisher_id_from_publication_id( + &context.db, + data.publication_id, + )?)?; + + if data.canonical { + data.canonical_record_complete(&context.db)?; + } else { + data.can_be_non_canonical(&context.db)?; + } + + Location::create(&context.db, &data).map_err(|e| e.into()) + } + fn create_price(context: &Context, data: NewPrice) -> FieldResult { context.token.jwt.as_ref().ok_or(ThothError::Unauthorised)?; context @@ -1073,6 +1160,11 @@ impl MutationRoot { .can_edit(publisher_id_from_imprint_id(&context.db, data.imprint_id)?)?; work.can_update_imprint(&context.db)?; } + + if data.work_type == WorkType::BookChapter { + work.can_be_chapter(&context.db)?; + } + let account_id = context.token.jwt.as_ref().unwrap().account_id(&context.db); work.update_with_units(&context.db, data, &account_id, units) .map_err(|e| e.into()) @@ -1150,6 +1242,11 @@ impl MutationRoot { .account_access .can_edit(publisher_id_from_work_id(&context.db, data.work_id)?)?; } + + if !data.isbn.is_none() { + data.can_have_isbn(&context.db)?; + } + let account_id = context.token.jwt.as_ref().unwrap().account_id(&context.db); publication .update(&context.db, &data, &account_id) @@ -1241,6 +1338,39 @@ impl MutationRoot { .map_err(|e| e.into()) } + fn update_location(context: &Context, data: PatchLocation) -> FieldResult { + context.token.jwt.as_ref().ok_or(ThothError::Unauthorised)?; + let location = Location::from_id(&context.db, &data.location_id).unwrap(); + context + .account_access + .can_edit(location.publisher_id(&context.db)?)?; + + if !(data.publication_id == location.publication_id) { + context + .account_access + .can_edit(publisher_id_from_publication_id( + &context.db, + data.publication_id, + )?)?; + } + + if !(data.canonical == location.canonical) { + // Each publication must have 
exactly one canonical location. + // Updating an existing location would always violate this, + // as it should always result in either zero or two canonical locations. + return Err(ThothError::CanonicalLocationError.into()); + } + + if data.canonical { + data.canonical_record_complete(&context.db)?; + } + + let account_id = context.token.jwt.as_ref().unwrap().account_id(&context.db); + location + .update(&context.db, &data, &account_id) + .map_err(|e| e.into()) + } + fn update_price(context: &Context, data: PatchPrice) -> FieldResult { context.token.jwt.as_ref().ok_or(ThothError::Unauthorised)?; let price = Price::from_id(&context.db, &data.price_id).unwrap(); @@ -1388,6 +1518,16 @@ impl MutationRoot { funding.delete(&context.db).map_err(|e| e.into()) } + fn delete_location(context: &Context, location_id: Uuid) -> FieldResult { + context.token.jwt.as_ref().ok_or(ThothError::Unauthorised)?; + let location = Location::from_id(&context.db, &location_id).unwrap(); + context + .account_access + .can_edit(location.publisher_id(&context.db)?)?; + + location.delete(&context.db).map_err(|e| e.into()) + } + fn delete_price(context: &Context, price_id: Uuid) -> FieldResult { context.token.jwt.as_ref().ok_or(ThothError::Unauthorised)?; let price = Price::from_id(&context.db, &price_id).unwrap(); @@ -1663,7 +1803,7 @@ impl Work { offset(default = 0, description = "The number of items to skip"), filter( default = "".to_string(), - description = "A query string to search. This argument is a test, do not rely on it. At present it simply searches for case insensitive literals on isbn and publication_url" + description = "A query string to search. This argument is a test, do not rely on it. 
At present it simply searches for case insensitive literals on isbn" ), order( default = { @@ -1841,10 +1981,6 @@ impl Publication { self.isbn.as_ref() } - pub fn publication_url(&self) -> Option<&String> { - self.publication_url.as_ref() - } - pub fn created_at(&self) -> Timestamp { self.created_at.clone() } @@ -1893,6 +2029,46 @@ impl Publication { .map_err(|e| e.into()) } + #[graphql( + description = "Get locations linked to this publication", + arguments( + limit(default = 100, description = "The number of items to return"), + offset(default = 0, description = "The number of items to skip"), + order( + default = { + LocationOrderBy { + field: LocationField::LocationPlatform, + direction: Direction::Asc, + } + }, + description = "The order in which to sort the results", + ), + location_platform(description = "A specific platform to filter by"), + ) + )] + pub fn locations( + &self, + context: &Context, + limit: i32, + offset: i32, + order: LocationOrderBy, + location_platform: Option, + ) -> FieldResult> { + Location::all( + &context.db, + limit, + offset, + None, + order, + vec![], + Some(self.publication_id), + None, + location_platform, + None, + ) + .map_err(|e| e.into()) + } + pub fn work(&self, context: &Context) -> FieldResult { Work::from_id(&context.db, &self.work_id).map_err(|e| e.into()) } @@ -2340,6 +2516,45 @@ impl Language { } } +#[juniper::object(Context = Context, description = "A location, such as a web shop or distribution platform, where a publication can be acquired or viewed.")] +impl Location { + pub fn location_id(&self) -> Uuid { + self.location_id + } + + pub fn publication_id(&self) -> Uuid { + self.publication_id + } + + pub fn landing_page(&self) -> Option<&String> { + self.landing_page.as_ref() + } + + pub fn full_text_url(&self) -> Option<&String> { + self.full_text_url.as_ref() + } + + pub fn location_platform(&self) -> &LocationPlatform { + &self.location_platform + } + + pub fn canonical(&self) -> bool { + self.canonical + } + + 
pub fn created_at(&self) -> Timestamp { + self.created_at.clone() + } + + pub fn updated_at(&self) -> Timestamp { + self.updated_at.clone() + } + + pub fn publication(&self, context: &Context) -> FieldResult { + Publication::from_id(&context.db, &self.publication_id).map_err(|e| e.into()) + } +} + #[juniper::object(Context = Context, description = "The amount of money, in any currency, that a publication costs.")] impl Price { pub fn price_id(&self) -> Uuid { diff --git a/thoth-api/src/model/contribution/mod.rs b/thoth-api/src/model/contribution/mod.rs index 42f823ee..805153d3 100644 --- a/thoth-api/src/model/contribution/mod.rs +++ b/thoth-api/src/model/contribution/mod.rs @@ -55,7 +55,7 @@ pub enum ContributionField { } #[cfg_attr(feature = "backend", derive(Queryable))] -#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq)] +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] #[serde(rename_all = "camelCase")] pub struct Contribution { pub contribution_id: Uuid, @@ -143,6 +143,26 @@ impl Default for ContributionType { } } +impl Default for Contribution { + fn default() -> Contribution { + Contribution { + contribution_id: Default::default(), + work_id: Default::default(), + contributor_id: Default::default(), + contribution_type: Default::default(), + main_contribution: Default::default(), + biography: Default::default(), + institution: Default::default(), + created_at: Default::default(), + updated_at: Default::default(), + first_name: Default::default(), + last_name: Default::default(), + full_name: Default::default(), + contribution_ordinal: 1, + } + } +} + #[test] fn test_contributiontype_default() { let contributiontype: ContributionType = Default::default(); diff --git a/thoth-api/src/model/location/crud.rs b/thoth-api/src/model/location/crud.rs new file mode 100644 index 00000000..37b9fc10 --- /dev/null +++ b/thoth-api/src/model/location/crud.rs @@ -0,0 +1,260 @@ +use super::{ + Location, LocationField, LocationHistory, 
LocationPlatform, NewLocation, NewLocationHistory, + PatchLocation, +}; +use crate::graphql::model::LocationOrderBy; +use crate::graphql::utils::Direction; +use crate::model::{Crud, DbInsert, HistoryEntry}; +use crate::schema::{location, location_history}; +use crate::{crud_methods, db_insert}; +use diesel::{ExpressionMethods, QueryDsl, RunQueryDsl}; +use thoth_errors::{ThothError, ThothResult}; +use uuid::Uuid; + +impl Crud for Location { + type NewEntity = NewLocation; + type PatchEntity = PatchLocation; + type OrderByEntity = LocationOrderBy; + type FilterParameter1 = LocationPlatform; + type FilterParameter2 = (); + + fn pk(&self) -> Uuid { + self.location_id + } + + fn all( + db: &crate::db::PgPool, + limit: i32, + offset: i32, + _: Option, + order: Self::OrderByEntity, + publishers: Vec, + parent_id_1: Option, + _: Option, + location_platform: Option, + _: Option, + ) -> ThothResult> { + use crate::schema::location::dsl; + let connection = db.get().unwrap(); + let mut query = + dsl::location + .inner_join(crate::schema::publication::table.inner_join( + crate::schema::work::table.inner_join(crate::schema::imprint::table), + )) + .select(( + dsl::location_id, + dsl::publication_id, + dsl::landing_page, + dsl::full_text_url, + dsl::location_platform, + dsl::canonical, + dsl::created_at, + dsl::updated_at, + )) + .into_boxed(); + + match order.field { + LocationField::LocationId => match order.direction { + Direction::Asc => query = query.order(dsl::location_id.asc()), + Direction::Desc => query = query.order(dsl::location_id.desc()), + }, + LocationField::PublicationId => match order.direction { + Direction::Asc => query = query.order(dsl::publication_id.asc()), + Direction::Desc => query = query.order(dsl::publication_id.desc()), + }, + LocationField::LandingPage => match order.direction { + Direction::Asc => query = query.order(dsl::landing_page.asc()), + Direction::Desc => query = query.order(dsl::landing_page.desc()), + }, + LocationField::FullTextUrl => 
match order.direction { + Direction::Asc => query = query.order(dsl::full_text_url.asc()), + Direction::Desc => query = query.order(dsl::full_text_url.desc()), + }, + LocationField::LocationPlatform => match order.direction { + Direction::Asc => query = query.order(dsl::location_platform.asc()), + Direction::Desc => query = query.order(dsl::location_platform.desc()), + }, + LocationField::Canonical => match order.direction { + Direction::Asc => query = query.order(dsl::canonical.asc()), + Direction::Desc => query = query.order(dsl::canonical.desc()), + }, + LocationField::CreatedAt => match order.direction { + Direction::Asc => query = query.order(dsl::created_at.asc()), + Direction::Desc => query = query.order(dsl::created_at.desc()), + }, + LocationField::UpdatedAt => match order.direction { + Direction::Asc => query = query.order(dsl::updated_at.asc()), + Direction::Desc => query = query.order(dsl::updated_at.desc()), + }, + } + // This loop must appear before any other filter statements, as it takes advantage of + // the behaviour of `or_filter` being equal to `filter` when no other filters are present yet. + // Result needs to be `WHERE (x = $1 [OR x = $2...]) AND ([...])` - note bracketing. 
+ for pub_id in publishers { + query = query.or_filter(crate::schema::imprint::publisher_id.eq(pub_id)); + } + if let Some(pid) = parent_id_1 { + query = query.filter(dsl::publication_id.eq(pid)); + } + if let Some(loc_platform) = location_platform { + query = query.filter(dsl::location_platform.eq(loc_platform)); + } + match query + .limit(limit.into()) + .offset(offset.into()) + .load::(&connection) + { + Ok(t) => Ok(t), + Err(e) => Err(ThothError::from(e)), + } + } + + fn count( + db: &crate::db::PgPool, + _: Option, + _: Vec, + location_platform: Option, + _: Option, + ) -> ThothResult { + use crate::schema::location::dsl; + let connection = db.get().unwrap(); + let mut query = dsl::location.into_boxed(); + if let Some(loc_platform) = location_platform { + query = query.filter(dsl::location_platform.eq(loc_platform)); + } + // `SELECT COUNT(*)` in postgres returns a BIGINT, which diesel parses as i64. Juniper does + // not implement i64 yet, only i32. The only sensible way, albeit shameful, to solve this + // is converting i64 to string and then parsing it as i32. This should work until we reach + // 2147483647 records - if you are fixing this bug, congratulations on book number 2147483647! 
+ match query.count().get_result::(&connection) { + Ok(t) => Ok(t.to_string().parse::().unwrap()), + Err(e) => Err(ThothError::from(e)), + } + } + + fn publisher_id(&self, db: &crate::db::PgPool) -> ThothResult { + crate::model::publication::Publication::from_id(db, &self.publication_id)?.publisher_id(db) + } + + crud_methods!(location::table, location::dsl::location); +} + +impl HistoryEntry for Location { + type NewHistoryEntity = NewLocationHistory; + + fn new_history_entry(&self, account_id: &Uuid) -> Self::NewHistoryEntity { + Self::NewHistoryEntity { + location_id: self.location_id, + account_id: *account_id, + data: serde_json::Value::String(serde_json::to_string(&self).unwrap()), + } + } +} + +impl DbInsert for NewLocationHistory { + type MainEntity = LocationHistory; + + db_insert!(location_history::table); +} + +impl NewLocation { + pub fn can_be_non_canonical(&self, db: &crate::db::PgPool) -> ThothResult<()> { + use crate::schema::location::dsl; + use diesel::prelude::*; + + let connection = db.get().unwrap(); + let canonical_count = dsl::location + .filter(dsl::publication_id.eq(self.publication_id)) + .filter(dsl::canonical) + .count() + .get_result::(&connection) + .expect("Error loading locations for publication") + .to_string() + .parse::() + .unwrap(); + // A location can only be non-canonical if another location + // marked as canonical exists for the same publication. 
+ if canonical_count == 0 { + Err(ThothError::CanonicalLocationError) + } else { + Ok(()) + } + } + + pub fn canonical_record_complete(&self, db: &crate::db::PgPool) -> ThothResult<()> { + location_canonical_record_complete( + self.publication_id, + &self.landing_page, + &self.full_text_url, + db, + ) + } +} + +impl PatchLocation { + pub fn canonical_record_complete(&self, db: &crate::db::PgPool) -> ThothResult<()> { + location_canonical_record_complete( + self.publication_id, + &self.landing_page, + &self.full_text_url, + db, + ) + } +} + +fn location_canonical_record_complete( + publication_id: Uuid, + landing_page: &Option, + full_text_url: &Option, + db: &crate::db::PgPool, +) -> ThothResult<()> { + // If a canonical location has both the possible URLs, it is always complete. + if landing_page.is_some() && full_text_url.is_some() { + Ok(()) + } else { + use crate::model::publication::PublicationType; + use diesel::prelude::*; + + let connection = db.get().unwrap(); + let publication_type = crate::schema::publication::table + .select(crate::schema::publication::publication_type) + .filter(crate::schema::publication::publication_id.eq(publication_id)) + .first::(&connection) + .expect("Error loading publication type for location"); + // If a canonical location's publication is of a digital type, + // it must have both the possible URLs to count as complete. + if publication_type != PublicationType::Hardback + && publication_type != PublicationType::Paperback + { + Err(ThothError::LocationUrlError) + } else { + // For non-digital types, at least one URL must be present, + // but exceptions to this will be caught at the database level. 
+ Ok(()) + } + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_location_pk() { + let location: Location = Default::default(); + assert_eq!(location.pk(), location.location_id); + } + + #[test] + fn test_new_location_history_from_location() { + let location: Location = Default::default(); + let account_id: Uuid = Default::default(); + let new_location_history = location.new_history_entry(&account_id); + assert_eq!(new_location_history.location_id, location.location_id); + assert_eq!(new_location_history.account_id, account_id); + assert_eq!( + new_location_history.data, + serde_json::Value::String(serde_json::to_string(&location).unwrap()) + ); + } +} diff --git a/thoth-api/src/model/location/mod.rs b/thoth-api/src/model/location/mod.rs new file mode 100644 index 00000000..6bd64cf7 --- /dev/null +++ b/thoth-api/src/model/location/mod.rs @@ -0,0 +1,211 @@ +use serde::{Deserialize, Serialize}; +use strum::Display; +use strum::EnumString; +use uuid::Uuid; + +use crate::model::Timestamp; +#[cfg(feature = "backend")] +use crate::schema::location; +#[cfg(feature = "backend")] +use crate::schema::location_history; + +#[cfg_attr(feature = "backend", derive(DbEnum, juniper::GraphQLEnum))] +#[cfg_attr(feature = "backend", DieselType = "Location_platform")] +#[derive(Debug, Clone, PartialEq, Deserialize, Serialize, EnumString, Display)] +#[serde(rename_all = "SCREAMING_SNAKE_CASE")] +pub enum LocationPlatform { + #[cfg_attr(feature = "backend", db_rename = "Project MUSE")] + #[strum(serialize = "Project MUSE")] + ProjectMuse, + #[cfg_attr(feature = "backend", db_rename = "OAPEN")] + #[strum(serialize = "OAPEN")] + Oapen, + #[cfg_attr(feature = "backend", db_rename = "DOAB")] + #[strum(serialize = "DOAB")] + Doab, + #[cfg_attr(feature = "backend", db_rename = "JSTOR")] + #[strum(serialize = "JSTOR")] + Jstor, + #[cfg_attr(feature = "backend", db_rename = "EBSCO Host")] + #[strum(serialize = "EBSCO Host")] + EbscoHost, + #[cfg_attr(feature = "backend", 
db_rename = "OCLC KB")] + #[strum(serialize = "OCLC KB")] + OclcKb, + #[cfg_attr(feature = "backend", db_rename = "ProQuest KB")] + #[strum(serialize = "ProQuest KB")] + ProquestKb, + #[cfg_attr(feature = "backend", db_rename = "ProQuest ExLibris")] + #[strum(serialize = "ProQuest ExLibris")] + ProquestExlibris, + #[cfg_attr(feature = "backend", db_rename = "EBSCO KB")] + #[strum(serialize = "EBSCO KB")] + EbscoKb, + #[cfg_attr(feature = "backend", db_rename = "JISC KB")] + #[strum(serialize = "JISC KB")] + JiscKb, + #[cfg_attr(feature = "backend", db_rename = "Other")] + Other, +} + +#[cfg_attr( + feature = "backend", + derive(juniper::GraphQLEnum), + graphql(description = "Field to use when sorting locations list") +)] +pub enum LocationField { + LocationId, + PublicationId, + LandingPage, + FullTextUrl, + LocationPlatform, + Canonical, + CreatedAt, + UpdatedAt, +} + +#[cfg_attr(feature = "backend", derive(Queryable))] +#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq)] +#[serde(rename_all = "camelCase")] +pub struct Location { + pub location_id: Uuid, + pub publication_id: Uuid, + pub landing_page: Option, + pub full_text_url: Option, + pub location_platform: LocationPlatform, + pub canonical: bool, + pub created_at: Timestamp, + pub updated_at: Timestamp, +} + +#[cfg_attr( + feature = "backend", + derive(juniper::GraphQLInputObject, Insertable), + table_name = "location" +)] +pub struct NewLocation { + pub publication_id: Uuid, + pub landing_page: Option, + pub full_text_url: Option, + pub location_platform: LocationPlatform, + pub canonical: bool, +} + +#[cfg_attr( + feature = "backend", + derive(juniper::GraphQLInputObject, AsChangeset), + changeset_options(treat_none_as_null = "true"), + table_name = "location" +)] +pub struct PatchLocation { + pub location_id: Uuid, + pub publication_id: Uuid, + pub landing_page: Option, + pub full_text_url: Option, + pub location_platform: LocationPlatform, + pub canonical: bool, +} + +#[cfg_attr(feature = 
"backend", derive(Queryable))] +pub struct LocationHistory { + pub location_history_id: Uuid, + pub location_id: Uuid, + pub account_id: Uuid, + pub data: serde_json::Value, + pub timestamp: Timestamp, +} + +#[cfg_attr( + feature = "backend", + derive(Insertable), + table_name = "location_history" +)] +pub struct NewLocationHistory { + pub location_id: Uuid, + pub account_id: Uuid, + pub data: serde_json::Value, +} + +impl Default for LocationPlatform { + fn default() -> LocationPlatform { + LocationPlatform::Other + } +} + +#[test] +fn test_locationplatform_default() { + let locationplatform: LocationPlatform = Default::default(); + assert_eq!(locationplatform, LocationPlatform::Other); +} + +#[test] +fn test_locationplatform_display() { + assert_eq!(format!("{}", LocationPlatform::ProjectMuse), "Project MUSE"); + assert_eq!(format!("{}", LocationPlatform::Oapen), "OAPEN"); + assert_eq!(format!("{}", LocationPlatform::Doab), "DOAB"); + assert_eq!(format!("{}", LocationPlatform::Jstor), "JSTOR"); + assert_eq!(format!("{}", LocationPlatform::EbscoHost), "EBSCO Host"); + assert_eq!(format!("{}", LocationPlatform::OclcKb), "OCLC KB"); + assert_eq!(format!("{}", LocationPlatform::ProquestKb), "ProQuest KB"); + assert_eq!( + format!("{}", LocationPlatform::ProquestExlibris), + "ProQuest ExLibris" + ); + assert_eq!(format!("{}", LocationPlatform::EbscoKb), "EBSCO KB"); + assert_eq!(format!("{}", LocationPlatform::JiscKb), "JISC KB"); + assert_eq!(format!("{}", LocationPlatform::Other), "Other"); +} + +#[test] +fn test_locationplatform_fromstr() { + use std::str::FromStr; + assert_eq!( + LocationPlatform::from_str("Project MUSE").unwrap(), + LocationPlatform::ProjectMuse + ); + assert_eq!( + LocationPlatform::from_str("OAPEN").unwrap(), + LocationPlatform::Oapen + ); + assert_eq!( + LocationPlatform::from_str("DOAB").unwrap(), + LocationPlatform::Doab + ); + assert_eq!( + LocationPlatform::from_str("JSTOR").unwrap(), + LocationPlatform::Jstor + ); + assert_eq!( + 
LocationPlatform::from_str("EBSCO Host").unwrap(), + LocationPlatform::EbscoHost + ); + assert_eq!( + LocationPlatform::from_str("OCLC KB").unwrap(), + LocationPlatform::OclcKb + ); + assert_eq!( + LocationPlatform::from_str("ProQuest KB").unwrap(), + LocationPlatform::ProquestKb + ); + assert_eq!( + LocationPlatform::from_str("ProQuest ExLibris").unwrap(), + LocationPlatform::ProquestExlibris + ); + assert_eq!( + LocationPlatform::from_str("EBSCO KB").unwrap(), + LocationPlatform::EbscoKb + ); + assert_eq!( + LocationPlatform::from_str("JISC KB").unwrap(), + LocationPlatform::JiscKb + ); + assert_eq!( + LocationPlatform::from_str("Other").unwrap(), + LocationPlatform::Other + ); + assert!(LocationPlatform::from_str("Amazon").is_err()); + assert!(LocationPlatform::from_str("Twitter").is_err()); +} + +#[cfg(feature = "backend")] +pub mod crud; diff --git a/thoth-api/src/model/mod.rs b/thoth-api/src/model/mod.rs index e4a7a559..88bbceaa 100644 --- a/thoth-api/src/model/mod.rs +++ b/thoth-api/src/model/mod.rs @@ -736,6 +736,7 @@ pub mod funding; pub mod imprint; pub mod issue; pub mod language; +pub mod location; pub mod price; pub mod publication; pub mod publisher; diff --git a/thoth-api/src/model/publication/crud.rs b/thoth-api/src/model/publication/crud.rs index 729e87c6..f600af0d 100644 --- a/thoth-api/src/model/publication/crud.rs +++ b/thoth-api/src/model/publication/crud.rs @@ -6,9 +6,7 @@ use crate::graphql::utils::Direction; use crate::model::{Crud, DbInsert, HistoryEntry}; use crate::schema::{publication, publication_history}; use crate::{crud_methods, db_insert}; -use diesel::{ - BoolExpressionMethods, ExpressionMethods, PgTextExpressionMethods, QueryDsl, RunQueryDsl, -}; +use diesel::{ExpressionMethods, PgTextExpressionMethods, QueryDsl, RunQueryDsl}; use thoth_errors::{ThothError, ThothResult}; use uuid::Uuid; @@ -44,7 +42,6 @@ impl Crud for Publication { dsl::publication_type, dsl::work_id, dsl::isbn, - dsl::publication_url, dsl::created_at, 
dsl::updated_at, )) @@ -67,10 +64,6 @@ impl Crud for Publication { Direction::Asc => query = query.order(dsl::isbn.asc()), Direction::Desc => query = query.order(dsl::isbn.desc()), }, - PublicationField::PublicationUrl => match order.direction { - Direction::Asc => query = query.order(dsl::publication_url.asc()), - Direction::Desc => query = query.order(dsl::publication_url.desc()), - }, PublicationField::CreatedAt => match order.direction { Direction::Asc => query = query.order(dsl::created_at.asc()), Direction::Desc => query = query.order(dsl::created_at.desc()), @@ -93,13 +86,9 @@ impl Crud for Publication { query = query.filter(dsl::publication_type.eq(pub_type)); } if let Some(filter) = filter { - // ISBN and URL fields are both nullable, so searching with an empty filter could fail + // ISBN field is nullable, so searching with an empty filter could fail if !filter.is_empty() { - query = query.filter( - dsl::isbn - .ilike(format!("%{}%", filter)) - .or(dsl::publication_url.ilike(format!("%{}%", filter))), - ); + query = query.filter(dsl::isbn.ilike(format!("%{}%", filter))); } } match query @@ -128,7 +117,6 @@ impl Crud for Publication { dsl::publication_type, dsl::work_id, dsl::isbn, - dsl::publication_url, dsl::created_at, dsl::updated_at, )) @@ -143,13 +131,9 @@ impl Crud for Publication { query = query.filter(dsl::publication_type.eq(pub_type)); } if let Some(filter) = filter { - // ISBN and URL fields are both nullable, so searching with an empty filter could fail + // ISBN field is nullable, so searching with an empty filter could fail if !filter.is_empty() { - query = query.filter( - dsl::isbn - .ilike(format!("%{}%", filter)) - .or(dsl::publication_url.ilike(format!("%{}%", filter))), - ); + query = query.filter(dsl::isbn.ilike(format!("%{}%", filter))); } } @@ -188,6 +172,37 @@ impl DbInsert for NewPublicationHistory { db_insert!(publication_history::table); } +impl NewPublication { + pub fn can_have_isbn(&self, db: &crate::db::PgPool) -> 
ThothResult<()> { + publication_can_have_isbn(self.work_id, db) + } +} + +impl PatchPublication { + pub fn can_have_isbn(&self, db: &crate::db::PgPool) -> ThothResult<()> { + publication_can_have_isbn(self.work_id, db) + } +} + +fn publication_can_have_isbn(work_id: Uuid, db: &crate::db::PgPool) -> ThothResult<()> { + use crate::model::work::WorkType; + use diesel::prelude::*; + + let connection = db.get().unwrap(); + let work_type = crate::schema::work::table + .select(crate::schema::work::work_type) + .filter(crate::schema::work::work_id.eq(work_id)) + .first::(&connection) + .expect("Error loading work type for publication"); + // If a publication's work is of type Book Chapter, + // it cannot have an ISBN. + if work_type == WorkType::BookChapter { + Err(ThothError::ChapterIsbnError) + } else { + Ok(()) + } +} + #[cfg(test)] mod tests { use super::*; diff --git a/thoth-api/src/model/publication/mod.rs b/thoth-api/src/model/publication/mod.rs index 854e9422..4bd8a822 100644 --- a/thoth-api/src/model/publication/mod.rs +++ b/thoth-api/src/model/publication/mod.rs @@ -4,6 +4,7 @@ use strum::EnumString; use uuid::Uuid; use crate::graphql::utils::Direction; +use crate::model::location::Location; use crate::model::price::Price; use crate::model::work::WorkWithRelations; use crate::model::Isbn; @@ -53,8 +54,6 @@ pub enum PublicationField { WorkId, #[strum(serialize = "ISBN")] Isbn, - #[strum(serialize = "URL")] - PublicationUrl, CreatedAt, UpdatedAt, } @@ -67,7 +66,6 @@ pub struct Publication { pub publication_type: PublicationType, pub work_id: Uuid, pub isbn: Option, - pub publication_url: Option, pub created_at: Timestamp, pub updated_at: Timestamp, } @@ -79,9 +77,9 @@ pub struct PublicationWithRelations { pub publication_type: PublicationType, pub work_id: Uuid, pub isbn: Option, - pub publication_url: Option, pub updated_at: Timestamp, pub prices: Option>, + pub locations: Option>, pub work: WorkWithRelations, } @@ -94,7 +92,6 @@ pub struct NewPublication { pub 
publication_type: PublicationType, pub work_id: Uuid, pub isbn: Option, - pub publication_url: Option, } #[cfg_attr( @@ -108,7 +105,6 @@ pub struct PatchPublication { pub publication_type: PublicationType, pub work_id: Uuid, pub isbn: Option, - pub publication_url: Option, } #[cfg_attr(feature = "backend", derive(Queryable))] @@ -183,7 +179,6 @@ fn test_publicationfield_display() { assert_eq!(format!("{}", PublicationField::PublicationType), "Type"); assert_eq!(format!("{}", PublicationField::WorkId), "WorkID"); assert_eq!(format!("{}", PublicationField::Isbn), "ISBN"); - assert_eq!(format!("{}", PublicationField::PublicationUrl), "URL"); assert_eq!(format!("{}", PublicationField::CreatedAt), "CreatedAt"); assert_eq!(format!("{}", PublicationField::UpdatedAt), "UpdatedAt"); } @@ -243,10 +238,6 @@ fn test_publicationfield_fromstr() { PublicationField::from_str("ISBN").unwrap(), PublicationField::Isbn ); - assert_eq!( - PublicationField::from_str("URL").unwrap(), - PublicationField::PublicationUrl - ); assert_eq!( PublicationField::from_str("CreatedAt").unwrap(), PublicationField::CreatedAt diff --git a/thoth-api/src/model/work/crud.rs b/thoth-api/src/model/work/crud.rs index 65efd142..73fec7a4 100644 --- a/thoth-api/src/model/work/crud.rs +++ b/thoth-api/src/model/work/crud.rs @@ -49,6 +49,27 @@ impl Work { } } + pub fn can_be_chapter(&self, db: &crate::db::PgPool) -> ThothResult<()> { + use crate::schema::publication::dsl; + let connection = db.get().unwrap(); + let isbn_count = dsl::publication + .filter(dsl::work_id.eq(self.work_id)) + .filter(dsl::isbn.is_not_null()) + .count() + .get_result::(&connection) + .expect("Error loading publication ISBNs for work") + .to_string() + .parse::() + .unwrap(); + // If a work has any publications with ISBNs, + // its type cannot be changed to Book Chapter. 
+ if isbn_count == 0 { + Ok(()) + } else { + Err(ThothError::ChapterIsbnError) + } + } + pub fn update_with_units( &self, db: &crate::db::PgPool, diff --git a/thoth-api/src/schema.rs b/thoth-api/src/schema.rs index 59d64a3c..07fc13b8 100644 --- a/thoth-api/src/schema.rs +++ b/thoth-api/src/schema.rs @@ -208,6 +208,34 @@ table! { } } +table! { + use diesel::sql_types::*; + use crate::model::location::Location_platform; + + location (location_id) { + location_id -> Uuid, + publication_id -> Uuid, + landing_page -> Nullable, + full_text_url -> Nullable, + location_platform -> Location_platform, + canonical -> Bool, + created_at -> Timestamptz, + updated_at -> Timestamptz, + } +} + +table! { + use diesel::sql_types::*; + + location_history (location_history_id) { + location_history_id -> Uuid, + location_id -> Uuid, + account_id -> Uuid, + data -> Jsonb, + timestamp -> Timestamptz, + } +} + table! { use diesel::sql_types::*; use crate::model::price::Currency_code; @@ -243,7 +271,6 @@ table! 
{ publication_type -> Publication_type, work_id -> Uuid, isbn -> Nullable, - publication_url -> Nullable, created_at -> Timestamptz, updated_at -> Timestamptz, } @@ -430,6 +457,9 @@ joinable!(issue_history -> issue (issue_id)); joinable!(language -> work (work_id)); joinable!(language_history -> account (account_id)); joinable!(language_history -> language (language_id)); +joinable!(location -> publication (publication_id)); +joinable!(location_history -> account (account_id)); +joinable!(location_history -> location (location_id)); joinable!(price -> publication (publication_id)); joinable!(price_history -> account (account_id)); joinable!(price_history -> price (price_id)); @@ -466,6 +496,8 @@ allow_tables_to_appear_in_same_query!( issue_history, language, language_history, + location, + location_history, price, price_history, publication, diff --git a/thoth-app-server/Cargo.toml b/thoth-app-server/Cargo.toml index db3648e1..7432a7b6 100644 --- a/thoth-app-server/Cargo.toml +++ b/thoth-app-server/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "thoth-app-server" -version = "0.4.7" +version = "0.5.0" authors = ["Javier Arias ", "Ross Higman "] edition = "2018" license = "Apache-2.0" diff --git a/thoth-app/Cargo.toml b/thoth-app/Cargo.toml index 5527080b..4d944524 100644 --- a/thoth-app/Cargo.toml +++ b/thoth-app/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "thoth-app" -version = "0.4.7" +version = "0.5.0" authors = ["Javier Arias ", "Ross Higman "] edition = "2018" license = "Apache-2.0" @@ -33,5 +33,5 @@ serde = { version = "1.0.115", features = ["derive"] } serde_json = "1.0" url = "2.1.1" uuid = { version = "0.7", features = ["serde", "v4"] } -thoth-api = { version = "0.4.7", path = "../thoth-api" } -thoth-errors = { version = "0.4.7", path = "../thoth-errors" } +thoth-api = { version = "0.5.0", path = "../thoth-api" } +thoth-errors = { version = "0.5.0", path = "../thoth-errors" } diff --git a/thoth-app/manifest.json b/thoth-app/manifest.json index 
83546eaa..3c55a5c1 100644 --- a/thoth-app/manifest.json +++ b/thoth-app/manifest.json @@ -9,7 +9,7 @@ "start_url": "/?homescreen=1", "background_color": "#ffffff", "theme_color": "#ffdd57", - "version": "0.4.7", + "version": "0.5.0", "icons": [ { "src": "\/android-icon-36x36.png", diff --git a/thoth-app/src/component/contributions_form.rs b/thoth-app/src/component/contributions_form.rs index 0f45607c..a446647d 100644 --- a/thoth-app/src/component/contributions_form.rs +++ b/thoth-app/src/component/contributions_form.rs @@ -45,6 +45,8 @@ use crate::string::NO; use crate::string::REMOVE_BUTTON; use crate::string::YES; +use super::ToOption; + pub struct ContributionsFormComponent { props: Props, data: ContributionsFormData, @@ -86,7 +88,6 @@ pub enum Msg { ChangeContributiontype(ContributionType), ChangeMainContribution(bool), ChangeOrdinal(String), - DoNothing, } #[derive(Clone, Properties, PartialEq)] @@ -311,29 +312,26 @@ impl Component for ContributionsFormComponent { self.link.send_message(Msg::GetContributors); false } - Msg::ChangeFirstName(val) => { - let value = match val.is_empty() { - true => None, - false => Some(val), - }; - self.new_contribution.first_name.neq_assign(value) - } - Msg::ChangeLastName(val) => self.new_contribution.last_name.neq_assign(val), - Msg::ChangeFullName(val) => self.new_contribution.full_name.neq_assign(val), - Msg::ChangeInstitution(val) => { - let value = match val.is_empty() { - true => None, - false => Some(val), - }; - self.new_contribution.institution.neq_assign(value) - } - Msg::ChangeBiography(val) => { - let value = match val.is_empty() { - true => None, - false => Some(val), - }; - self.new_contribution.biography.neq_assign(value) - } + Msg::ChangeFirstName(val) => self + .new_contribution + .first_name + .neq_assign(val.to_opt_string()), + Msg::ChangeLastName(val) => self + .new_contribution + .last_name + .neq_assign(val.trim().to_owned()), + Msg::ChangeFullName(val) => self + .new_contribution + .full_name + 
.neq_assign(val.trim().to_owned()), + Msg::ChangeInstitution(val) => self + .new_contribution + .institution + .neq_assign(val.to_opt_string()), + Msg::ChangeBiography(val) => self + .new_contribution + .biography + .neq_assign(val.to_opt_string()), Msg::ChangeContributiontype(val) => { self.new_contribution.contribution_type.neq_assign(val) } @@ -347,7 +345,6 @@ impl Component for ContributionsFormComponent { .neq_assign(ordinal); false // otherwise we re-render the component and reset the value } - Msg::DoNothing => false, // callbacks need to return a message } } @@ -415,9 +412,9 @@ impl Component for ContributionsFormComponent { >